diff --git a/modules/Workflow/whale/main.py b/modules/Workflow/whale/main.py index b57c0e9dd..71bad4c89 100644 --- a/modules/Workflow/whale/main.py +++ b/modules/Workflow/whale/main.py @@ -391,12 +391,12 @@ def run_command(command): return str(result), returncode except WorkFlowInputError as e: - print(str(e).replace('\'','')) + print(str(e).replace("'", '')) print(str(result)) sys.exit(-20) except: - # if for whatever reason the above failes, move on + # if for whatever reason the above fails, move on return str(result), 0 return result, returncode diff --git a/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py b/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py index 461187f04..afa851574 100644 --- a/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py +++ b/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py @@ -38,9 +38,9 @@ # -# This script reads OpenFOAM output and plot the characteristics of the -# approaching wind. For now, it read and plots only velocity field data and -# pressure on predicted set of probes. +# This script reads OpenFOAM output and plot the characteristics of the +# approaching wind. For now, it read and plots only velocity field data and +# pressure on predicted set of probes. # import sys @@ -51,8 +51,8 @@ import shutil from pathlib import Path import numpy as np -import matplotlib.pyplot as plt -import matplotlib.gridspec as gridspec +import matplotlib.pyplot as plt +import matplotlib.gridspec as gridspec from scipy import signal from scipy.interpolate import interp1d from scipy.interpolate import UnivariateSpline @@ -63,199 +63,204 @@ import argparse import re + def read_bin_forces(fileName): - """ + """ Reads binData measured at the center of each bin. - Reads force data integrated over a surface from Open file and returns - bin heights, time, and the forces and moments vector on the bins for each - time step. - + Reads force data integrated over a surface from Open file and returns + bin heights, time, and the forces and moments vector on the bins for each + time step. 
+ """ forces = [] moments = [] time = [] nbins = 0 - - with open(fileName, "r") as f: + + with open(fileName, 'r') as f: for line in f: - if line.startswith('#'): - #Read the origin where the force are integrated - if line.startswith('# bins'): - line = line.replace('# bins', '') + if line.startswith('#'): + # Read the origin where the force are integrated + if line.startswith('# bins'): + line = line.replace('# bins', '') line = line.replace(':', '') line = line.split() nbins = int(line[0]) coords = np.zeros((nbins, 3)) elif line.startswith('# x co-ords'): - line = line.replace('# x co-ords', '') + line = line.replace('# x co-ords', '') line = line.replace(':', '') line = line.split() for i in range(nbins): - coords[i,0] = line[i] + coords[i, 0] = line[i] elif line.startswith('# y co-ords'): - line = line.replace('# y co-ords', '') + line = line.replace('# y co-ords', '') line = line.replace(':', '') line = line.split() for i in range(nbins): - coords[i,1] = line[i] + coords[i, 1] = line[i] elif line.startswith('# z co-ords'): - line = line.replace('# z co-ords', '') + line = line.replace('# z co-ords', '') line = line.replace(':', '') line = line.split() for i in range(nbins): - coords[i,2] = line[i] + coords[i, 2] = line[i] else: continue # Read only the pressure part - else: - line = line.replace('(','') - line = line.replace(')','') + else: + line = line.replace('(', '') + line = line.replace(')', '') line = line.split() time.append(float(line[0])) story_force = np.zeros((nbins, 3)) story_moments = np.zeros((nbins, 3)) for i in range(nbins): - start = 12*i + 1 + start = 12 * i + 1 - #Take only pressure part - story_force[i,0] = line[start] - story_force[i,1] = line[start + 1] - story_force[i,2] = line[start + 2] + # Take only pressure part + story_force[i, 0] = line[start] + story_force[i, 1] = line[start + 1] + story_force[i, 2] = line[start + 2] - story_moments[i,0] = line[start + 6] - story_moments[i,1] = line[start + 6 + 1] - story_moments[i,2] = line[start + 6 + 2] + story_moments[i, 0] = line[start + 6] + story_moments[i, 1] = line[start + 6 + 1] + story_moments[i, 2] = line[start + 6 + 2] forces.append(story_force) moments.append(story_moments) - + time = np.asarray(time, dtype=np.float32) forces = np.asarray(forces, dtype=np.float32) - moments = np.asarray(moments, dtype=np.float32) - + moments = np.asarray(moments, dtype=np.float32) + return coords, time, forces, moments + def read_forces(fileName): - """ - Reads force data integrated over a surface from OpenFOAM file and returns - origin(center of rotation), time, and the forces and moments vector for - each time step. - + """ + Reads force data integrated over a surface from OpenFOAM file and returns + origin(center of rotation), time, and the forces and moments vector for + each time step. 
+ """ origin = np.zeros(3) forces = [] moments = [] time = [] - - with open(fileName, "r") as f: + + with open(fileName, 'r') as f: for line in f: - if line.startswith('#'): - #Read the origin where the force are integrated - if line.startswith('# CofR'): - line = line.replace('(','') - line = line.replace(')','') + if line.startswith('#'): + # Read the origin where the force are integrated + if line.startswith('# CofR'): + line = line.replace('(', '') + line = line.replace(')', '') line = line.split() - origin[0] = line[3] # x-coordinate - origin[1] = line[4] # y-coordinate - origin[2] = line[5] # z-coordinate + origin[0] = line[3] # x-coordinate + origin[1] = line[4] # y-coordinate + origin[2] = line[5] # z-coordinate else: continue - # Read only the pressure part of force and moments. + # Read only the pressure part of force and moments. # Viscous and porous are ignored - else: - line = line.replace('(','') - line = line.replace(')','') + else: + line = line.replace('(', '') + line = line.replace(')', '') line = line.split() time.append(float(line[0])) forces.append([float(line[1]), float(line[2]), float(line[3])]) - moments.append([float(line[1 + 9]), float(line[2 + 9]), float(line[3 + 9])]) - + moments.append( + [float(line[1 + 9]), float(line[2 + 9]), float(line[3 + 9])] + ) + time = np.asarray(time, dtype=np.float32) forces = np.asarray(forces, dtype=np.float32) - moments = np.asarray(moments, dtype=np.float32) - + moments = np.asarray(moments, dtype=np.float32) + return origin, time, forces, moments def readPressureProbes(fileName): """ Created on Wed May 16 14:31:42 2018 - + Reads pressure probe data from OpenFOAM and return the probe location, time, and the pressure for each time step. - + @author: Abiy """ probes = [] p = [] - time = [] - - with open(fileName, "r") as f: + time = [] + + with open(fileName, 'r') as f: for line in f: if line.startswith('#'): if line.startswith('# Probe'): - line = line.replace('(','') - line = line.replace(')','') + line = line.replace('(', '') + line = line.replace(')', '') line = line.split() - probes.append([float(line[3]),float(line[4]),float(line[5])]) + probes.append([float(line[3]), float(line[4]), float(line[5])]) else: continue - else: + else: line = line.split() time.append(float(line[0])) p_probe_i = np.zeros([len(probes)]) - for i in range(len(probes)): + for i in range(len(probes)): p_probe_i[i] = float(line[i + 1]) p.append(p_probe_i) - + probes = np.asarray(probes, dtype=np.float32) time = np.asarray(time, dtype=np.float32) p = np.asarray(p, dtype=np.float32) - + return probes, time, p + def read_pressure_data(file_names): """ This functions takes names of different OpenFOAM pressure measurements and connect - them into one file removing overlaps if any. All the probes must be in the same - location, otherwise an error might show up. + them into one file removing overlaps if any. All the probes must be in the same + location, otherwise an error might show up. Parameters ---------- - *args - List of file pashes of pressure data to be connected together. + *args + List of file pashes of pressure data to be connected together. Returns ------- time, pressure Returns the pressure time and pressure data of the connected file. """ - no_files = len(file_names) - connected_time = [] # Connected array of time + no_files = len(file_names) + connected_time = [] # Connected array of time connected_p = [] # connected array of pressure. 
time1 = [] - p1 = [] + p1 = [] time2 = [] - p2 = [] - probes= [] - - for i in range(no_files): - probes, time2, p2 = readPressureProbes(file_names[i]) - - if i==0: + p2 = [] + probes = [] + + for i in range(no_files): + probes, time2, p2 = readPressureProbes(file_names[i]) + + if i == 0: connected_time = time2 - connected_p = p2 + connected_p = p2 else: try: index = np.where(time2 > time1[-1])[0][0] - # index += 1 + # index += 1 except: # sys.exit('Fatal Error!: the pressure filese have time gap') - index = 0 # Joint them even if they have a time gap + index = 0 # Joint them even if they have a time gap connected_time = np.concatenate((connected_time, time2[index:])) connected_p = np.concatenate((connected_p, p2[index:])) @@ -268,11 +273,13 @@ def read_pressure_data(file_names): class PressureData: """ A class that holds a pressure data and performs the following operations: - - mean and rms pressure coefficients - - peak pressure coefficients + - mean and rms pressure coefficients + - peak pressure coefficients """ - def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0, - start_time=None, end_time=None): + + def __init__( + self, path, u_ref=0.0, rho=1.25, p_ref=0.0, start_time=None, end_time=None + ): self.path = path self.u_ref = u_ref self.p_ref = p_ref @@ -283,164 +290,178 @@ def __init__(self, path, u_ref=0.0, rho=1.25, p_ref=0.0, self.set_time() self.Nt = len(self.time) self.T = self.time[-1] - self.z = self.probes[:,2] - self.y = self.probes[:,1] - self.x = self.probes[:,0] - self.dt = np.mean(np.diff(self.time)) + self.z = self.probes[:, 2] + self.y = self.probes[:, 1] + self.x = self.probes[:, 0] + self.dt = np.mean(np.diff(self.time)) self.probe_count = np.shape(self.probes)[0] - def read_cfd_data (self): + def read_cfd_data(self): if os.path.isdir(self.path): - print("Reading from path : %s" % (self.path)) + print('Reading from path : %s' % (self.path)) time_names = os.listdir(self.path) sorted_index = np.argsort(np.float_(time_names)).tolist() # print(sorted_index) # print("\tTime directories: %s" %(time_names)) file_names = [] - + for i in range(len(sorted_index)): - file_name = os.path.join(self.path, time_names[sorted_index[i]],'p') + file_name = os.path.join(self.path, time_names[sorted_index[i]], 'p') file_names.append(file_name) - + # print(file_names) self.probes, self.time, self.p = read_pressure_data(file_names) - self.p = self.rho*np.transpose(self.p) # OpenFOAM gives p/rho - p_dyn = 0.5*self.rho*self.u_ref**2.0 + self.p = self.rho * np.transpose(self.p) # OpenFOAM gives p/rho + p_dyn = 0.5 * self.rho * self.u_ref**2.0 if self.u_ref != 0.0: - self.cp = self.p/p_dyn - + self.cp = self.p / p_dyn # self.p = np.transpose(self.p) # OpenFOAM gives p/rho else: - print("Cannot find the file path: %s" % (self.path)) - - - def set_time (self): - if(self.start_time != None): + print('Cannot find the file path: %s' % (self.path)) + + def set_time(self): + if self.start_time != None: start_index = int(np.argmax(self.time > self.start_time)) self.time = self.time[start_index:] # self.cp = self.cp[:,start_index:] try: - self.p = self.p[:,start_index:] - self.cp = self.cp[:,start_index:] - except: + self.p = self.p[:, start_index:] + self.cp = self.cp[:, start_index:] + except: pass - - if(self.end_time != None): + if self.end_time != None: end_index = int(np.argmax(self.time > self.end_time)) self.time = self.time[:end_index] # self.cp = self.cp[:,:end_index] try: - self.p = self.p[:,:end_index] - self.cp = self.cp[:,:end_index] + self.p = self.p[:, :end_index] + self.cp = 
self.cp[:, :end_index] except: pass -if __name__ == '__main__': +if __name__ == '__main__': """" Entry point to read the simulation results from OpenFOAM case and post-process it. """ - #CLI parser - parser = argparse.ArgumentParser(description="Get EVENT file from OpenFOAM output") - parser.add_argument('-c', '--case', help="OpenFOAM case directory", required=True) + # CLI parser + parser = argparse.ArgumentParser( + description='Get EVENT file from OpenFOAM output' + ) + parser.add_argument( + '-c', '--case', help='OpenFOAM case directory', required=True + ) arguments, unknowns = parser.parse_known_args() - - case_path = arguments.case + case_path = arguments.case # case_path = "C:\\Users\\fanta\\Documents\\WE-UQ\\LocalWorkDir\\IsolatedBuildingCFD" - - print("Case full path: ", case_path) + print('Case full path: ', case_path) - #Read JSON data - json_path = os.path.join(case_path, "constant", "simCenter", "input", "IsolatedBuildingCFD.json") + # Read JSON data + json_path = os.path.join( + case_path, 'constant', 'simCenter', 'input', 'IsolatedBuildingCFD.json' + ) with open(json_path) as json_file: - json_data = json.load(json_file) - + json_data = json.load(json_file) + # Returns JSON object as a dictionary - rm_data = json_data["resultMonitoring"] - wc_data = json_data["windCharacteristics"] - duration = json_data["numericalSetup"]["duration"] - - load_output_path = os.path.join(case_path, "constant", "simCenter", "output", "windLoads") + rm_data = json_data['resultMonitoring'] + wc_data = json_data['windCharacteristics'] + duration = json_data['numericalSetup']['duration'] + + load_output_path = os.path.join( + case_path, 'constant', 'simCenter', 'output', 'windLoads' + ) - #Check if it exists and remove files + # Check if it exists and remove files if os.path.exists(load_output_path): shutil.rmtree(load_output_path) - - #Create new path - Path(load_output_path).mkdir(parents=True, exist_ok=True) + # Create new path + Path(load_output_path).mkdir(parents=True, exist_ok=True) - #Read and write the story forces - force_file_name = os.path.join(case_path, "postProcessing", "storyForces", "0", "forces_bins.dat") + # Read and write the story forces + force_file_name = os.path.join( + case_path, 'postProcessing', 'storyForces', '0', 'forces_bins.dat' + ) origin, time, forces, moments = read_bin_forces(force_file_name) start_time = 1000 time = time[start_time:] - forces = forces[start_time:,:,:] - moments = moments[start_time:,:,:] - - + forces = forces[start_time:, :, :] + moments = moments[start_time:, :, :] + print(force_file_name) - + num_times = len(time) - num_stories = rm_data["numStories"] - + num_stories = rm_data['numStories'] - storyLoads = np.zeros((num_times, num_stories*3 + 1)) - - - storyLoads[:, 0] = time + storyLoads = np.zeros((num_times, num_stories * 3 + 1)) + storyLoads[:, 0] = time for i in range(num_stories): - storyLoads[:, 3*i + 1] = forces[:,i,0] - storyLoads[:, 3*i + 2] = forces[:,i,1] - storyLoads[:, 3*i + 3] = moments[:,i,2] - - np.savetxt(os.path.join(load_output_path, "storyLoad.txt"), storyLoads, delimiter='\t') - - - #Write base loads - if rm_data["monitorBaseLoad"]: - force_file_name = os.path.join(case_path, "postProcessing", "baseForces", "0", "forces.dat") + storyLoads[:, 3 * i + 1] = forces[:, i, 0] + storyLoads[:, 3 * i + 2] = forces[:, i, 1] + storyLoads[:, 3 * i + 3] = moments[:, i, 2] + + np.savetxt( + os.path.join(load_output_path, 'storyLoad.txt'), storyLoads, delimiter='\t' + ) + + # Write base loads + if rm_data['monitorBaseLoad']: + 
force_file_name = os.path.join( + case_path, 'postProcessing', 'baseForces', '0', 'forces.dat' + ) origin, time, forces, moments = read_forces(force_file_name) time = time[start_time:] - forces = forces[start_time:,:] - moments = moments[start_time:,:] - + forces = forces[start_time:, :] + moments = moments[start_time:, :] + print(force_file_name) - + num_times = len(time) - baseLoads = np.zeros((num_times, 3*2 + 1)) + baseLoads = np.zeros((num_times, 3 * 2 + 1)) baseLoads[:, 0] = time baseLoads[:, 1:4] = forces baseLoads[:, 4:7] = moments - np.savetxt(os.path.join(load_output_path, "baseLoad.txt"), baseLoads, delimiter='\t') - - - #Write base loads - if rm_data["monitorSurfacePressure"]: - p_file_name = "" - if rm_data["importPressureSamplingPoints"]: - p_file_name = os.path.join(case_path, "postProcessing", "importedPressureSamplingPoints") + np.savetxt( + os.path.join(load_output_path, 'baseLoad.txt'), baseLoads, delimiter='\t' + ) + + # Write base loads + if rm_data['monitorSurfacePressure']: + p_file_name = '' + if rm_data['importPressureSamplingPoints']: + p_file_name = os.path.join( + case_path, 'postProcessing', 'importedPressureSamplingPoints' + ) else: - p_file_name = os.path.join(case_path, "postProcessing", "generatedPressureSamplingPoints") - + p_file_name = os.path.join( + case_path, 'postProcessing', 'generatedPressureSamplingPoints' + ) - wind_speed = wc_data["referenceWindSpeed"] + wind_speed = wc_data['referenceWindSpeed'] print(p_file_name) - cfd_p = PressureData(p_file_name, u_ref=wind_speed, rho=1.25, p_ref=0.0, start_time=duration*0.1, end_time=None) + cfd_p = PressureData( + p_file_name, + u_ref=wind_speed, + rho=1.25, + p_ref=0.0, + start_time=duration * 0.1, + end_time=None, + ) num_times = len(cfd_p.time) @@ -449,4 +470,8 @@ def set_time (self): pressureData[:, 0] = cfd_p.time pressureData[:, 1:] = np.transpose(cfd_p.cp) - np.savetxt(os.path.join(load_output_path, "pressureData.txt"), pressureData, delimiter='\t') + np.savetxt( + os.path.join(load_output_path, 'pressureData.txt'), + pressureData, + delimiter='\t', + ) diff --git a/modules/createEVENT/M9/M9App2.py b/modules/createEVENT/M9/M9App2.py index 1005eb6ad..c7e00cbeb 100644 --- a/modules/createEVENT/M9/M9App2.py +++ b/modules/createEVENT/M9/M9App2.py @@ -1,5 +1,5 @@ -#%% -import os +# %% +import os import json from datetime import datetime import time @@ -9,55 +9,78 @@ # change the directory to the current directory os.chdir(os.path.dirname(os.path.realpath(__file__))) + def Submit_tapis_job(username, password): t = Tapis( - base_url="https://designsafe.tapis.io", - username=username, - password=password) + base_url='https://designsafe.tapis.io', username=username, password=password + ) t.get_tokens() t.authenticator.list_profiles() - with open("TapisFiles/information.json", "r") as file: - information = json.load(file) + with open('TapisFiles/information.json', 'r') as file: + information = json.load(file) file.close() # %% - savingDirectory = information["directory"] + savingDirectory = information['directory'] if not os.path.exists(savingDirectory): os.makedirs(savingDirectory) - - #print("Uploading files to designsafe storage") - t.files.mkdir(systemId="designsafe.storage.default", path=f"{t.username}/physics_based") - t.files.mkdir(systemId="designsafe.storage.default", path=f"{t.username}/physics_based/M9") - with open("TapisFiles/M9.py", 'rb') as file: + # print("Uploading files to designsafe storage") + t.files.mkdir( + systemId='designsafe.storage.default', path=f'{t.username}/physics_based' + ) + 
t.files.mkdir( + systemId='designsafe.storage.default', path=f'{t.username}/physics_based/M9' + ) + with open('TapisFiles/M9.py', 'rb') as file: contents = file.read() - result = t.files.insert(systemId="designsafe.storage.default", path=f"{t.username}/physics_based/M9/M9.py", file=contents) - with open("TapisFiles/information.json", 'rb') as file: - contents = file.read() - result = t.files.insert(systemId="designsafe.storage.default", path=f"{t.username}/physics_based/M9/information.json", file=contents) - with open("TapisFiles/selectedSites.csv", 'rb') as file: + result = t.files.insert( + systemId='designsafe.storage.default', + path=f'{t.username}/physics_based/M9/M9.py', + file=contents, + ) + with open('TapisFiles/information.json', 'rb') as file: contents = file.read() - result = t.files.insert(systemId="designsafe.storage.default", path=f"{t.username}/physics_based/M9/selectedSites.csv", file=contents) + result = t.files.insert( + systemId='designsafe.storage.default', + path=f'{t.username}/physics_based/M9/information.json', + file=contents, + ) + with open('TapisFiles/selectedSites.csv', 'rb') as file: + contents = file.read() + result = t.files.insert( + systemId='designsafe.storage.default', + path=f'{t.username}/physics_based/M9/selectedSites.csv', + file=contents, + ) # %% # ------------------------------------------------------------------------------- # Define Inputs - input_Directory = f"tapis://designsafe.storage.default/{t.username}/physics_based/M9" - fileInputs = [{"name":"Input Directory","sourceUrl":input_Directory}] + input_Directory = ( + f'tapis://designsafe.storage.default/{t.username}/physics_based/M9' + ) + fileInputs = [{'name': 'Input Directory', 'sourceUrl': input_Directory}] # ------------------------------------------------------------------------------- # Define parameterSet - input_filename = "M9.py" - input_uri = f"tapis://designsafe.storage.default/{t.username}/physics_based/M9/" + input_filename = 'M9.py' + input_uri = f'tapis://designsafe.storage.default/{t.username}/physics_based/M9/' # parameterSet = {"envVariables": [{"key": "inputScript", "value": input_filename}, # {"key": "dataDirectory", "value": input_uri}]} - parameterSet = {"envVariables": [{"key": "inputScript", "value": input_filename}, - {"key": "_UserProjects", "value": "4127798437512801810-242ac118-0001-012,PRJ-4603"}]} - + parameterSet = { + 'envVariables': [ + {'key': 'inputScript', 'value': input_filename}, + { + 'key': '_UserProjects', + 'value': '4127798437512801810-242ac118-0001-012,PRJ-4603', + }, + ] + } jobdict = { 'name': '', @@ -69,60 +92,64 @@ def Submit_tapis_job(username, password): 'maxMinutes': 30, 'archiveOnAppError': True, 'archiveSystemId': 'designsafe.storage.default', - 'archiveSystemDir': f"{t.username}/physics_based/M9/", + 'archiveSystemDir': f'{t.username}/physics_based/M9/', 'fileInputs': fileInputs, 'parameterSet': parameterSet, - 'tags': ['portalName: DESIGNSAFE'] + 'tags': ['portalName: DESIGNSAFE'], } - # Generate a timestamp to append to the job name an - timestamp = datetime.now().strftime("%Y%m%d%H%M%S") - jobname = f"PhysicsBasedMotion_M9_{t.username}_{timestamp}" + timestamp = datetime.now().strftime('%Y%m%d%H%M%S') + jobname = f'PhysicsBasedMotion_M9_{t.username}_{timestamp}' - print("Submitting job") - # submit the job + print('Submitting job') + # submit the job jobdict['name'] = jobname # %% res = t.jobs.submitJob(**jobdict) - mjobUuid=res.uuid + mjobUuid = res.uuid tlapse = 1 - status =t.jobs.getJobStatus(jobUuid=mjobUuid).status + status = 
t.jobs.getJobStatus(jobUuid=mjobUuid).status previous = status while True: - if status in ["FINISHED","FAILED","STOPPED","STAGING_INPUTS"]: + if status in ['FINISHED', 'FAILED', 'STOPPED', 'STAGING_INPUTS']: break status = t.jobs.getJobStatus(jobUuid=mjobUuid).status if status == previous: continue - else : + else: previous = status - print(f"\tStatus: {status}") - time.sleep(tlapse) + print(f'\tStatus: {status}') + time.sleep(tlapse) # %% - print("Downloading extracted motions") + print('Downloading extracted motions') archivePath = t.jobs.getJob(jobUuid=mjobUuid).archiveSystemDir - archivePath = f"{archivePath}/M9/Events" - files = t.files.listFiles(systemId="designsafe.storage.default", path=archivePath) + archivePath = f'{archivePath}/M9/Events' + files = t.files.listFiles( + systemId='designsafe.storage.default', path=archivePath + ) - # %% + # %% if len(files) <= 1: - print("No files in the archive") - else : + print('No files in the archive') + else: for file in files: filename = file.name - if filename == ".": + if filename == '.': continue - path = f"{archivePath}/{filename}" - res = t.files.getContents(systemId="designsafe.storage.default", path=path) + path = f'{archivePath}/{filename}' + res = t.files.getContents( + systemId='designsafe.storage.default', path=path + ) - with open(f"{savingDirectory}/{filename}", "w") as f: - f.write(res.decode("utf-8")) + with open(f'{savingDirectory}/{filename}', 'w') as f: + f.write(res.decode('utf-8')) f.close() - print("Files downloaded") - print("Please check the directory for the extracted motions") + print('Files downloaded') + print('Please check the directory for the extracted motions') + - # %% +# %% diff --git a/modules/createEVENT/M9/M9Run.py b/modules/createEVENT/M9/M9Run.py index 2cabc67ee..ecb728d96 100644 --- a/modules/createEVENT/M9/M9Run.py +++ b/modules/createEVENT/M9/M9Run.py @@ -38,8 +38,8 @@ '-o', '--output', help='number of realizations', required=False ) parser.add_argument('-a', '--API', help='API FLAG', required=False) - parser.add_argument("--username", help="username", required=False) - parser.add_argument("--password", help="password", required=False) + parser.add_argument('--username', help='username', required=False) + parser.add_argument('--password', help='password', required=False) args = parser.parse_args() if args.lat: @@ -55,7 +55,6 @@ if args.API in ['true', 1, 'True', 'TRUE']: information['APIFLAG'] = True - # # go get the motions # diff --git a/modules/createEVENT/M9/TapisFiles/M9.py b/modules/createEVENT/M9/TapisFiles/M9.py index a13ffca4a..1b1ada183 100644 --- a/modules/createEVENT/M9/TapisFiles/M9.py +++ b/modules/createEVENT/M9/TapisFiles/M9.py @@ -4,42 +4,45 @@ import pandas as pd import json import xarray as xr + + # 'netcdf4', 'h5netcdf', 'scipy' -#%% +# %% def M9(information): - """ - the default is to select sites from all M9 sites, but + """ + the default is to select sites from all M9 sites, but grid type (options: A, B, C, D, E, Y, and Z, can be empty) (ref: https://sites.uw.edu/pnet/m9-simulations/about-m9-simulations/extent-of-model/) """ LocationFlag = information['LocationFlag'] numSiteGM = information['number_of_realizations'] - grid_type = information['grid_type'] ;# grid type (options: A, B, C, D, E, Y, and Z, can be "all") - - - randomFLag = True ;# if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected - maxnumSiteGM = 30 - numSiteGM = min(numSiteGM, maxnumSiteGM) ;# number of realizations + grid_type = information[ + 'grid_type' + ] # grid 
type (options: A, B, C, D, E, Y, and Z, can be "all") + + randomFLag = True # if True, the realizations are selected randomly, otherwise, the first numSiteGM sites are selected + maxnumSiteGM = 30 + numSiteGM = min(numSiteGM, maxnumSiteGM) # number of realizations my_Directory = os.getcwd() - print(f"Current Directory: {my_Directory}") + print(f'Current Directory: {my_Directory}') files = [f for f in os.listdir() if os.path.isfile(f)] print(files) # changing realizations order # indicies = list(range(maxnumSiteGM)); - Realizations = ["032"] + Realizations = ['032'] indicies = [0] if randomFLag: np.random.shuffle(indicies) indicies = indicies[:numSiteGM] username = os.getlogin() - print(f"Username: {username}") - - M9Path = f"/home/{username}/work/projects/PRJ-4603" + print(f'Username: {username}') + + M9Path = f'/home/{username}/work/projects/PRJ-4603' # M9Path = "/home/parduino/work/projects/PRJ-4603" # M9Path = "/home/jovyan/work/projects/PRJ-4603" @@ -47,24 +50,25 @@ def M9(information): # M9Path = f{"tapis://project-"+_UserProjects" # M9Path = "/data/projects/PRJ-4603" - print("M9PATH defined") + print('M9PATH defined') - directory = "./Events" ;# directory to save the data + directory = './Events' # directory to save the data # create the directory if it does not exist if not os.path.exists(directory): os.makedirs(directory) - print("Checking if the directory is created") + print('Checking if the directory is created') all_entries = os.listdir() print(all_entries) - gdf = pd.read_csv('selectedSites.csv', index_col=0) - APIFLAG = information['APIFLAG'] # if the APIFLAG is True, we use M9 API to get the motion data - - if not(APIFLAG): + APIFLAG = information[ + 'APIFLAG' + ] # if the APIFLAG is True, we use M9 API to get the motion data + + if not (APIFLAG): for i in indicies: - for _,site in gdf.iterrows(): + for _, site in gdf.iterrows(): # find the first Letter of the site name site_name = site['Station Name'] lat = site['Latitude'] @@ -78,81 +82,59 @@ def M9(information): dt = data.coords['time'].values dt = dt[1] - dt[0] sitedata = { - "dT" : dt, - "accel_x" : subset['acc_x'].values.tolist(), - "accel_y" : subset['acc_y'].values.tolist(), - "accel_z" : subset['acc_z'].values.tolist(), + 'dT': dt, + 'accel_x': subset['acc_x'].values.tolist(), + 'accel_y': subset['acc_y'].values.tolist(), + 'accel_z': subset['acc_z'].values.tolist(), } write_motion(site_name, directory, i, sitedata, APIFLAG) - gdf["filename"] = f"{site_name}_{i}" + gdf['filename'] = f'{site_name}_{i}' if LocationFlag: - break; + break if LocationFlag: gdf = gdf.loc[[0]] # save the gdf to a csv file in the directory just "Station Name", "Latitude", "Longitude" - gdf[['filename', 'Latitude', 'Longitude']].to_csv(f'{directory}/sites.csv', index=False) - - print("Script is done") + gdf[['filename', 'Latitude', 'Longitude']].to_csv( + f'{directory}/sites.csv', index=False + ) + print('Script is done') def write_motion(site_name, directory, i, motiondict, APIFLAG): - filename = f"{directory}/{site_name}_{i}.json" + filename = f'{directory}/{site_name}_{i}.json' - print(f"Writing {filename}") + print(f'Writing {filename}') if APIFLAG: accel_x = 'AccelerationHistory-EW' accel_y = 'AccelerationHistory-NS' accel_z = 'AccelerationHistory-Vert' - dt = 'TimeStep' + dt = 'TimeStep' datatowrite = { - "Data": "Time history generated using M9 simulations", - "dT" : motiondict[dt], - "name": f"{site_name}_{i}", - "numSteps": len(motiondict[accel_x]), - "accel_x": motiondict[accel_x], - "accel_y": motiondict[accel_y], - "accel_z": 
motiondict[accel_z], + 'Data': 'Time history generated using M9 simulations', + 'dT': motiondict[dt], + 'name': f'{site_name}_{i}', + 'numSteps': len(motiondict[accel_x]), + 'accel_x': motiondict[accel_x], + 'accel_y': motiondict[accel_y], + 'accel_z': motiondict[accel_z], } else: datatowrite = motiondict - datatowrite['Data'] = "Time history generated using M9 simulations" - datatowrite['name'] = f"{site_name}_{i}" - + datatowrite['Data'] = 'Time history generated using M9 simulations' + datatowrite['name'] = f'{site_name}_{i}' with open(filename, 'w') as f: json.dump(datatowrite, f, indent=2) - - - -if __name__ == "__main__": +if __name__ == '__main__': # change the directory to the directory of the current file os.chdir(os.path.dirname(os.path.abspath(__file__))) - with open("information.json", "r") as file: + with open('information.json', 'r') as file: information = json.load(file) M9(information) - - - - - - - - - - - - - - - - - - - diff --git a/modules/createSAM/multiFidelityBuildingModel/MultiFidelityBuildingModel.py b/modules/createSAM/multiFidelityBuildingModel/MultiFidelityBuildingModel.py index c7a0bf738..40a596841 100644 --- a/modules/createSAM/multiFidelityBuildingModel/MultiFidelityBuildingModel.py +++ b/modules/createSAM/multiFidelityBuildingModel/MultiFidelityBuildingModel.py @@ -56,7 +56,8 @@ def create_SAM( # noqa: N802, D103, C901 current_building_id = AIM['GeneralInformation']['AIM_id'] database_path = AIM['Modeling']['buildingDatabase'] with open( # noqa: PTH123, UP015 - os.path.join('..', '..', '..', '..', 'input_data', database_path), 'r' # noqa: PTH118 + os.path.join('..', '..', '..', '..', 'input_data', database_path), + 'r', # noqa: PTH118 ) as f: json_string = f.read() database = json.loads(json_string) diff --git a/modules/performFEM/surrogateGP/gpPredict.py b/modules/performFEM/surrogateGP/gpPredict.py index 839eefb96..522ba6538 100644 --- a/modules/performFEM/surrogateGP/gpPredict.py +++ b/modules/performFEM/surrogateGP/gpPredict.py @@ -48,7 +48,6 @@ def main(params_dir, surrogate_dir, json_dir, result_file, input_json): # noqa: C901, D103, PLR0912, PLR0915 - global error_file # noqa: PLW0602 os_type = sys.platform.lower() diff --git a/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py b/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py index 20e296275..8d542e7db 100644 --- a/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py +++ b/modules/performSIMULATION/surrogateSimulation/SurrogateSimulation.py @@ -111,7 +111,7 @@ def run_surrogateGP(AIM_input_path, EDP_input_path): # noqa: ARG001, N802, N803 surrogate_meta_name, surrogate_name, ] - #subprocess.run(command, check=True) # noqa: S603 + # subprocess.run(command, check=True) # noqa: S603 try: result = subprocess.check_output( # noqa: S603 @@ -128,7 +128,6 @@ def run_surrogateGP(AIM_input_path, EDP_input_path): # noqa: ARG001, N802, N803 file=sys.stderr, ) # noqa: T201 - # os.system( # noqa: RUF100, S605 # f'{pythonEXE} {surrogatePredictionPath} {params_name} {surrogate_meta_name} {surrogate_name}' # ) diff --git a/modules/performUQ/UCSD_UQ/mainscript.py b/modules/performUQ/UCSD_UQ/mainscript.py index edaceef17..1a93bf504 100644 --- a/modules/performUQ/UCSD_UQ/mainscript.py +++ b/modules/performUQ/UCSD_UQ/mainscript.py @@ -54,7 +54,20 @@ def main(input_args): # noqa: D103 f'{run_type} "{driver_file_name}" "{input_file_full_path}"' ) command_list = shlex.split(command) - main_function(command_list) + + # Check if 'UCSD_UQ.err' exists, and create it if not 
+ err_file = path_to_working_directory / 'UCSD_UQ.err' + + if not err_file.exists(): + err_file.touch() # Create the file + + # Try running the main_function and catch any exceptions + try: + main_function(command_list) + except Exception as e: # noqa: BLE001 + # Write the exception message to the .err file + with err_file.open('a') as f: + f.write(f'ERROR: {e!s}\n') # ====================================================================================================================== diff --git a/modules/performUQ/common/parallel_runner_mpi4py.py b/modules/performUQ/common/parallel_runner_mpi4py.py index bf8de20bc..9aaca3a28 100644 --- a/modules/performUQ/common/parallel_runner_mpi4py.py +++ b/modules/performUQ/common/parallel_runner_mpi4py.py @@ -1,4 +1,4 @@ -from mpi4py import MPI # noqa: INP001, D100 +from mpi4py import MPI # noqa: D100 from mpi4py.futures import MPIPoolExecutor diff --git a/modules/performUQ/common/quoFEM_RV_models.py b/modules/performUQ/common/quoFEM_RV_models.py index 689c4ca8c..2caece96b 100644 --- a/modules/performUQ/common/quoFEM_RV_models.py +++ b/modules/performUQ/common/quoFEM_RV_models.py @@ -1,4 +1,4 @@ -import typing # noqa: INP001, D100 +import typing # noqa: D100, N999 from typing import Any import numpy as np diff --git a/modules/performUQ/common/uq_utilities.py b/modules/performUQ/common/uq_utilities.py index aef7cafe2..1d21c0b49 100644 --- a/modules/performUQ/common/uq_utilities.py +++ b/modules/performUQ/common/uq_utilities.py @@ -1,4 +1,4 @@ -import glob # noqa: INP001, D100 +import glob # noqa: D100 import os import shutil import subprocess diff --git a/modules/tools/DRM/MeshGenerator/Infowriter.py b/modules/tools/DRM/MeshGenerator/Infowriter.py index 43c226d25..759463578 100644 --- a/modules/tools/DRM/MeshGenerator/Infowriter.py +++ b/modules/tools/DRM/MeshGenerator/Infowriter.py @@ -1,6 +1,4 @@ - - -def infowriter(info,meshdir): +def infowriter(info, meshdir): """ Write the information dictionary to a tcl file. @@ -10,7 +8,7 @@ def infowriter(info,meshdir): Information dictionary. 
meshdir : str Directory where the tcl file will be written - + Returns ------- None @@ -19,21 +17,28 @@ def infowriter(info,meshdir): # ============================================================================ # Create a tcl file # ============================================================================ - f = open(f"{meshdir}/Modelinfo.tcl", "w") - f.write(f"wipe\n") - f.write(f"# ============================================================================\n") - f.write(f"# Cores Information\n") - f.write(f"# ============================================================================\n") + f = open(f'{meshdir}/Modelinfo.tcl', 'w') + f.write(f'wipe\n') + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Cores Information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set regcores {info['regcores']}\n") f.write(f"set pmlcores {info['pmlcores']}\n") f.write(f"set drmcores {info['drmcores']}\n") f.write(f"set structurecores {info['structurecores']}\n") f.write(f"set AnalysisType {info['AnalysisType']}\n") - - f.write(f"# ============================================================================\n") - f.write(f"# Model Information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Model Information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set StructureType \"{info['StructureType']}\"\n") f.write(f"set NStory {info['NStory']}\n") f.write(f"set NBay {info['NBay']}\n") @@ -46,9 +51,13 @@ def infowriter(info,meshdir): f.write(f"set LGird {info['LGird']}\n") f.write(f"set SectionType {info['SectionType']}\n") f.write(f"set HaveStructure \"{info['HaveStructure']}\"\n") - f.write(f"# ============================================================================\n") - f.write(f"# Soil Information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Soil Information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set dx {info['dx']}\n") f.write(f"set dy {info['dy']}\n") f.write(f"set dz {info['dz']}\n") @@ -65,9 +74,13 @@ def infowriter(info,meshdir): f.write(f"set nx {info['nx']}\n") f.write(f"set ny {info['ny']}\n") f.write(f"set nz {info['nz']}\n") - f.write(f"# ============================================================================\n") - f.write(f"# PML information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# PML information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set AbsorbingElements \"{info['AbsorbingElements']}\"\n") f.write(f"set numpmllayers {info['numpmllayers']}\n") f.write(f"set pmlthicknessx {info['pmlthicknessx']}\n") @@ -77,45 +90,81 @@ def infowriter(info,meshdir): f.write(f"set HaveAbsorbingElements \"{info['HaveAbsorbingElements']}\"\n") f.write(f"set Absorbing_rayleigh_alpha {info['Absorbing_rayleigh_alpha']}\n") f.write(f"set 
Absorbing_rayleigh_beta {info['Absorbing_rayleigh_beta']}\n") - f.write(f"# ============================================================================\n") - f.write(f"# General information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# General information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set meshdir \"{info['meshdir']}\"\n") f.write(f"set outputdir \"{info['outputdir']}\"\n") - f.write(f"# ============================================================================\n") - f.write(f"# Embedding foundation\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Embedding foundation\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set EmbeddingFoundation \"{info['EmbeddingFoundation']}\"\n") info2 = info['EmbeddedFoundation'] - f.write(f"set EmbeddedFoundation [dict create xmax {info2['xmax']} xmin {info2['xmin']} ymax {info2['ymax']} ymin {info2['ymin']} zmax {info2['zmax']} zmin {info2['zmin']}]\n") - f.write(f"# ============================================================================\n") - f.write(f"# Fondation information\n") - f.write(f"# ============================================================================\n") + f.write( + f"set EmbeddedFoundation [dict create xmax {info2['xmax']} xmin {info2['xmin']} ymax {info2['ymax']} ymin {info2['ymin']} zmax {info2['zmax']} zmin {info2['zmin']}]\n" + ) + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Fondation information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set HaveFoundation \"{info['HaveFoundation']}\"\n") - f.write("set foundationBlocks {}\n") - for i,block in enumerate(info['foundationBlocks']): - f.write(f"lappend foundationBlocks [dict create matTag {block['matTag']} xmax {block['xmax']} xmin {block['xmin']} ymax {block['ymax']} ymin {block['ymin']} zmax {block['zmax']} zmin {block['zmin']} Xmeshsize {block['Xmeshsize']} Ymeshsize {block['Ymeshsize']} Zmeshsize {block['Zmeshsize']}]\n") - f.write(f"# ============================================================================\n") - f.write(f"# Piles information\n") - f.write(f"# ============================================================================\n") + f.write('set foundationBlocks {}\n') + for i, block in enumerate(info['foundationBlocks']): + f.write( + f"lappend foundationBlocks [dict create matTag {block['matTag']} xmax {block['xmax']} xmin {block['xmin']} ymax {block['ymax']} ymin {block['ymin']} zmax {block['zmax']} zmin {block['zmin']} Xmeshsize {block['Xmeshsize']} Ymeshsize {block['Ymeshsize']} Zmeshsize {block['Zmeshsize']}]\n" + ) + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Piles information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set HavePiles \"{info['HavePiles']}\"\n") - f.write("set pilelist {}\n") - for i,pile in enumerate(info['pilelist']): - f.write(f"lappend pilelist [dict create xtop {pile['xtop']} ytop 
{pile['ytop']} ztop {pile['ztop']} xbottom {pile['xbottom']} ybottom {pile['ybottom']} zbottom {pile['zbottom']} numberofElements {pile['numberofElements']}]\n") - f.write(f"# ============================================================================\n") - f.write(f"# cells and nodes information\n") - f.write(f"# ============================================================================\n") + f.write('set pilelist {}\n') + for i, pile in enumerate(info['pilelist']): + f.write( + f"lappend pilelist [dict create xtop {pile['xtop']} ytop {pile['ytop']} ztop {pile['ztop']} xbottom {pile['xbottom']} ybottom {pile['ybottom']} zbottom {pile['zbottom']} numberofElements {pile['numberofElements']}]\n" + ) + f.write( + f'# ============================================================================\n' + ) + f.write(f'# cells and nodes information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set soilfoundation_num_cells {info['soilfoundation_num_cells']}\n") f.write(f"set soilfoundation_num_nodes {info['soilfoundation_num_points']}\n") - f.write(f"# ============================================================================\n") - f.write(f"# DRM information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# DRM information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set DRMFile \"{info['DRMFile']}\"\n") - f.write(f"set DRM_Provider_Software \"{info['DRMinformation']['DRM_Provider_Software']}\"\n") + f.write( + f"set DRM_Provider_Software \"{info['DRMinformation']['DRM_Provider_Software']}\"\n" + ) f.write(f"set DRM_factor \"{info['DRMinformation']['factor']}\"\n") f.write(f"set crd_scale \"{info['DRMinformation']['crd_scale']}\"\n") - f.write(f"set distance_tolerance \"{info['DRMinformation']['distance_tolerance']}\"\n") - f.write(f"set do_coordinate_transformation \"{info['DRMinformation']['do_coordinate_transformation']}\"\n") + f.write( + f"set distance_tolerance \"{info['DRMinformation']['distance_tolerance']}\"\n" + ) + f.write( + f"set do_coordinate_transformation \"{info['DRMinformation']['do_coordinate_transformation']}\"\n" + ) f.write(f"set T00 \"{info['DRMinformation']['T00']}\"\n") f.write(f"set T01 \"{info['DRMinformation']['T01']}\"\n") f.write(f"set T02 \"{info['DRMinformation']['T02']}\"\n") @@ -128,26 +177,29 @@ def infowriter(info,meshdir): f.write(f"set originX \"{info['DRMinformation']['originX']}\"\n") f.write(f"set originY \"{info['DRMinformation']['originY']}\"\n") f.write(f"set originZ \"{info['DRMinformation']['originZ']}\"\n") - f.write(f"set DRM_Location \"{info['DRMinformation']['DRM_Location'].lower()}\"\n") - f.write(f"# ============================================================================\n") - f.write(f"# Analysis information\n") - f.write(f"# ============================================================================\n") + f.write( + f"set DRM_Location \"{info['DRMinformation']['DRM_Location'].lower()}\"\n" + ) + f.write( + f'# ============================================================================\n' + ) + f.write(f'# Analysis information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set Analysis_dt {info['AnalysisInfo']['dt']}\n") f.write(f"set Analysis_duration 
{info['AnalysisInfo']['t_final']}\n") f.write(f"set Analysis_record_dt {info['AnalysisInfo']['recording_dt']}\n") - f.write(f"# ============================================================================\n") - f.write(f"# material information\n") - f.write(f"# ============================================================================\n") + f.write( + f'# ============================================================================\n' + ) + f.write(f'# material information\n') + f.write( + f'# ============================================================================\n' + ) f.write(f"set Vs {info['MaterialInfo']['Vs']}\n") f.write(f"set rho {info['MaterialInfo']['rho']}\n") f.write(f"set nu {info['MaterialInfo']['nu']}\n") f.close() - return None - - - - - - diff --git a/modules/tools/DRM/MeshGenerator/MeshGenrator.py b/modules/tools/DRM/MeshGenerator/MeshGenrator.py index 5bf7e75ca..4261a7c08 100644 --- a/modules/tools/DRM/MeshGenerator/MeshGenrator.py +++ b/modules/tools/DRM/MeshGenerator/MeshGenrator.py @@ -1,271 +1,381 @@ # %% -import pyvista as pv +import pyvista as pv import numpy as np -import ctypes -import os +import ctypes +import os import sys import h5py from scipy.spatial import KDTree -def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): - if pileboxinfo["Havepilebox"] == False : - print("No pilebox is defined") - partition(messh,numcores) + +def partition_with_pilebox(messh, numcores, pileboxinfo, tol=1e-6): + if pileboxinfo['Havepilebox'] == False: + print('No pilebox is defined') + partition(messh, numcores) return # seperate the core of the pilebox mesh = messh.copy() eps = 1e-6 # check if the xmin,xmax, ymin ymax ,zmin zmax is exist in the infokeys - if "xmin" in pileboxinfo.keys(): - pilebox_xmin = pileboxinfo["xmin"] - if "xmax" in pileboxinfo.keys(): - pilebox_xmax = pileboxinfo["xmax"] - if "ymin" in pileboxinfo.keys(): - pilebox_ymin = pileboxinfo["ymin"] - if "ymax" in pileboxinfo.keys(): - pilebox_ymax = pileboxinfo["ymax"] - if "zmin" in pileboxinfo.keys(): - pilebox_zmin = pileboxinfo["zmin"] - if "zmax" in pileboxinfo.keys(): - pilebox_zmax = pileboxinfo["zmax"] - if "center" in pileboxinfo.keys(): - pilebox_xmin = pileboxinfo["center"][0] - pileboxinfo["lx"]/2 + eps - pilebox_xmax = pileboxinfo["center"][0] + pileboxinfo["lx"]/2 - eps - pilebox_ymin = pileboxinfo["center"][1] - pileboxinfo["ly"]/2 + eps - pilebox_ymax = pileboxinfo["center"][1] + pileboxinfo["ly"]/2 - eps - pilebox_zmin = pileboxinfo["center"][2] - pileboxinfo["depth"] + eps - pilebox_zmax = pileboxinfo["center"][2] + tol + eps - + if 'xmin' in pileboxinfo.keys(): + pilebox_xmin = pileboxinfo['xmin'] + if 'xmax' in pileboxinfo.keys(): + pilebox_xmax = pileboxinfo['xmax'] + if 'ymin' in pileboxinfo.keys(): + pilebox_ymin = pileboxinfo['ymin'] + if 'ymax' in pileboxinfo.keys(): + pilebox_ymax = pileboxinfo['ymax'] + if 'zmin' in pileboxinfo.keys(): + pilebox_zmin = pileboxinfo['zmin'] + if 'zmax' in pileboxinfo.keys(): + pilebox_zmax = pileboxinfo['zmax'] + if 'center' in pileboxinfo.keys(): + pilebox_xmin = pileboxinfo['center'][0] - pileboxinfo['lx'] / 2 + eps + pilebox_xmax = pileboxinfo['center'][0] + pileboxinfo['lx'] / 2 - eps + pilebox_ymin = pileboxinfo['center'][1] - pileboxinfo['ly'] / 2 + eps + pilebox_ymax = pileboxinfo['center'][1] + pileboxinfo['ly'] / 2 - eps + pilebox_zmin = pileboxinfo['center'][2] - pileboxinfo['depth'] + eps + pilebox_zmax = pileboxinfo['center'][2] + tol + eps # find the cells that are inside the pilebox - cube = 
pv.Cube(bounds=[pilebox_xmin,pilebox_xmax,pilebox_ymin,pilebox_ymax,pilebox_zmin,pilebox_zmax]) + cube = pv.Cube( + bounds=[ + pilebox_xmin, + pilebox_xmax, + pilebox_ymin, + pilebox_ymax, + pilebox_zmin, + pilebox_zmax, + ] + ) # extract the cells that are outside the pilebox indices = mesh.find_cells_within_bounds(cube.bounds) - + # crete newindices for outside the pilebox - newindices = np.ones(mesh.n_cells,dtype=bool) + newindices = np.ones(mesh.n_cells, dtype=bool) newindices[indices] = False newindices = np.where(newindices)[0] - # extract the cells that are inside the pilebox mesh2 = mesh.extract_cells(newindices) # partition the mesh - + if numcores > 2: - partition(mesh2,numcores-1) + partition(mesh2, numcores - 1) if numcores == 2: - mesh2.cell_data['partitioned'] = np.zeros(mesh2.n_cells,dtype=np.int32) - - mesh.cell_data['partitioned'] = np.zeros(mesh.n_cells,dtype=np.int32) + mesh2.cell_data['partitioned'] = np.zeros(mesh2.n_cells, dtype=np.int32) + + mesh.cell_data['partitioned'] = np.zeros(mesh.n_cells, dtype=np.int32) mesh.cell_data['partitioned'][newindices] = mesh2.cell_data['partitioned'] + 1 messh.cell_data['partitioned'] = mesh.cell_data['partitioned'] - - def partition(mesh, numcores): # ============================================================================= # define partioner # ============================================================================= - libpath = os.getcwd().split("OpenSeesProjects")[0] + "OpenSeesProjects/" + "MeshGenerator/lib" + libpath = ( + os.getcwd().split('OpenSeesProjects')[0] + + 'OpenSeesProjects/' + + 'MeshGenerator/lib' + ) if os.name == 'nt': metis_partition_lib = ctypes.CDLL(f'{libpath}/Partitioner.dll') if os.name == 'posix': metis_partition_lib = ctypes.CDLL(f'{libpath}/libPartitioner.so') # Define function argument and return types - metis_partition_lib.Partition.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_int32), ctypes.c_int, ctypes.POINTER(ctypes.c_int32)] - metis_partition_lib.Partition.restype = ctypes.c_int - + metis_partition_lib.Partition.argtypes = [ + ctypes.c_int, + ctypes.c_int, + ctypes.POINTER(ctypes.c_int32), + ctypes.c_int, + ctypes.POINTER(ctypes.c_int32), + ] + metis_partition_lib.Partition.restype = ctypes.c_int + numcells = mesh.n_cells numpoints = mesh.n_points numweights = 1 cells = np.array(mesh.cells.reshape(-1, 9), dtype=np.int32) - cells = cells[:,1:] + cells = cells[:, 1:] cellspointer = cells.flatten().ctypes.data_as(ctypes.POINTER(ctypes.c_int32)) partitioned = np.empty(numcells, dtype=np.int32) partitionedpointer = partitioned.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)) - metis_partition_lib.Partition(numcells, numpoints, cellspointer, numcores, partitionedpointer) + metis_partition_lib.Partition( + numcells, numpoints, cellspointer, numcores, partitionedpointer + ) mesh.cell_data['partitioned'] = partitioned - def kdtree_partition(grid, n_partitions=4): blocks = grid.partition(n_partitions, generate_global_id=True, as_composite=True) - i = 0 - grid["partitioned"] = np.zeros(grid.n_cells,dtype=np.int32) + i = 0 + grid['partitioned'] = np.zeros(grid.n_cells, dtype=np.int32) for block in blocks: - grid["partitioned"][block["vtkGlobalCellIds"]] = i + grid['partitioned'][block['vtkGlobalCellIds']] = i i += 1 - - - - -def DRM_PML_Foundation_Meshgenrator(meshinfo) : - xwidth, ywidth, zwidth = (meshinfo["xwidth"], meshinfo["ywidth"], meshinfo["zwidth"]) - eps = 1e-6 - Xmeshsize, Ymeshsize, Zmeshsize = (meshinfo["Xmeshsize"], meshinfo["Ymeshsize"], meshinfo["Zmeshsize"]) - 
PMLThickness = np.array([meshinfo["PMLThickness"][0], meshinfo["PMLThickness"][1], meshinfo["PMLThickness"][2]]) ; # thickness of the each PML layer - numPMLLayers = meshinfo["numPMLLayers"] ; # number of PML layers - PMLTotalThickness = PMLThickness * numPMLLayers ; # total thickness of the PML layers - DRMThickness = np.array([meshinfo["DRMThickness"][0], meshinfo["DRMThickness"][1], meshinfo["DRMThickness"][2]]) ; # thickness of the DRM layers - numDrmLayers = meshinfo["numDrmLayers"] ; # number of DRM layers - DRMTotalThickness = DRMThickness * numDrmLayers ; # total thickness of the DRM layers - padLayers = numPMLLayers + numDrmLayers ; # number of layers to pad the meshgrid - padThickness = PMLTotalThickness + DRMThickness ; # thickness of the padding layers - reg_num_cores = meshinfo["reg_num_cores"] - DRM_num_cores = meshinfo["DRM_num_cores"] - PML_num_cores = meshinfo["PML_num_cores"] - structurecores = meshinfo["Structure_num_cores"] - Dir = meshinfo["Dir"] - OutputDir = meshinfo["OutputDir"] - AbsorbingElements = meshinfo["AbsorbingElements"] - DRMfile = meshinfo["DRMfile"] - foundationBlocks = meshinfo["foundationBlocks"] - pilelist = meshinfo["pilelist"] - EmbeddeFoundation = meshinfo["EmbeddedFoundationDict"] - HaveEmbeddingFoundation = meshinfo["EmbeddingFoundation"] - HaveFoundation = meshinfo["HaveFoundation"] - HavePiles = meshinfo["HavePiles"] - HaveStructure = meshinfo["HaveStructure"] - AttachFoundation = meshinfo["AttachFoundation"] - DRMinformation = meshinfo["DRMinformation"] - PartitionAlgorithm = meshinfo["PartitionAlgorithm"] - +def DRM_PML_Foundation_Meshgenrator(meshinfo): + xwidth, ywidth, zwidth = ( + meshinfo['xwidth'], + meshinfo['ywidth'], + meshinfo['zwidth'], + ) + eps = 1e-6 + Xmeshsize, Ymeshsize, Zmeshsize = ( + meshinfo['Xmeshsize'], + meshinfo['Ymeshsize'], + meshinfo['Zmeshsize'], + ) + PMLThickness = np.array( + [ + meshinfo['PMLThickness'][0], + meshinfo['PMLThickness'][1], + meshinfo['PMLThickness'][2], + ] + ) # thickness of the each PML layer + numPMLLayers = meshinfo['numPMLLayers'] # number of PML layers + PMLTotalThickness = ( + PMLThickness * numPMLLayers + ) # total thickness of the PML layers + DRMThickness = np.array( + [ + meshinfo['DRMThickness'][0], + meshinfo['DRMThickness'][1], + meshinfo['DRMThickness'][2], + ] + ) # thickness of the DRM layers + numDrmLayers = meshinfo['numDrmLayers'] # number of DRM layers + DRMTotalThickness = ( + DRMThickness * numDrmLayers + ) # total thickness of the DRM layers + padLayers = numPMLLayers + numDrmLayers # number of layers to pad the meshgrid + padThickness = ( + PMLTotalThickness + DRMThickness + ) # thickness of the padding layers + reg_num_cores = meshinfo['reg_num_cores'] + DRM_num_cores = meshinfo['DRM_num_cores'] + PML_num_cores = meshinfo['PML_num_cores'] + structurecores = meshinfo['Structure_num_cores'] + Dir = meshinfo['Dir'] + OutputDir = meshinfo['OutputDir'] + AbsorbingElements = meshinfo['AbsorbingElements'] + DRMfile = meshinfo['DRMfile'] + foundationBlocks = meshinfo['foundationBlocks'] + pilelist = meshinfo['pilelist'] + EmbeddeFoundation = meshinfo['EmbeddedFoundationDict'] + HaveEmbeddingFoundation = meshinfo['EmbeddingFoundation'] + HaveFoundation = meshinfo['HaveFoundation'] + HavePiles = meshinfo['HavePiles'] + HaveStructure = meshinfo['HaveStructure'] + AttachFoundation = meshinfo['AttachFoundation'] + DRMinformation = meshinfo['DRMinformation'] + PartitionAlgorithm = meshinfo['PartitionAlgorithm'] # 
============================================================================= - if HaveStructure != "YES": - structurecores = 0 + if HaveStructure != 'YES': + structurecores = 0 # create a dictionary for meshing information info = { - "Foundation": 1, - "RegularDomain": 2, - "DRMDomain": 3, - "PMLDomain": 4, + 'Foundation': 1, + 'RegularDomain': 2, + 'DRMDomain': 3, + 'PMLDomain': 4, } - DomainNames = ["Foundation","Soil", "Soil", "PML"] - + DomainNames = ['Foundation', 'Soil', 'Soil', 'PML'] pileboxinfo = {} if not os.path.exists(Dir): os.makedirs(Dir) - else : + else: # remove the files in the directory for file in os.listdir(Dir): try: - os.remove(os.path.join(Dir,file)) + os.remove(os.path.join(Dir, file)) except: continue # adding different plots - meshplotdir = Dir + "/../meshplots" + meshplotdir = Dir + '/../meshplots' if not os.path.exists(meshplotdir): os.makedirs(meshplotdir) - # ============================================================================= # meshing # ============================================================================= - x = np.arange(-xwidth/2., xwidth/2.+eps, Xmeshsize) - y = np.arange(-ywidth/2., ywidth/2.+eps, Ymeshsize) - z = np.arange(-zwidth, 0+eps, Zmeshsize) + x = np.arange(-xwidth / 2.0, xwidth / 2.0 + eps, Xmeshsize) + y = np.arange(-ywidth / 2.0, ywidth / 2.0 + eps, Ymeshsize) + z = np.arange(-zwidth, 0 + eps, Zmeshsize) # padding x and y for PML and DRM layers - x = np.pad(x, (numDrmLayers,numDrmLayers), "linear_ramp", end_values=(x.min()-DRMTotalThickness[0], x.max()+DRMTotalThickness[0])) - y = np.pad(y, (numDrmLayers,numDrmLayers), "linear_ramp", end_values=(y.min()-DRMTotalThickness[1], y.max()+DRMTotalThickness[1])) - z = np.pad(z, (numDrmLayers,0), "linear_ramp", end_values=(z.min()-DRMTotalThickness[2])) + x = np.pad( + x, + (numDrmLayers, numDrmLayers), + 'linear_ramp', + end_values=(x.min() - DRMTotalThickness[0], x.max() + DRMTotalThickness[0]), + ) + y = np.pad( + y, + (numDrmLayers, numDrmLayers), + 'linear_ramp', + end_values=(y.min() - DRMTotalThickness[1], y.max() + DRMTotalThickness[1]), + ) + z = np.pad( + z, + (numDrmLayers, 0), + 'linear_ramp', + end_values=(z.min() - DRMTotalThickness[2]), + ) # padding the x and y for PML and PML layers - x = np.pad(x, (numPMLLayers,numPMLLayers), "linear_ramp", end_values=(x.min()-PMLTotalThickness[0], x.max()+PMLTotalThickness[0])) - y = np.pad(y, (numPMLLayers,numPMLLayers), "linear_ramp", end_values=(y.min()-PMLTotalThickness[1], y.max()+PMLTotalThickness[1])) - z = np.pad(z, (numPMLLayers,0), "linear_ramp", end_values=(z.min()-PMLTotalThickness[2])) - + x = np.pad( + x, + (numPMLLayers, numPMLLayers), + 'linear_ramp', + end_values=(x.min() - PMLTotalThickness[0], x.max() + PMLTotalThickness[0]), + ) + y = np.pad( + y, + (numPMLLayers, numPMLLayers), + 'linear_ramp', + end_values=(y.min() - PMLTotalThickness[1], y.max() + PMLTotalThickness[1]), + ) + z = np.pad( + z, + (numPMLLayers, 0), + 'linear_ramp', + end_values=(z.min() - PMLTotalThickness[2]), + ) x, y, z = np.meshgrid(x, y, z, indexing='ij') mesh = pv.StructuredGrid(x, y, z) - mesh.cell_data['Domain'] = np.ones(mesh.n_cells,dtype=np.int8)*info["RegularDomain"] + mesh.cell_data['Domain'] = ( + np.ones(mesh.n_cells, dtype=np.int8) * info['RegularDomain'] + ) # ============================================================================= - # sperate embedding layer + # sperate embedding layer # ============================================================================= - if HaveEmbeddingFoundation=="YES": - cube = 
pv.Cube(bounds=[EmbeddeFoundation["xmin"],EmbeddeFoundation["xmax"],EmbeddeFoundation["ymin"],EmbeddeFoundation["ymax"],EmbeddeFoundation["zmin"],EmbeddeFoundation["zmax"]]) - mesh = mesh.clip_box(cube,invert=True,crinkle=True,progress_bar = False) + if HaveEmbeddingFoundation == 'YES': + cube = pv.Cube( + bounds=[ + EmbeddeFoundation['xmin'], + EmbeddeFoundation['xmax'], + EmbeddeFoundation['ymin'], + EmbeddeFoundation['ymax'], + EmbeddeFoundation['zmin'], + EmbeddeFoundation['zmax'], + ] + ) + mesh = mesh.clip_box(cube, invert=True, crinkle=True, progress_bar=False) mesh.clear_data() - mesh.cell_data['Domain'] = np.ones(mesh.n_cells,dtype=np.int8)*info["RegularDomain"] + mesh.cell_data['Domain'] = ( + np.ones(mesh.n_cells, dtype=np.int8) * info['RegularDomain'] + ) # ============================================================================= # Add foundation blocks # ============================================================================= - if HaveFoundation == "YES": - for i,block in enumerate(foundationBlocks): - xBLOCK = np.arange(block["xmin"], block["xmax"]+eps, block["Xmeshsize"]) - yBLOCK = np.arange(block["ymin"], block["ymax"]+eps, block["Ymeshsize"]) - zBLOCK = np.arange(block["zmin"], block["zmax"]+eps, block["Zmeshsize"]) - xBLOCK, yBLOCK, zBLOCK = np.meshgrid(xBLOCK, yBLOCK, zBLOCK, indexing='ij') + if HaveFoundation == 'YES': + for i, block in enumerate(foundationBlocks): + xBLOCK = np.arange( + block['xmin'], block['xmax'] + eps, block['Xmeshsize'] + ) + yBLOCK = np.arange( + block['ymin'], block['ymax'] + eps, block['Ymeshsize'] + ) + zBLOCK = np.arange( + block['zmin'], block['zmax'] + eps, block['Zmeshsize'] + ) + xBLOCK, yBLOCK, zBLOCK = np.meshgrid( + xBLOCK, yBLOCK, zBLOCK, indexing='ij' + ) if i == 0: foundation = pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK) else: - foundation = foundation.merge(pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK),merge_points=False,tolerance=1e-6,progress_bar = False) - - + foundation = foundation.merge( + pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK), + merge_points=False, + tolerance=1e-6, + progress_bar=False, + ) # ============================================================================= # adding piles # ============================================================================= - pilenodes = np.zeros((len(pilelist)*2,3)) - pileelement = np.zeros((len(pilelist),3),dtype=int) + pilenodes = np.zeros((len(pilelist) * 2, 3)) + pileelement = np.zeros((len(pilelist), 3), dtype=int) for i in range(len(pilelist)): - pilenodes[i*2] = [pilelist[i]["xtop"],pilelist[i]["ytop"],pilelist[i]["ztop"]] - pilenodes[i*2+1] = [pilelist[i]["xbottom"],pilelist[i]["ybottom"],pilelist[i]["zbottom"]] - pileelement[i] = [2,2*i,2*i+1] - celltypes = np.ones(pileelement.shape[0],dtype= int) * pv.CellType.LINE - piles = pv.UnstructuredGrid(pileelement.tolist(),celltypes.tolist(),pilenodes.tolist()) - + pilenodes[i * 2] = [ + pilelist[i]['xtop'], + pilelist[i]['ytop'], + pilelist[i]['ztop'], + ] + pilenodes[i * 2 + 1] = [ + pilelist[i]['xbottom'], + pilelist[i]['ybottom'], + pilelist[i]['zbottom'], + ] + pileelement[i] = [2, 2 * i, 2 * i + 1] + celltypes = np.ones(pileelement.shape[0], dtype=int) * pv.CellType.LINE + piles = pv.UnstructuredGrid( + pileelement.tolist(), celltypes.tolist(), pilenodes.tolist() + ) pl = pv.Plotter() - if HavePiles == "YES": - pl.add_mesh(piles, show_edges=True, color = "r" , line_width=4.0,) - if HaveFoundation == "YES": - pl.add_mesh(foundation,show_edges=True, color="gray", opacity=0.5 ) - pl.add_mesh(mesh, opacity=0.5 ) + if 
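# NOTE (editorial sketch): the piles above are plain two-node LINE cells.  In PyVista
# the connectivity passed to pv.UnstructuredGrid is "[n_points, id0, id1, ...]" per
# cell, so a single pile from head to tip can be built as below (coordinates are
# illustrative only):
import numpy as np
import pyvista as pv

points = np.array([[0.0, 0.0, -4.0],     # pile head
                   [0.0, 0.0, -10.0]])   # pile tip
cells = np.array([2, 0, 1])              # one LINE cell using points 0 and 1
celltypes = np.array([pv.CellType.LINE], dtype=np.uint8)
one_pile = pv.UnstructuredGrid(cells, celltypes, points)
# one_pile.n_cells == 1; stacking many such node/connectivity rows, as done above for
# `piles`, gives the whole pile group in a single unstructured grid.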
HavePiles == 'YES': + pl.add_mesh( + piles, + show_edges=True, + color='r', + line_width=4.0, + ) + if HaveFoundation == 'YES': + pl.add_mesh(foundation, show_edges=True, color='gray', opacity=0.5) + pl.add_mesh(mesh, opacity=0.5) pl.camera_position = 'xz' - pl.export_html(meshplotdir+"/pile.html") + pl.export_html(meshplotdir + '/pile.html') # pl.show() - # merge the piles and foundation - if HaveFoundation == "YES" and HavePiles == "YES": - foundation2 = foundation.merge(piles,merge_points=False,tolerance=1e-6,progress_bar = False) + if HaveFoundation == 'YES' and HavePiles == 'YES': + foundation2 = foundation.merge( + piles, merge_points=False, tolerance=1e-6, progress_bar=False + ) foundationbounds = foundation2.bounds - pileboxinfo["xmin"] = foundationbounds[0] - pileboxinfo["xmax"] = foundationbounds[1] - pileboxinfo["ymin"] = foundationbounds[2] - pileboxinfo["ymax"] = foundationbounds[3] - pileboxinfo["zmin"] = foundationbounds[4] - pileboxinfo["zmax"] = foundationbounds[5] - pileboxinfo["Havepilebox"] = True + pileboxinfo['xmin'] = foundationbounds[0] + pileboxinfo['xmax'] = foundationbounds[1] + pileboxinfo['ymin'] = foundationbounds[2] + pileboxinfo['ymax'] = foundationbounds[3] + pileboxinfo['zmin'] = foundationbounds[4] + pileboxinfo['zmax'] = foundationbounds[5] + pileboxinfo['Havepilebox'] = True del foundation2 - else : - pileboxinfo["Havepilebox"] = False - if HaveFoundation == "YES" : - foundation.cell_data['Domain'] = np.ones(foundation.n_cells,dtype=np.int8)*info["Foundation"] - if AttachFoundation == "YES": - mesh = mesh.merge(foundation,merge_points=True,tolerance=1e-6,progress_bar = False) + else: + pileboxinfo['Havepilebox'] = False + if HaveFoundation == 'YES': + foundation.cell_data['Domain'] = ( + np.ones(foundation.n_cells, dtype=np.int8) * info['Foundation'] + ) + if AttachFoundation == 'YES': + mesh = mesh.merge( + foundation, merge_points=True, tolerance=1e-6, progress_bar=False + ) else: - mesh = mesh.merge(foundation,merge_points=False,tolerance=1e-6,progress_bar = False) + mesh = mesh.merge( + foundation, merge_points=False, tolerance=1e-6, progress_bar=False + ) # delete foundation2 # print("Havepilebox:",pileboxinfo["Havepilebox"]) # ============================================================================= - # sperate PML layer + # sperate PML layer # ============================================================================= xmin = x.min() + PMLTotalThickness[0] xmax = x.max() - PMLTotalThickness[0] @@ -273,150 +383,139 @@ def DRM_PML_Foundation_Meshgenrator(meshinfo) : ymax = y.max() - PMLTotalThickness[1] zmin = z.min() + PMLTotalThickness[2] zmax = z.max() + PMLTotalThickness[2] - cube = pv.Cube(bounds=[xmin,xmax,ymin,ymax,zmin,1000.0]) - PML = mesh.clip_box(cube,invert=True, crinkle=True, progress_bar = False) - reg = mesh.clip_box(cube,invert=False,crinkle=True, progress_bar = False) - - - - - + cube = pv.Cube(bounds=[xmin, xmax, ymin, ymax, zmin, 1000.0]) + PML = mesh.clip_box(cube, invert=True, crinkle=True, progress_bar=False) + reg = mesh.clip_box(cube, invert=False, crinkle=True, progress_bar=False) # now find DRM layer - indices = reg.find_cells_within_bounds([xmin + DRMTotalThickness[0] + eps, - xmax - DRMTotalThickness[0] - eps, - ymin + DRMTotalThickness[1] + eps, - ymax - DRMTotalThickness[1] - eps, - zmin + DRMTotalThickness[2] + eps, - zmax + DRMTotalThickness[2] + eps]) - - - + indices = reg.find_cells_within_bounds( + [ + xmin + DRMTotalThickness[0] + eps, + xmax - DRMTotalThickness[0] - eps, + ymin + DRMTotalThickness[1] + eps, 
+ ymax - DRMTotalThickness[1] - eps, + zmin + DRMTotalThickness[2] + eps, + zmax + DRMTotalThickness[2] + eps, + ] + ) # now create complemntary indices for DRM - DRMindices = np.ones(reg.n_cells,dtype=bool) + DRMindices = np.ones(reg.n_cells, dtype=bool) DRMindices[indices] = False DRMindices = np.where(DRMindices)[0] - - # reg.cell_data['Domain'] = np.ones(reg.n_cells,dtype=np.int8)*info["DRMDomain"] # reg.cell_data['Domain'][indices] = info["RegularDomain"] - - reg.cell_data['Domain'][DRMindices] = info["DRMDomain"] - PML.cell_data['Domain'] = np.ones(PML.n_cells,dtype=np.int8)*info["PMLDomain"] - reg.cell_data['partitioned'] = np.zeros(reg.n_cells,dtype=np.int32) - PML.cell_data['partitioned'] = np.zeros(PML.n_cells,dtype=np.int32) + + reg.cell_data['Domain'][DRMindices] = info['DRMDomain'] + PML.cell_data['Domain'] = np.ones(PML.n_cells, dtype=np.int8) * info['PMLDomain'] + reg.cell_data['partitioned'] = np.zeros(reg.n_cells, dtype=np.int32) + PML.cell_data['partitioned'] = np.zeros(PML.n_cells, dtype=np.int32) # partitioning regular mesh - regular = reg.extract_cells(indices,progress_bar=False) - DRM = reg.extract_cells(DRMindices,progress_bar=False) - regularvtkOriginalCellIds = regular["vtkOriginalCellIds"] - DRMvtkOriginalCellIds = DRM["vtkOriginalCellIds"] + regular = reg.extract_cells(indices, progress_bar=False) + DRM = reg.extract_cells(DRMindices, progress_bar=False) + regularvtkOriginalCellIds = regular['vtkOriginalCellIds'] + DRMvtkOriginalCellIds = DRM['vtkOriginalCellIds'] if reg_num_cores > 1: - if PartitionAlgorithm.lower() == "metis": - print("Using Metis partitioning") - partition_with_pilebox(regular,reg_num_cores,pileboxinfo,tol=10) - if PartitionAlgorithm.lower() == "kd-tree": - print("Using KD-Tree partitioning") - kdtree_partition(regular,reg_num_cores) + if PartitionAlgorithm.lower() == 'metis': + print('Using Metis partitioning') + partition_with_pilebox(regular, reg_num_cores, pileboxinfo, tol=10) + if PartitionAlgorithm.lower() == 'kd-tree': + print('Using KD-Tree partitioning') + kdtree_partition(regular, reg_num_cores) if DRM_num_cores > 1: - if PartitionAlgorithm.lower() == "metis": - partition(DRM,DRM_num_cores) - if PartitionAlgorithm.lower() == "kd-tree": - kdtree_partition(DRM,DRM_num_cores) + if PartitionAlgorithm.lower() == 'metis': + partition(DRM, DRM_num_cores) + if PartitionAlgorithm.lower() == 'kd-tree': + kdtree_partition(DRM, DRM_num_cores) if PML_num_cores > 1: - if PartitionAlgorithm.lower() == "metis": - partition(PML,PML_num_cores) - if PartitionAlgorithm.lower() == "kd-tree": + if PartitionAlgorithm.lower() == 'metis': + partition(PML, PML_num_cores) + if PartitionAlgorithm.lower() == 'kd-tree': PMLcopy = PML.copy() - kdtree_partition(PMLcopy,PML_num_cores) - PML.cell_data['partitioned'] = PMLcopy["partitioned"] + kdtree_partition(PMLcopy, PML_num_cores) + PML.cell_data['partitioned'] = PMLcopy['partitioned'] del PMLcopy - reg.cell_data['partitioned'][regularvtkOriginalCellIds] = regular.cell_data['partitioned'] - reg.cell_data['partitioned'][DRMvtkOriginalCellIds] = DRM.cell_data['partitioned'] + reg_num_cores - PML.cell_data['partitioned'] = PML.cell_data['partitioned'] + reg_num_cores + DRM_num_cores - - - + reg.cell_data['partitioned'][regularvtkOriginalCellIds] = regular.cell_data[ + 'partitioned' + ] + reg.cell_data['partitioned'][DRMvtkOriginalCellIds] = ( + DRM.cell_data['partitioned'] + reg_num_cores + ) + PML.cell_data['partitioned'] = ( + PML.cell_data['partitioned'] + reg_num_cores + DRM_num_cores + ) # 
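# NOTE (editorial sketch): kdtree_partition() is called above but its definition is not
# part of this hunk.  The sketch below is one plausible reading of a "kd-tree" style
# partitioner -- recursively split the cell centres at the median of the longest axis
# until the requested number of parts is reached -- and is an assumption, not the
# repository implementation.
import numpy as np

def kdtree_partition_sketch(grid, ncores):
    centers = grid.cell_centers().points
    labels = np.zeros(grid.n_cells, dtype=np.int32)

    def split(idx, n, offset):
        if n <= 1:
            labels[idx] = offset
            return
        pts = centers[idx]
        axis = np.argmax(pts.max(axis=0) - pts.min(axis=0))  # longest extent
        order = np.argsort(pts[:, axis])
        n_left = n // 2
        cut = len(idx) * n_left // n                          # proportional split
        split(idx[order[:cut]], n_left, offset)
        split(idx[order[cut:]], n - n_left, offset + n_left)

    split(np.arange(grid.n_cells), ncores, 0)
    grid.cell_data['partitioned'] = labels

# kdtree_partition_sketch(regular, reg_num_cores)  # labels cells 0 .. ncores-1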
regular.plot(scalars="partitioned",show_edges=True) # DRM.plot(scalars="partitioned",show_edges=True) # PML.plot(scalars="partitioned",show_edges=True) # merging PML and regular mesh to create a single mesh - mesh = reg.merge(PML,merge_points=False,tolerance=1e-6,progress_bar = False) + mesh = reg.merge(PML, merge_points=False, tolerance=1e-6, progress_bar=False) # stack the regular and DRM partitioned data # mesh.plot(scalars="partitioned",show_edges=True) # mapping between PML and regular mesh on the boundary - mapping = mesh.clean(produce_merge_map=True)["PointMergeMap"] - regindicies = np.where(mapping[PML.n_points:]0)[0] - - - mesh["matTag"] = np.ones(mesh.n_cells,dtype=np.uint8) - - + mapping = mesh.clean(produce_merge_map=True)['PointMergeMap'] + regindicies = np.where(mapping[PML.n_points :] < PML.n_points)[0] + PMLindicies = mapping[PML.n_points + regindicies] + mesh.point_data['boundary'] = np.zeros(mesh.n_points, dtype=int) - 1 + mesh.point_data['boundary'][PMLindicies] = regindicies + PML.n_points + mesh.point_data['boundary'][PML.n_points + regindicies] = PMLindicies + indices = np.where(mesh.point_data['boundary'] > 0)[0] + mesh['matTag'] = np.ones(mesh.n_cells, dtype=np.uint8) # define the ASDA absorbing elements - if AbsorbingElements.lower() == "asda": - - mesh = mesh.clean(tolerance=1e-6,remove_unused_points=False) - mesh["ASDA_type"] = np.zeros(mesh.n_cells,dtype=np.uint8) + if AbsorbingElements.lower() == 'asda': + mesh = mesh.clean(tolerance=1e-6, remove_unused_points=False) + mesh['ASDA_type'] = np.zeros(mesh.n_cells, dtype=np.uint8) ASDAelem_type = { - "B" :1 , - "L" :2 , - "R" :3 , - "F" :4 , - "K" :5 , - "BL" :6 , - "BR" :7 , - "BF" :8 , - "BK" :9 , - "LF" :10, - "LK" :11, - "RF" :12, - "RK" :13, - "BLF" :14, - "BLK" :15, - "BRF" :16, - "BRK" :17, + 'B': 1, + 'L': 2, + 'R': 3, + 'F': 4, + 'K': 5, + 'BL': 6, + 'BR': 7, + 'BF': 8, + 'BK': 9, + 'LF': 10, + 'LK': 11, + 'RF': 12, + 'RK': 13, + 'BLF': 14, + 'BLK': 15, + 'BRF': 16, + 'BRK': 17, } ASDAelem_typereverse = { - 1 :"B" , - 2 :"L" , - 3 :"R" , - 4 :"F" , - 5 :"K" , - 6 :"BL" , - 7 :"BR" , - 8 :"BF" , - 9 :"BK" , - 10:"LF" , - 11:"LK" , - 12:"RF" , - 13:"RK" , - 14:"BLF" , - 15:"BLK" , - 16:"BRF" , - 17:"BRK" , + 1: 'B', + 2: 'L', + 3: 'R', + 4: 'F', + 5: 'K', + 6: 'BL', + 7: 'BR', + 8: 'BF', + 9: 'BK', + 10: 'LF', + 11: 'LK', + 12: 'RF', + 13: 'RK', + 14: 'BLF', + 15: 'BLK', + 16: 'BRF', + 17: 'BRK', } xmin, xmax, ymin, ymax, zmin, zmax = reg.bounds ASDA_xwidth = xmax - xmin @@ -429,224 +528,263 @@ def DRM_PML_Foundation_Meshgenrator(meshinfo) : i = 0 for ele_center in mesh.cell_centers().points: # check if the element is in the left or rightside - if ele_center[0] < (-ASDA_xwidth/2.): + if ele_center[0] < (-ASDA_xwidth / 2.0): # it is in the left side # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BLK"] + mesh['ASDA_type'][i] = ASDAelem_type['BLK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["LK"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['LK'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BLF"] + mesh['ASDA_type'][i] = ASDAelem_type['BLF'] else: # it is in 
the top - mesh["ASDA_type"][i] = ASDAelem_type["LF"] + mesh['ASDA_type'][i] = ASDAelem_type['LF'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BL"] + mesh['ASDA_type'][i] = ASDAelem_type['BL'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["L"] + mesh['ASDA_type'][i] = ASDAelem_type['L'] - elif ele_center[0] > (ASDA_xwidth/2.): + elif ele_center[0] > (ASDA_xwidth / 2.0): # it is in the right side # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BRK"] + mesh['ASDA_type'][i] = ASDAelem_type['BRK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["RK"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['RK'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BRF"] + mesh['ASDA_type'][i] = ASDAelem_type['BRF'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["RF"] + mesh['ASDA_type'][i] = ASDAelem_type['RF'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BR"] + mesh['ASDA_type'][i] = ASDAelem_type['BR'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["R"] + mesh['ASDA_type'][i] = ASDAelem_type['R'] else: # it is in the middle # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BK"] + mesh['ASDA_type'][i] = ASDAelem_type['BK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["K"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['K'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BF"] + mesh['ASDA_type'][i] = ASDAelem_type['BF'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["F"] + mesh['ASDA_type'][i] = ASDAelem_type['F'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["B"] - - i += 1 + mesh['ASDA_type'][i] = ASDAelem_type['B'] + i += 1 - if AbsorbingElements.lower() == "normal": - mesh = mesh.clean(tolerance=1e-6,remove_unused_points=False) + if AbsorbingElements.lower() == 'normal': + mesh = mesh.clean(tolerance=1e-6, remove_unused_points=False) # ============================================================================= # write the mesh # ============================================================================= min_core = mesh.cell_data['partitioned'].min() max_core = mesh.cell_data['partitioned'].max() - - # write the mesh nodes - for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Nodes" + str(core + structurecores) + ".tcl", "w") + 
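# NOTE (editorial sketch): every soil node/element tag written by the loops below is
# offset by the Tcl variables $StructureMaxNodeTag / $StructureMaxEleTag, so the soil
# mesh can be appended to an existing structural model without tag collisions.  With
# made-up ids and coordinates, one line of a NodesN.tcl file looks like:
#
#   node [expr int($StructureMaxNodeTag + 1 + 1534)] 12.5 -7.5 -20.0
#
# and one line of an ElementsN.tcl file (non-ASDA case, regular soil domain,
# material tag 1) looks like:
#
#   eval "element $elementType [expr int($StructureMaxEleTag + 1 + 987)] \
#         [expr int($StructureMaxNodeTag + 1 + 10)] ... $SoilmatTag1"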
for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned'] == core)[0]) + f = open(Dir + '/Nodes' + str(core + structurecores) + '.tcl', 'w') for i in range(tmp.n_points): - f.write(f"node [expr int($StructureMaxNodeTag + 1 + {tmp['vtkOriginalPointIds'][i]})] {tmp.points[i][0]} {tmp.points[i][1]} {tmp.points[i][2]}\n") + f.write( + f"node [expr int($StructureMaxNodeTag + 1 + {tmp['vtkOriginalPointIds'][i]})] {tmp.points[i][0]} {tmp.points[i][1]} {tmp.points[i][2]}\n" + ) f.close() - # writing the mesh elements - if AbsorbingElements == "ASDA": + if AbsorbingElements == 'ASDA': # writing the mesh elements - for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Elements" + str(core + structurecores) + ".tcl", "w") + for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells( + np.where(mesh.cell_data['partitioned'] == core)[0] + ) + f = open(Dir + '/Elements' + str(core + structurecores) + '.tcl', 'w') if core >= reg_num_cores + DRM_num_cores: for eletag in range(tmp.n_cells): - tmpeletag = "[expr int($StructureMaxEleTag + 1 + " + str(tmp['vtkOriginalCellIds'][eletag]) + ")]" - tmpNodeTags = [f"[expr int($StructureMaxNodeTag + 1 + {x})]" for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids]] - Domainname = DomainNames[tmp['Domain'][eletag]-1] - if Domainname == "PML": - Domainname = "Absorbing" + tmpeletag = ( + '[expr int($StructureMaxEleTag + 1 + ' + + str(tmp['vtkOriginalCellIds'][eletag]) + + ')]' + ) + tmpNodeTags = [ + f'[expr int($StructureMaxNodeTag + 1 + {x})]' + for x in tmp['vtkOriginalPointIds'][ + tmp.get_cell(eletag).point_ids + ] + ] + Domainname = DomainNames[tmp['Domain'][eletag] - 1] + if Domainname == 'PML': + Domainname = 'Absorbing' tmpmatTag = f"${Domainname}matTag{tmp['matTag'][eletag]}" tmpASDA_type = ASDAelem_typereverse[tmp['ASDA_type'][eletag]] - f.write(f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag} {tmpASDA_type}\" \n") + f.write( + f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag} {tmpASDA_type}\" \n" + ) else: for eletag in range(tmp.n_cells): - tmpeletag = "[expr int($StructureMaxEleTag + 1 + " + str(tmp['vtkOriginalCellIds'][eletag]) + ")]" - tmpNodeTags = [f"[expr int($StructureMaxNodeTag + 1 + {x})]" for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids]] - Domainname = DomainNames[tmp['Domain'][eletag]-1] + tmpeletag = ( + '[expr int($StructureMaxEleTag + 1 + ' + + str(tmp['vtkOriginalCellIds'][eletag]) + + ')]' + ) + tmpNodeTags = [ + f'[expr int($StructureMaxNodeTag + 1 + {x})]' + for x in tmp['vtkOriginalPointIds'][ + tmp.get_cell(eletag).point_ids + ] + ] + Domainname = DomainNames[tmp['Domain'][eletag] - 1] tmpmatTag = f"${Domainname}matTag{tmp['matTag'][eletag]}" - f.write(f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag}\" \n") + f.write( + f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag}\" \n" + ) f.close() - else : - for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Elements" + str(core + structurecores) + ".tcl", "w") + else: + for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells( + np.where(mesh.cell_data['partitioned'] == core)[0] + ) + f = open(Dir + '/Elements' + str(core + structurecores) + '.tcl', 'w') for eletag in range(tmp.n_cells): - tmpeletag = "[expr 
int($StructureMaxEleTag + 1 + " + str(tmp['vtkOriginalCellIds'][eletag]) + ")]" - tmpNodeTags = [f"[expr int($StructureMaxNodeTag + 1 + {x})]" for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids]] - Domainname = DomainNames[tmp['Domain'][eletag]-1] - if Domainname == "PML": - Domainname = "Absorbing" + tmpeletag = ( + '[expr int($StructureMaxEleTag + 1 + ' + + str(tmp['vtkOriginalCellIds'][eletag]) + + ')]' + ) + tmpNodeTags = [ + f'[expr int($StructureMaxNodeTag + 1 + {x})]' + for x in tmp['vtkOriginalPointIds'][ + tmp.get_cell(eletag).point_ids + ] + ] + Domainname = DomainNames[tmp['Domain'][eletag] - 1] + if Domainname == 'PML': + Domainname = 'Absorbing' tmpmatTag = f"${Domainname}matTag{tmp['matTag'][eletag]}" - f.write(f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag}\" \n") + f.write( + f"eval \"element $elementType {tmpeletag} {' '.join(tmpNodeTags)} {tmpmatTag}\" \n" + ) f.close() - if AbsorbingElements == "PML": + if AbsorbingElements == 'PML': # writing the boundary files - for core in range(reg_num_cores + DRM_num_cores , max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Boundary" + str(core + structurecores) + ".tcl", "w") + for core in range(reg_num_cores + DRM_num_cores, max_core + 1): + tmp = mesh.extract_cells( + np.where(mesh.cell_data['partitioned'] == core)[0] + ) + f = open(Dir + '/Boundary' + str(core + structurecores) + '.tcl', 'w') for i in range(tmp.n_points): - if tmp["boundary"][i] != -1: - x,y,z = tmp.points[i] + if tmp['boundary'][i] != -1: + x, y, z = tmp.points[i] nodeTag1 = tmp['vtkOriginalPointIds'][i] nodeTag2 = tmp['boundary'][i] - f.write(f"node [expr int($StructureMaxNodeTag + 1 +{nodeTag2})] {str(x)} {str(y)} {str(z)}\n") - f.write(f"equalDOF [expr int($StructureMaxNodeTag + 1 + {nodeTag2})] [expr int($StructureMaxNodeTag + 1 +{nodeTag1})] 1 2 3\n") - - + f.write( + f'node [expr int($StructureMaxNodeTag + 1 +{nodeTag2})] {str(x)} {str(y)} {str(z)}\n' + ) + f.write( + f'equalDOF [expr int($StructureMaxNodeTag + 1 + {nodeTag2})] [expr int($StructureMaxNodeTag + 1 +{nodeTag1})] 1 2 3\n' + ) # ============================================================================= # printing information # ============================================================================= - print(f"Number of regular cores: {reg_num_cores}") - print(f"Number of DRM cores: {DRM_num_cores}") - print(f"Number of PML cores: {PML_num_cores}") - print(f"Number of regular elements: {regular.n_cells} roughly {int(regular.n_cells/reg_num_cores)} each core") - print(f"Number of DRM elements: {DRM.n_cells} roughly {int(DRM.n_cells/DRM_num_cores)} each core") - print(f"Number of PML elements: {PML.n_cells} roughly {int(PML.n_cells/PML_num_cores)} each core") - print(f"Number of total elements: {mesh.n_cells}") - print(f"Number of total points: {mesh.n_points}") - print(f"Number of cores: {max_core-min_core+1}") - print(f"Number of PML nodes: {PML.n_points}") - print(f"Number of regular nodes: {regular.n_points}") - print(f"Number of DRM nodes: {DRM.n_points}") - if AbsorbingElements == "PML": - print(f"Number of MP constraints: {regindicies.size}") + print(f'Number of regular cores: {reg_num_cores}') + print(f'Number of DRM cores: {DRM_num_cores}') + print(f'Number of PML cores: {PML_num_cores}') + print( + f'Number of regular elements: {regular.n_cells} roughly {int(regular.n_cells/reg_num_cores)} each core' + ) + print( + f'Number of DRM elements: {DRM.n_cells} roughly 
{int(DRM.n_cells/DRM_num_cores)} each core' + ) + print( + f'Number of PML elements: {PML.n_cells} roughly {int(PML.n_cells/PML_num_cores)} each core' + ) + print(f'Number of total elements: {mesh.n_cells}') + print(f'Number of total points: {mesh.n_points}') + print(f'Number of cores: {max_core-min_core+1}') + print(f'Number of PML nodes: {PML.n_points}') + print(f'Number of regular nodes: {regular.n_points}') + print(f'Number of DRM nodes: {DRM.n_points}') + if AbsorbingElements == 'PML': + print(f'Number of MP constraints: {regindicies.size}') # calculating number of surface points on the boundaries eps = 1e-2 - bounds = np.array(mesh.bounds)-np.array([-eps,eps,-eps,eps,-eps,-eps]) + bounds = np.array(mesh.bounds) - np.array([-eps, eps, -eps, eps, -eps, -eps]) cube = pv.Cube(bounds=bounds) # points outside the cube - selected = mesh.select_enclosed_points(cube,inside_out = True) - pts = mesh.extract_points( + selected = mesh.select_enclosed_points(cube, inside_out=True) + pts = mesh.extract_points( selected['SelectedPoints'].view(bool), include_cells=False, ) - print(f"number of sp constriants: {pts.n_points*9}") - - - - - + print(f'number of sp constriants: {pts.n_points*9}') # f = h5py.File('./DRMloadSmall.h5drm', 'r') - if DRMinformation["DRM_Location"].lower() == "local": + if DRMinformation['DRM_Location'].lower() == 'local': f = h5py.File(DRMfile, 'r') - pts = f["DRM_Data"]["xyz"][:] - internal = f["DRM_Data"]["internal"][:] - xyz0 = f["DRM_QA_Data"]["xyz"][:] - - - - - if DRMinformation["DRM_Provider_Software"].lower() == "shakermaker" and DRMinformation["DRM_Location"].lower() == "local": + pts = f['DRM_Data']['xyz'][:] + internal = f['DRM_Data']['internal'][:] + xyz0 = f['DRM_QA_Data']['xyz'][:] + + if ( + DRMinformation['DRM_Provider_Software'].lower() == 'shakermaker' + and DRMinformation['DRM_Location'].lower() == 'local' + ): pts = pts - xyz0 - pts[:,[0,1]] = pts[:,[1,0]] - pts[:,2] = -pts[:,2] - pts = pts * DRMinformation["crd_scale"] - + pts[:, [0, 1]] = pts[:, [1, 0]] + pts[:, 2] = -pts[:, 2] + pts = pts * DRMinformation['crd_scale'] - if DRMinformation["DRM_Location"].lower() == "local": + if DRMinformation['DRM_Location'].lower() == 'local': set1 = DRM.points set2 = pts @@ -660,77 +798,64 @@ def DRM_PML_Foundation_Meshgenrator(meshinfo) : if dist > tolernace: issubset = False break - + if issubset: - print("The DRM nodes in the loading file are subset of the DRM mesh") + print('The DRM nodes in the loading file are subset of the DRM mesh') else: - print("The DRM nodes in the loading file are not subset of the DRM mesh") - print("Please check the DRM nodes in the loading file or change the DRM mesh") - else : - print("The DRM nodes are not checked for subset") - - - - + print('The DRM nodes in the loading file are not subset of the DRM mesh') + print( + 'Please check the DRM nodes in the loading file or change the DRM mesh' + ) + else: + print('The DRM nodes are not checked for subset') pl = pv.Plotter() # plot the DRM layer with the DRM nodes loaded from the DRM file - pl.add_mesh(DRM,scalars="partitioned",show_edges=True) - if DRMinformation["DRM_Location"].lower() == "local": + pl.add_mesh(DRM, scalars='partitioned', show_edges=True) + if DRMinformation['DRM_Location'].lower() == 'local': pl.add_points(pts, color='r') - pl.export_html(meshplotdir+"/DRM.html") + pl.export_html(meshplotdir + '/DRM.html') pl.clear() - # plot the regular mesh with the internal nodes loaded from the DRM file - pl.add_mesh(regular,scalars="partitioned",show_edges=True) - 
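# NOTE (editorial sketch): the hunk above shows only the tail of the DRM subset test
# (the loop body that computes `dist` lies outside this diff).  One common way to
# implement it -- offered here as an assumption, not the repository code -- is a
# nearest-neighbour query from the DRM load points to the DRM mesh nodes:
import numpy as np
from scipy.spatial import cKDTree

def drm_points_are_subset(mesh_points, load_points, tolerance=1e-2):
    """Return True if every DRM load point lies within `tolerance` of a mesh node."""
    tree = cKDTree(mesh_points)
    dist, _ = tree.query(load_points, k=1)
    return bool(np.all(dist <= tolerance))

# drm_points_are_subset(DRM.points, pts)  # mirrors the 'is subset' messages printed above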
pl.export_html(meshplotdir+"/Regular.html") + pl.add_mesh(regular, scalars='partitioned', show_edges=True) + pl.export_html(meshplotdir + '/Regular.html') pl.clear() # plot the PML mesh - pl.add_mesh(PML,scalars="partitioned",show_edges=True) - pl.export_html(meshplotdir+"/PML.html") + pl.add_mesh(PML, scalars='partitioned', show_edges=True) + pl.export_html(meshplotdir + '/PML.html') pl.clear() - # plot the total mesh - pl.add_mesh(mesh,scalars="partitioned",show_edges=True) - pl.export_html(meshplotdir+"/Total_partitioned.html") + pl.add_mesh(mesh, scalars='partitioned', show_edges=True) + pl.export_html(meshplotdir + '/Total_partitioned.html') pl.clear() # plot the total mesh with domain scalars - pl.add_mesh(mesh,scalars="Domain",show_edges=True) - pl.export_html(meshplotdir+"/Total_domain.html") + pl.add_mesh(mesh, scalars='Domain', show_edges=True) + pl.export_html(meshplotdir + '/Total_domain.html') pl.clear() - - if AbsorbingElements == "ASDA": - pl.add_mesh(mesh,scalars="ASDA_type",show_edges=True,cmap="tab20") - pl.export_html(meshplotdir+"/ASDA_total.html") + if AbsorbingElements == 'ASDA': + pl.add_mesh(mesh, scalars='ASDA_type', show_edges=True, cmap='tab20') + pl.export_html(meshplotdir + '/ASDA_total.html') pl.clear() # filter the mesh with domain pml - indices = mesh['Domain'] == info["PMLDomain"] + indices = mesh['Domain'] == info['PMLDomain'] grid = mesh.extract_cells(indices) - pl.add_mesh(grid,scalars="ASDA_type",show_edges=True,cmap="tab20b") - pl.export_html(meshplotdir+"/ASDA_PML.html") + pl.add_mesh(grid, scalars='ASDA_type', show_edges=True, cmap='tab20b') + pl.export_html(meshplotdir + '/ASDA_PML.html') pl.clear() pl.close() # save the mesh - mesh.save(os.path.join(OutputDir,"mesh.vtk"),binary=True) - - + mesh.save(os.path.join(OutputDir, 'mesh.vtk'), binary=True) # return thenumber of elements return (mesh.n_cells, mesh.n_points) - - - - - - # %% diff --git a/modules/tools/DRM/MeshGenerator/Structure.py b/modules/tools/DRM/MeshGenerator/Structure.py index 954781558..b2a14ad16 100644 --- a/modules/tools/DRM/MeshGenerator/Structure.py +++ b/modules/tools/DRM/MeshGenerator/Structure.py @@ -1,8 +1,8 @@ # %% -import pyvista as pv +import pyvista as pv import numpy as np -import ctypes -import os +import ctypes +import os import sys # ============================================================================= # information @@ -35,33 +35,36 @@ # DRMfile = sys.argv[27] - -xwidth = 100 -ywidth = 100 -zwidth = 40 -eps = 1e-6 +xwidth = 100 +ywidth = 100 +zwidth = 40 +eps = 1e-6 Xmeshsize, Ymeshsize, Zmeshsize = (2.5, 2.5, 2.5) -PMLThickness = np.array([Xmeshsize, Ymeshsize, Zmeshsize]) ; # thickness of the each PML layer -numPMLLayers = 2 ; # number of PML layers -PMLTotalThickness = PMLThickness * numPMLLayers ; # total thickness of the PML layers -DRMThickness = np.array([Xmeshsize, Ymeshsize, Zmeshsize]) ; # thickness of the DRM layers -numDrmLayers = 1 ; # number of DRM layers -DRMTotalThickness = DRMThickness * numDrmLayers ; # total thickness of the DRM layers -padLayers = numPMLLayers + numDrmLayers ; # number of layers to pad the meshgrid -padThickness = PMLTotalThickness + DRMThickness ; # thickness of the padding layers -reg_num_cores = 3 -DRM_num_cores = 3 -PML_num_cores = 3 -Dir = "OpenSeesMesh" -OutputDir = "results" -pileboxcenterx = 0 -pileboxcentery = 0 -pileboxcenterz = 0 -pileboxlx = 20 -pileboxly = 20 -pileboxdepth = 10.0 -AbsorbingElements = "PML" -DRMfile = "SurfaceWave.h5drm" +PMLThickness = np.array( + [Xmeshsize, Ymeshsize, Zmeshsize] +) # 
thickness of the each PML layer +numPMLLayers = 2 # number of PML layers +PMLTotalThickness = PMLThickness * numPMLLayers # total thickness of the PML layers +DRMThickness = np.array( + [Xmeshsize, Ymeshsize, Zmeshsize] +) # thickness of the DRM layers +numDrmLayers = 1 # number of DRM layers +DRMTotalThickness = DRMThickness * numDrmLayers # total thickness of the DRM layers +padLayers = numPMLLayers + numDrmLayers # number of layers to pad the meshgrid +padThickness = PMLTotalThickness + DRMThickness # thickness of the padding layers +reg_num_cores = 3 +DRM_num_cores = 3 +PML_num_cores = 3 +Dir = 'OpenSeesMesh' +OutputDir = 'results' +pileboxcenterx = 0 +pileboxcentery = 0 +pileboxcenterz = 0 +pileboxlx = 20 +pileboxly = 20 +pileboxdepth = 10.0 +AbsorbingElements = 'PML' +DRMfile = 'SurfaceWave.h5drm' # # print the input information @@ -95,49 +98,48 @@ # create a dictionary for meshing information info = { - "RegularDomain": 1, - "DRMDomain": 2, - "PMLDomain": 3, + 'RegularDomain': 1, + 'DRMDomain': 2, + 'PMLDomain': 3, } pileboxinfo = {} # adding different plots -meshplotdir = OutputDir + "/meshplots" +meshplotdir = OutputDir + '/meshplots' if not os.path.exists(meshplotdir): os.makedirs(meshplotdir) # ============================================================================= # buiding an structure mesh # ============================================================================= embededDepth = { - "xmax": 20, - "xmin": -20, - "ymax": 15, - "ymin": -15, - "zmax": 0, - "zmin": -5, + 'xmax': 20, + 'xmin': -20, + 'ymax': 15, + 'ymin': -15, + 'zmax': 0, + 'zmin': -5, } - foundationBlocks = [] -cnetersx = np.arange(-15,15+eps,7.5) -cnetersy = np.arange(-7.5,7.5+eps,7.5) -cnetersx,cnetersy = np.meshgrid(cnetersx,cnetersy) -# make them tuples -centers = np.vstack((cnetersx.flatten(),cnetersy.flatten())).T +cnetersx = np.arange(-15, 15 + eps, 7.5) +cnetersy = np.arange(-7.5, 7.5 + eps, 7.5) +cnetersx, cnetersy = np.meshgrid(cnetersx, cnetersy) +# make them tuples +centers = np.vstack((cnetersx.flatten(), cnetersy.flatten())).T for i in range(len(centers)): blockinfo = { - "xmax" : centers[i][0] + 2, - "xmin" : centers[i][0] - 2, - "ymax" : centers[i][1] + 2, - "ymin" : centers[i][1] - 2, - "zmax" : -3, - "zmin" : -5, - "Xmeshsize": 1.0, - "Ymeshsize": 1.0, - "Zmeshsize": 1.0, + 'xmax': centers[i][0] + 2, + 'xmin': centers[i][0] - 2, + 'ymax': centers[i][1] + 2, + 'ymin': centers[i][1] - 2, + 'zmax': -3, + 'zmin': -5, + 'Xmeshsize': 1.0, + 'Ymeshsize': 1.0, + 'Zmeshsize': 1.0, } foundationBlocks.append(blockinfo) @@ -146,18 +148,18 @@ # ============================================================================= pilelist = [] for ii in range(len(centers)): - - for tu in [(1,1),(1,-1),(-1,1),(-1,-1)]: - i,j = tu - pilelist.append({ - "xtop": centers[ii][0] + i, - "ytop": centers[ii][1] + j, - "ztop": -4, - "xbottom": centers[ii][0] + i, - "ybottom": centers[ii][1] + j, - "zbottom": -10.0, - }) - + for tu in [(1, 1), (1, -1), (-1, 1), (-1, -1)]: + i, j = tu + pilelist.append( + { + 'xtop': centers[ii][0] + i, + 'ytop': centers[ii][1] + j, + 'ztop': -4, + 'xbottom': centers[ii][0] + i, + 'ybottom': centers[ii][1] + j, + 'zbottom': -10.0, + } + ) # ============================================================================= @@ -165,7 +167,11 @@ # ============================================================================= # change the directory to the current directory os.chdir(os.path.dirname(os.path.abspath(__file__))) -libpath = os.getcwd().split("OpenSeesProjects")[0] + 
"OpenSeesProjects/" + "MeshGenerator/lib" +libpath = ( + os.getcwd().split('OpenSeesProjects')[0] + + 'OpenSeesProjects/' + + 'MeshGenerator/lib' +) print(libpath) if os.name == 'nt': metis_partition_lib = ctypes.CDLL(f'{libpath}/Partitioner.dll') @@ -173,71 +179,85 @@ metis_partition_lib = ctypes.CDLL(f'{libpath}/libPartitioner.so') # Define function argument and return types -metis_partition_lib.Partition.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_int32), ctypes.c_int, ctypes.POINTER(ctypes.c_int32)] -metis_partition_lib.Partition.restype = ctypes.c_int +metis_partition_lib.Partition.argtypes = [ + ctypes.c_int, + ctypes.c_int, + ctypes.POINTER(ctypes.c_int32), + ctypes.c_int, + ctypes.POINTER(ctypes.c_int32), +] +metis_partition_lib.Partition.restype = ctypes.c_int + def partition(mesh, numcores): numcells = mesh.n_cells numpoints = mesh.n_points numweights = 1 cells = np.array(mesh.cells.reshape(-1, 9), dtype=np.int32) - cells = cells[:,1:] + cells = cells[:, 1:] cellspointer = cells.flatten().ctypes.data_as(ctypes.POINTER(ctypes.c_int32)) partitioned = np.empty(numcells, dtype=np.int32) partitionedpointer = partitioned.ctypes.data_as(ctypes.POINTER(ctypes.c_int32)) - metis_partition_lib.Partition(numcells, numpoints, cellspointer, numcores, partitionedpointer) + metis_partition_lib.Partition( + numcells, numpoints, cellspointer, numcores, partitionedpointer + ) mesh.cell_data['partitioned'] = partitioned - - -def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): +def partition_with_pilebox(messh, numcores, pileboxinfo, tol=1e-6): # seperate the core of the pilebox mesh = messh.copy() eps = 1e-6 # check if the xmin,xmax, ymin ymax ,zmin zmax is exist in the infokeys - if "xmin" in pileboxinfo.keys(): - pilebox_xmin = pileboxinfo["xmin"] - if "xmax" in pileboxinfo.keys(): - pilebox_xmax = pileboxinfo["xmax"] - if "ymin" in pileboxinfo.keys(): - pilebox_ymin = pileboxinfo["ymin"] - if "ymax" in pileboxinfo.keys(): - pilebox_ymax = pileboxinfo["ymax"] - if "zmin" in pileboxinfo.keys(): - pilebox_zmin = pileboxinfo["zmin"] - if "zmax" in pileboxinfo.keys(): - pilebox_zmax = pileboxinfo["zmax"] - if "center" in pileboxinfo.keys(): - pilebox_xmin = pileboxinfo["center"][0] - pileboxinfo["lx"]/2 + eps - pilebox_xmax = pileboxinfo["center"][0] + pileboxinfo["lx"]/2 - eps - pilebox_ymin = pileboxinfo["center"][1] - pileboxinfo["ly"]/2 + eps - pilebox_ymax = pileboxinfo["center"][1] + pileboxinfo["ly"]/2 - eps - pilebox_zmin = pileboxinfo["center"][2] - pileboxinfo["depth"] + eps - pilebox_zmax = pileboxinfo["center"][2] + tol + eps - + if 'xmin' in pileboxinfo.keys(): + pilebox_xmin = pileboxinfo['xmin'] + if 'xmax' in pileboxinfo.keys(): + pilebox_xmax = pileboxinfo['xmax'] + if 'ymin' in pileboxinfo.keys(): + pilebox_ymin = pileboxinfo['ymin'] + if 'ymax' in pileboxinfo.keys(): + pilebox_ymax = pileboxinfo['ymax'] + if 'zmin' in pileboxinfo.keys(): + pilebox_zmin = pileboxinfo['zmin'] + if 'zmax' in pileboxinfo.keys(): + pilebox_zmax = pileboxinfo['zmax'] + if 'center' in pileboxinfo.keys(): + pilebox_xmin = pileboxinfo['center'][0] - pileboxinfo['lx'] / 2 + eps + pilebox_xmax = pileboxinfo['center'][0] + pileboxinfo['lx'] / 2 - eps + pilebox_ymin = pileboxinfo['center'][1] - pileboxinfo['ly'] / 2 + eps + pilebox_ymax = pileboxinfo['center'][1] + pileboxinfo['ly'] / 2 - eps + pilebox_zmin = pileboxinfo['center'][2] - pileboxinfo['depth'] + eps + pilebox_zmax = pileboxinfo['center'][2] + tol + eps # find the cells that are inside the pilebox - cube = 
pv.Cube(bounds=[pilebox_xmin,pilebox_xmax,pilebox_ymin,pilebox_ymax,pilebox_zmin,pilebox_zmax]) + cube = pv.Cube( + bounds=[ + pilebox_xmin, + pilebox_xmax, + pilebox_ymin, + pilebox_ymax, + pilebox_zmin, + pilebox_zmax, + ] + ) # extract the cells that are outside the pilebox indices = mesh.find_cells_within_bounds(cube.bounds) - + # crete newindices for outside the pilebox - newindices = np.ones(mesh.n_cells,dtype=bool) + newindices = np.ones(mesh.n_cells, dtype=bool) newindices[indices] = False newindices = np.where(newindices)[0] - # extract the cells that are inside the pilebox mesh2 = mesh.extract_cells(newindices) # partition the mesh - + if numcores > 2: - partition(mesh2,numcores-1) + partition(mesh2, numcores - 1) if numcores == 2: - mesh2.cell_data['partitioned'] = np.zeros(mesh2.n_cells,dtype=np.int32) - - mesh.cell_data['partitioned'] = np.zeros(mesh.n_cells,dtype=np.int32) + mesh2.cell_data['partitioned'] = np.zeros(mesh2.n_cells, dtype=np.int32) + + mesh.cell_data['partitioned'] = np.zeros(mesh.n_cells, dtype=np.int32) mesh.cell_data['partitioned'][newindices] = mesh2.cell_data['partitioned'] + 1 messh.cell_data['partitioned'] = mesh.cell_data['partitioned'] @@ -245,80 +265,134 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): # ============================================================================= # meshing # ============================================================================= -x = np.arange(-xwidth/2., xwidth/2.+eps, Xmeshsize) -y = np.arange(-ywidth/2., ywidth/2.+eps, Ymeshsize) -z = np.arange(-zwidth, 0+eps, Zmeshsize) +x = np.arange(-xwidth / 2.0, xwidth / 2.0 + eps, Xmeshsize) +y = np.arange(-ywidth / 2.0, ywidth / 2.0 + eps, Ymeshsize) +z = np.arange(-zwidth, 0 + eps, Zmeshsize) # padding x and y for PML and DRM layers -x = np.pad(x, (numDrmLayers,numDrmLayers), "linear_ramp", end_values=(x.min()-DRMTotalThickness[0], x.max()+DRMTotalThickness[0])) -y = np.pad(y, (numDrmLayers,numDrmLayers), "linear_ramp", end_values=(y.min()-DRMTotalThickness[1], y.max()+DRMTotalThickness[1])) -z = np.pad(z, (numDrmLayers,0), "linear_ramp", end_values=(z.min()-DRMTotalThickness[2])) +x = np.pad( + x, + (numDrmLayers, numDrmLayers), + 'linear_ramp', + end_values=(x.min() - DRMTotalThickness[0], x.max() + DRMTotalThickness[0]), +) +y = np.pad( + y, + (numDrmLayers, numDrmLayers), + 'linear_ramp', + end_values=(y.min() - DRMTotalThickness[1], y.max() + DRMTotalThickness[1]), +) +z = np.pad( + z, (numDrmLayers, 0), 'linear_ramp', end_values=(z.min() - DRMTotalThickness[2]) +) # padding the x and y for PML and PML layers -x = np.pad(x, (numPMLLayers,numPMLLayers), "linear_ramp", end_values=(x.min()-PMLTotalThickness[0], x.max()+PMLTotalThickness[0])) -y = np.pad(y, (numPMLLayers,numPMLLayers), "linear_ramp", end_values=(y.min()-PMLTotalThickness[1], y.max()+PMLTotalThickness[1])) -z = np.pad(z, (numPMLLayers,0), "linear_ramp", end_values=(z.min()-PMLTotalThickness[2])) +x = np.pad( + x, + (numPMLLayers, numPMLLayers), + 'linear_ramp', + end_values=(x.min() - PMLTotalThickness[0], x.max() + PMLTotalThickness[0]), +) +y = np.pad( + y, + (numPMLLayers, numPMLLayers), + 'linear_ramp', + end_values=(y.min() - PMLTotalThickness[1], y.max() + PMLTotalThickness[1]), +) +z = np.pad( + z, (numPMLLayers, 0), 'linear_ramp', end_values=(z.min() - PMLTotalThickness[2]) +) # %% x, y, z = np.meshgrid(x, y, z, indexing='ij') mesh = pv.StructuredGrid(x, y, z) # ============================================================================= -# sperate embedding layer +# 
sperate embedding layer # ============================================================================= -cube = pv.Cube(bounds=[embededDepth["xmin"],embededDepth["xmax"],embededDepth["ymin"],embededDepth["ymax"],embededDepth["zmin"],embededDepth["zmax"]]) -mesh = mesh.clip_box(cube,invert=True,crinkle=True,progress_bar = True) +cube = pv.Cube( + bounds=[ + embededDepth['xmin'], + embededDepth['xmax'], + embededDepth['ymin'], + embededDepth['ymax'], + embededDepth['zmin'], + embededDepth['zmax'], + ] +) +mesh = mesh.clip_box(cube, invert=True, crinkle=True, progress_bar=True) mesh.clear_data() # ============================================================================= # Add foundation blocks # ============================================================================= -for i,block in enumerate(foundationBlocks): - xBLOCK = np.arange(block["xmin"], block["xmax"]+eps, block["Xmeshsize"]) - yBLOCK = np.arange(block["ymin"], block["ymax"]+eps, block["Ymeshsize"]) - zBLOCK = np.arange(block["zmin"], block["zmax"]+eps, block["Zmeshsize"]) +for i, block in enumerate(foundationBlocks): + xBLOCK = np.arange(block['xmin'], block['xmax'] + eps, block['Xmeshsize']) + yBLOCK = np.arange(block['ymin'], block['ymax'] + eps, block['Ymeshsize']) + zBLOCK = np.arange(block['zmin'], block['zmax'] + eps, block['Zmeshsize']) xBLOCK, yBLOCK, zBLOCK = np.meshgrid(xBLOCK, yBLOCK, zBLOCK, indexing='ij') if i == 0: foundation = pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK) else: - foundation = foundation.merge(pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK),merge_points=False,tolerance=1e-6,progress_bar = True) - + foundation = foundation.merge( + pv.StructuredGrid(xBLOCK, yBLOCK, zBLOCK), + merge_points=False, + tolerance=1e-6, + progress_bar=True, + ) # ============================================================================= # adding piles # ============================================================================= -pilenodes = np.zeros((len(pilelist)*2,3)) -pileelement = np.zeros((len(pilelist),3),dtype=int) +pilenodes = np.zeros((len(pilelist) * 2, 3)) +pileelement = np.zeros((len(pilelist), 3), dtype=int) for i in range(len(pilelist)): - pilenodes[i*2] = [pilelist[i]["xtop"],pilelist[i]["ytop"],pilelist[i]["ztop"]] - pilenodes[i*2+1] = [pilelist[i]["xbottom"],pilelist[i]["ybottom"],pilelist[i]["zbottom"]] - pileelement[i] = [2,2*i,2*i+1] -celltypes = np.ones(pileelement.shape[0],dtype= int) * pv.CellType.LINE -piles = pv.UnstructuredGrid(pileelement.tolist(),celltypes.tolist(),pilenodes.tolist()) - + pilenodes[i * 2] = [ + pilelist[i]['xtop'], + pilelist[i]['ytop'], + pilelist[i]['ztop'], + ] + pilenodes[i * 2 + 1] = [ + pilelist[i]['xbottom'], + pilelist[i]['ybottom'], + pilelist[i]['zbottom'], + ] + pileelement[i] = [2, 2 * i, 2 * i + 1] +celltypes = np.ones(pileelement.shape[0], dtype=int) * pv.CellType.LINE +piles = pv.UnstructuredGrid( + pileelement.tolist(), celltypes.tolist(), pilenodes.tolist() +) + # %% pl = pv.Plotter() -pl.add_mesh(piles, show_edges=True, color = "r" , line_width=4.0,) -pl.add_mesh(foundation, color="gray", opacity=0.5 ) -pl.add_mesh(mesh, opacity=0.5 ) +pl.add_mesh( + piles, + show_edges=True, + color='r', + line_width=4.0, +) +pl.add_mesh(foundation, color='gray', opacity=0.5) +pl.add_mesh(mesh, opacity=0.5) pl.camera_position = 'xz' -pl.export_html(meshplotdir+"/pile.html") +pl.export_html(meshplotdir + '/pile.html') # pl.show() # %% # merge the piles and foundation -foundation = foundation.merge(piles,merge_points=False,tolerance=1e-6,progress_bar = True) +foundation = 
foundation.merge( + piles, merge_points=False, tolerance=1e-6, progress_bar=True +) foundationbounds = foundation.bounds -pileboxinfo["xmin"] = foundationbounds[0] -pileboxinfo["xmax"] = foundationbounds[1] -pileboxinfo["ymin"] = foundationbounds[2] -pileboxinfo["ymax"] = foundationbounds[3] -pileboxinfo["zmin"] = foundationbounds[4] -pileboxinfo["zmax"] = foundationbounds[5] -mesh = mesh.merge(foundation,merge_points=False,tolerance=1e-6,progress_bar = True) +pileboxinfo['xmin'] = foundationbounds[0] +pileboxinfo['xmax'] = foundationbounds[1] +pileboxinfo['ymin'] = foundationbounds[2] +pileboxinfo['ymax'] = foundationbounds[3] +pileboxinfo['zmin'] = foundationbounds[4] +pileboxinfo['zmax'] = foundationbounds[5] +mesh = mesh.merge(foundation, merge_points=False, tolerance=1e-6, progress_bar=True) # %% # ============================================================================= -# sperate PML layer +# sperate PML layer # ============================================================================= xmin = x.min() + PMLTotalThickness[0] xmax = x.max() - PMLTotalThickness[0] @@ -326,32 +400,29 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): ymax = y.max() - PMLTotalThickness[1] zmin = z.min() + PMLTotalThickness[2] zmax = z.max() + PMLTotalThickness[2] -cube = pv.Cube(bounds=[xmin,xmax,ymin,ymax,zmin,zmax]) -PML = mesh.clip_box(cube,invert=True, crinkle=True, progress_bar = True) -reg = mesh.clip_box(cube,invert=False,crinkle=True, progress_bar = True) - - - - - - +cube = pv.Cube(bounds=[xmin, xmax, ymin, ymax, zmin, zmax]) +PML = mesh.clip_box(cube, invert=True, crinkle=True, progress_bar=True) +reg = mesh.clip_box(cube, invert=False, crinkle=True, progress_bar=True) # reg.plot(show_edges=True,show_grid=True,show_axes=True,show_bounds=True) pl = pv.Plotter() -pl.add_mesh(reg, show_edges=True, style="wireframe" ) +pl.add_mesh(reg, show_edges=True, style='wireframe') pl.show() - # %% # now find DRM layer -indices = reg.find_cells_within_bounds([xmin + DRMTotalThickness[0] + eps, - xmax - DRMTotalThickness[0] - eps, - ymin + DRMTotalThickness[1] + eps, - ymax - DRMTotalThickness[1] - eps, - zmin + DRMTotalThickness[2] + eps, - zmax + DRMTotalThickness[2] + eps]) +indices = reg.find_cells_within_bounds( + [ + xmin + DRMTotalThickness[0] + eps, + xmax - DRMTotalThickness[0] - eps, + ymin + DRMTotalThickness[1] + eps, + ymax - DRMTotalThickness[1] - eps, + zmin + DRMTotalThickness[2] + eps, + zmax + DRMTotalThickness[2] + eps, + ] +) # print(DRMTotalThickness) # print(xmax, xmin, ymax, ymin, zmax, zmin) # pl = pv.Plotter() @@ -366,204 +437,205 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): # pl.show() # exit # now create complemntary indices for DRM -DRMindices = np.ones(reg.n_cells,dtype=bool) +DRMindices = np.ones(reg.n_cells, dtype=bool) DRMindices[indices] = False DRMindices = np.where(DRMindices)[0] - -reg.cell_data['Domain'] = np.ones(reg.n_cells,dtype=np.int8)*info["DRMDomain"] -reg.cell_data['Domain'][indices] = info["RegularDomain"] -PML.cell_data['Domain'] = np.ones(PML.n_cells,dtype=np.int8)*info["PMLDomain"] -reg.cell_data['partitioned'] = np.zeros(reg.n_cells,dtype=np.int32) -PML.cell_data['partitioned'] = np.zeros(PML.n_cells,dtype=np.int32) +reg.cell_data['Domain'] = np.ones(reg.n_cells, dtype=np.int8) * info['DRMDomain'] +reg.cell_data['Domain'][indices] = info['RegularDomain'] +PML.cell_data['Domain'] = np.ones(PML.n_cells, dtype=np.int8) * info['PMLDomain'] +reg.cell_data['partitioned'] = np.zeros(reg.n_cells, dtype=np.int32) 
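# NOTE (editorial sketch): the three sub-meshes are partitioned independently and then
# shifted onto a single global processor numbering: regular-soil ranks come first, DRM
# ranks are offset by reg_num_cores, and PML ranks by reg_num_cores + DRM_num_cores.
# With the 3/3/3 split used in this script the layout is simply:
#
#   regular -> cores 0, 1, 2
#   DRM     -> cores 3, 4, 5
#   PML     -> cores 6, 7, 8
#
# (In the workflow version earlier in this patch, the same ranks are additionally
# shifted by the number of structure cores when the Nodes/Elements files are written.)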
+PML.cell_data['partitioned'] = np.zeros(PML.n_cells, dtype=np.int32) # partitioning regular mesh -regular = reg.extract_cells(indices,progress_bar=True) -DRM = reg.extract_cells(DRMindices,progress_bar=True) +regular = reg.extract_cells(indices, progress_bar=True) +DRM = reg.extract_cells(DRMindices, progress_bar=True) if reg_num_cores > 1: - partition_with_pilebox(regular,reg_num_cores,pileboxinfo,tol=10) + partition_with_pilebox(regular, reg_num_cores, pileboxinfo, tol=10) if DRM_num_cores > 1: - partition(DRM,DRM_num_cores) + partition(DRM, DRM_num_cores) if PML_num_cores > 1: - partition(PML,PML_num_cores) + partition(PML, PML_num_cores) -reg.cell_data['partitioned'][regular["vtkOriginalCellIds"]] = regular.cell_data['partitioned'] -reg.cell_data['partitioned'][DRM["vtkOriginalCellIds"]] = DRM.cell_data['partitioned'] + reg_num_cores -PML.cell_data['partitioned'] = PML.cell_data['partitioned'] + reg_num_cores + DRM_num_cores +reg.cell_data['partitioned'][regular['vtkOriginalCellIds']] = regular.cell_data[ + 'partitioned' +] +reg.cell_data['partitioned'][DRM['vtkOriginalCellIds']] = ( + DRM.cell_data['partitioned'] + reg_num_cores +) +PML.cell_data['partitioned'] = ( + PML.cell_data['partitioned'] + reg_num_cores + DRM_num_cores +) # %% # merging PML and regular mesh to create a single mesh -mesh = reg.merge(PML,merge_points=False,tolerance=1e-6,progress_bar = True) +mesh = reg.merge(PML, merge_points=False, tolerance=1e-6, progress_bar=True) # mapping between PML and regular mesh on the boundary -mapping = mesh.clean(produce_merge_map=True)["PointMergeMap"] -regindicies = np.where(mapping[PML.n_points:]0)[0] +indices = np.where(mesh.point_data['boundary'] > 0)[0] # %% -mesh["matTag"] = np.ones(mesh.n_cells,dtype=np.uint8) - - - +mesh['matTag'] = np.ones(mesh.n_cells, dtype=np.uint8) # %% # define the ASDA absorbing elements -if AbsorbingElements == "ASDA": - - mesh = mesh.clean(tolerance=1e-6,remove_unused_points=False) - mesh["ASDA_type"] = np.zeros(mesh.n_cells,dtype=np.uint8) +if AbsorbingElements == 'ASDA': + mesh = mesh.clean(tolerance=1e-6, remove_unused_points=False) + mesh['ASDA_type'] = np.zeros(mesh.n_cells, dtype=np.uint8) ASDAelem_type = { - "B" :1 , - "L" :2 , - "R" :3 , - "F" :4 , - "K" :5 , - "BL" :6 , - "BR" :7 , - "BF" :8 , - "BK" :9 , - "LF" :10, - "LK" :11, - "RF" :12, - "RK" :13, - "BLF" :14, - "BLK" :15, - "BRF" :16, - "BRK" :17, + 'B': 1, + 'L': 2, + 'R': 3, + 'F': 4, + 'K': 5, + 'BL': 6, + 'BR': 7, + 'BF': 8, + 'BK': 9, + 'LF': 10, + 'LK': 11, + 'RF': 12, + 'RK': 13, + 'BLF': 14, + 'BLK': 15, + 'BRF': 16, + 'BRK': 17, } ASDAelem_typereverse = { - 1 :"B" , - 2 :"L" , - 3 :"R" , - 4 :"F" , - 5 :"K" , - 6 :"BL" , - 7 :"BR" , - 8 :"BF" , - 9 :"BK" , - 10:"LF" , - 11:"LK" , - 12:"RF" , - 13:"RK" , - 14:"BLF" , - 15:"BLK" , - 16:"BRF" , - 17:"BRK" , + 1: 'B', + 2: 'L', + 3: 'R', + 4: 'F', + 5: 'K', + 6: 'BL', + 7: 'BR', + 8: 'BF', + 9: 'BK', + 10: 'LF', + 11: 'LK', + 12: 'RF', + 13: 'RK', + 14: 'BLF', + 15: 'BLK', + 16: 'BRF', + 17: 'BRK', } xmin, xmax, ymin, ymax, zmin, zmax = reg.bounds ASDA_xwidth = xmax - xmin ASDA_ywidth = ymax - ymin ASDA_zwidth = zmax - zmin - print("ASDA_xwidth", ASDA_xwidth) - print("ASDA_ywidth", ASDA_ywidth) - print("ASDA_zwidth", ASDA_zwidth) + print('ASDA_xwidth', ASDA_xwidth) + print('ASDA_ywidth', ASDA_ywidth) + print('ASDA_zwidth', ASDA_zwidth) i = 0 for ele_center in mesh.cell_centers().points: # check if the element is in the left or rightside - if ele_center[0] < (-ASDA_xwidth/2.): + if ele_center[0] < (-ASDA_xwidth / 2.0): # it is 
in the left side # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BLK"] + mesh['ASDA_type'][i] = ASDAelem_type['BLK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["LK"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['LK'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BLF"] + mesh['ASDA_type'][i] = ASDAelem_type['BLF'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["LF"] + mesh['ASDA_type'][i] = ASDAelem_type['LF'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BL"] + mesh['ASDA_type'][i] = ASDAelem_type['BL'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["L"] + mesh['ASDA_type'][i] = ASDAelem_type['L'] - elif ele_center[0] > (ASDA_xwidth/2.): + elif ele_center[0] > (ASDA_xwidth / 2.0): # it is in the right side # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BRK"] + mesh['ASDA_type'][i] = ASDAelem_type['BRK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["RK"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['RK'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BRF"] + mesh['ASDA_type'][i] = ASDAelem_type['BRF'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["RF"] + mesh['ASDA_type'][i] = ASDAelem_type['RF'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BR"] + mesh['ASDA_type'][i] = ASDAelem_type['BR'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["R"] + mesh['ASDA_type'][i] = ASDAelem_type['R'] else: # it is in the middle # check if it is in the front or back - if ele_center[1] < (-ASDA_ywidth/2.): - # it is in the back + if ele_center[1] < (-ASDA_ywidth / 2.0): + # it is in the back # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BK"] + mesh['ASDA_type'][i] = ASDAelem_type['BK'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["K"] - elif ele_center[1] > (ASDA_ywidth/2.): + mesh['ASDA_type'][i] = ASDAelem_type['K'] + elif ele_center[1] > (ASDA_ywidth / 2.0): # it is in the front # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = ASDAelem_type["BF"] + mesh['ASDA_type'][i] = ASDAelem_type['BF'] else: # it is in the top - mesh["ASDA_type"][i] = ASDAelem_type["F"] + mesh['ASDA_type'][i] = ASDAelem_type['F'] else: # it is in the middle # check if it is in the bottom or top if ele_center[2] < -ASDA_zwidth: # it is in the bottom - mesh["ASDA_type"][i] = 
ASDAelem_type["B"] - + mesh['ASDA_type'][i] = ASDAelem_type['B'] + i += 1 # %% # ============================================================================= @@ -571,160 +643,161 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): # ============================================================================= if not os.path.exists(Dir): os.makedirs(Dir) -else : +else: # remove the files in the directory for file in os.listdir(Dir): - os.remove(os.path.join(Dir,file)) + os.remove(os.path.join(Dir, file)) min_core = mesh.cell_data['partitioned'].min() max_core = mesh.cell_data['partitioned'].max() - # write the mesh nodes -for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Nodes" + str(core) + ".tcl", "w") +for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned'] == core)[0]) + f = open(Dir + '/Nodes' + str(core) + '.tcl', 'w') for i in range(tmp.n_points): - f.write(f"node {tmp['vtkOriginalPointIds'][i]} {tmp.points[i][0]} {tmp.points[i][1]} {tmp.points[i][2]}\n") + f.write( + f"node {tmp['vtkOriginalPointIds'][i]} {tmp.points[i][0]} {tmp.points[i][1]} {tmp.points[i][2]}\n" + ) f.close() # %% # writing the mesh elements -if AbsorbingElements == "ASDA": +if AbsorbingElements == 'ASDA': # writing the mesh elements - for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Elements" + str(core) + ".tcl", "w") + for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned'] == core)[0]) + f = open(Dir + '/Elements' + str(core) + '.tcl', 'w') if core >= reg_num_cores + DRM_num_cores: for eletag in range(tmp.n_cells): - f.write(f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} $matTag{tmp['matTag'][eletag]} {ASDAelem_typereverse[tmp['ASDA_type'][eletag]]}\" \n") + f.write( + f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} $matTag{tmp['matTag'][eletag]} {ASDAelem_typereverse[tmp['ASDA_type'][eletag]]}\" \n" + ) else: for eletag in range(tmp.n_cells): - f.write(f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} $matTag{tmp['matTag'][eletag]}\" \n") + f.write( + f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} $matTag{tmp['matTag'][eletag]}\" \n" + ) f.close() -else : - for core in range(min_core,max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Elements" + str(core) + ".tcl", "w") +else: + for core in range(min_core, max_core + 1): + tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned'] == core)[0]) + f = open(Dir + '/Elements' + str(core) + '.tcl', 'w') for eletag in range(tmp.n_cells): - f.write(f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} $matTag{tmp['matTag'][eletag]}\" \n") + f.write( + f"eval \"element $elementType {tmp['vtkOriginalCellIds'][eletag]} {' '.join(str(x) for x in tmp['vtkOriginalPointIds'][tmp.get_cell(eletag).point_ids])} 
$matTag{tmp['matTag'][eletag]}\" \n" + ) f.close() -if AbsorbingElements == "PML": +if AbsorbingElements == 'PML': # writing the boundary files - for core in range(reg_num_cores + DRM_num_cores , max_core+1): - tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned']==core)[0]) - f = open(Dir + "/Boundary" + str(core) + ".tcl", "w") + for core in range(reg_num_cores + DRM_num_cores, max_core + 1): + tmp = mesh.extract_cells(np.where(mesh.cell_data['partitioned'] == core)[0]) + f = open(Dir + '/Boundary' + str(core) + '.tcl', 'w') for i in range(tmp.n_points): - if tmp["boundary"][i] != -1: - x,y,z = tmp.points[i] + if tmp['boundary'][i] != -1: + x, y, z = tmp.points[i] nodeTag1 = tmp['vtkOriginalPointIds'][i] nodeTag2 = tmp['boundary'][i] - f.write(f"node {nodeTag2} {str(x)} {str(y)} {str(z)}\n") - f.write(f"equalDOF {nodeTag2} {nodeTag1} 1 2 3\n") - - - - - - - - - + f.write(f'node {nodeTag2} {str(x)} {str(y)} {str(z)}\n') + f.write(f'equalDOF {nodeTag2} {nodeTag1} 1 2 3\n') # ============================================================================= # printing information # ============================================================================= -print(f"Number of regular cores: {reg_num_cores}") -print(f"Number of DRM cores: {DRM_num_cores}") -print(f"Number of PML cores: {PML_num_cores}") -print(f"Number of regular elements: {regular.n_cells} roughly {int(regular.n_cells/reg_num_cores)} each core") -print(f"Number of DRM elements: {DRM.n_cells} roughly {int(DRM.n_cells/DRM_num_cores)} each core") -print(f"Number of PML elements: {PML.n_cells} roughly {int(PML.n_cells/PML_num_cores)} each core") -print(f"Number of total elements: {mesh.n_cells}") -print(f"Number of total points: {mesh.n_points}") -print(f"Number of cores: {max_core-min_core+1}") -print(f"Number of PML nodes: {PML.n_points}") -print(f"Number of regular nodes: {regular.n_points}") -print(f"Number of DRM nodes: {DRM.n_points}") -if AbsorbingElements == "PML": - print(f"Number of MP constraints: {regindicies.size}") +print(f'Number of regular cores: {reg_num_cores}') +print(f'Number of DRM cores: {DRM_num_cores}') +print(f'Number of PML cores: {PML_num_cores}') +print( + f'Number of regular elements: {regular.n_cells} roughly {int(regular.n_cells/reg_num_cores)} each core' +) +print( + f'Number of DRM elements: {DRM.n_cells} roughly {int(DRM.n_cells/DRM_num_cores)} each core' +) +print( + f'Number of PML elements: {PML.n_cells} roughly {int(PML.n_cells/PML_num_cores)} each core' +) +print(f'Number of total elements: {mesh.n_cells}') +print(f'Number of total points: {mesh.n_points}') +print(f'Number of cores: {max_core-min_core+1}') +print(f'Number of PML nodes: {PML.n_points}') +print(f'Number of regular nodes: {regular.n_points}') +print(f'Number of DRM nodes: {DRM.n_points}') +if AbsorbingElements == 'PML': + print(f'Number of MP constraints: {regindicies.size}') # calculating number of surface points on the boundaries eps = 1e-2 -bounds = np.array(mesh.bounds)-np.array([-eps,eps,-eps,eps,-eps,-eps]) +bounds = np.array(mesh.bounds) - np.array([-eps, eps, -eps, eps, -eps, -eps]) cube = pv.Cube(bounds=bounds) # points outside the cube -selected = mesh.select_enclosed_points(cube,inside_out = True) -pts = mesh.extract_points( +selected = mesh.select_enclosed_points(cube, inside_out=True) +pts = mesh.extract_points( selected['SelectedPoints'].view(bool), include_cells=False, ) -print(f"number of sp constriants: {pts.n_points*9}") +print(f'number of sp constriants: {pts.n_points*9}') +import h5py - - -import 
h5py # f = h5py.File('./DRMloadSmall.h5drm', 'r') f = h5py.File(DRMfile, 'r') -pts = f["DRM_Data"]["xyz"][:] -internal = f["DRM_Data"]["internal"][:] +pts = f['DRM_Data']['xyz'][:] +internal = f['DRM_Data']['internal'][:] # %% - pl = pv.Plotter() # plot the DRM layer with the DRM nodes loaded from the DRM file -pl.add_mesh(DRM,scalars="partitioned",show_edges=True) +pl.add_mesh(DRM, scalars='partitioned', show_edges=True) pl.add_points(pts, color='r') -pl.export_html(meshplotdir+"/DRM.html") +pl.export_html(meshplotdir + '/DRM.html') pl.clear() # plot the regular mesh with the internal nodes loaded from the DRM file -pl.add_mesh(regular,scalars="partitioned",show_edges=True) -pl.export_html(meshplotdir+"/Regular.html") +pl.add_mesh(regular, scalars='partitioned', show_edges=True) +pl.export_html(meshplotdir + '/Regular.html') pl.clear() # plot the PML mesh -pl.add_mesh(PML,scalars="partitioned",show_edges=True) -pl.export_html(meshplotdir+"/PML.html") +pl.add_mesh(PML, scalars='partitioned', show_edges=True) +pl.export_html(meshplotdir + '/PML.html') pl.clear() # plot the total mesh -pl.add_mesh(mesh,scalars="partitioned",show_edges=True) -pl.export_html(meshplotdir+"/Total_partitioned.html") +pl.add_mesh(mesh, scalars='partitioned', show_edges=True) +pl.export_html(meshplotdir + '/Total_partitioned.html') pl.clear() # plot the total mesh with domain scalars -pl.add_mesh(mesh,scalars="Domain",show_edges=True) -pl.export_html(meshplotdir+"/Total_domain.html") +pl.add_mesh(mesh, scalars='Domain', show_edges=True) +pl.export_html(meshplotdir + '/Total_domain.html') pl.clear() -if AbsorbingElements == "ASDA": - pl.add_mesh(mesh,scalars="ASDA_type",show_edges=True,cmap="tab20") - pl.export_html(meshplotdir+"/ASDA_total.html") +if AbsorbingElements == 'ASDA': + pl.add_mesh(mesh, scalars='ASDA_type', show_edges=True, cmap='tab20') + pl.export_html(meshplotdir + '/ASDA_total.html') pl.clear() # filter the mesh with domain pml - indices = mesh['Domain'] == info["PMLDomain"] + indices = mesh['Domain'] == info['PMLDomain'] grid = mesh.extract_cells(indices) - pl.add_mesh(grid,scalars="ASDA_type",show_edges=True,cmap="tab20b") - pl.export_html(meshplotdir+"/ASDA_PML.html") + pl.add_mesh(grid, scalars='ASDA_type', show_edges=True, cmap='tab20b') + pl.export_html(meshplotdir + '/ASDA_PML.html') pl.clear() pl.close() # save the mesh -mesh.save(os.path.join(OutputDir,"mesh.vtk"),binary=True) +mesh.save(os.path.join(OutputDir, 'mesh.vtk'), binary=True) # mesh.plot(scalars="partitioned",show_edges=True,show_grid=True,show_axes=True,show_bounds=True) # %% # print number of PML nodes @@ -742,23 +815,23 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): # x1,y1,_ = p1 # x2,y2,_ = p2 # signed_area += (x1*y2 - x2*y1) - + # return signed_area/2.0 # for i in range(mesh.n_cells): # cell = mesh.get_cell(i) # points = mesh.points[cell.point_ids] - + # # first surface nodes # S1 = points[:4,:] # S2 = points[4:,:] - + # # check that the S2 is above S1 # if S1[:,2].mean() > S2[:,2].mean(): # print(f"Element {i} S1 is above S2") # # else : # # print(f"Element {i} S2 is above S1") - + # # calcuting signed area of the S1 # S1 = iscounterclockwise(S1) # S2 = iscounterclockwise(S2) @@ -766,9 +839,3 @@ def partition_with_pilebox(messh,numcores, pileboxinfo,tol=1e-6): # print(f"Element {i} S1 is not counter clockwise") # if S2 < 0: # print(f"Element {i} S2 is not counter clockwise") - - - - - - diff --git a/modules/tools/DRM/modelCreator.py b/modules/tools/DRM/modelCreator.py index 281c62ed1..d35ddfa45 100644 
--- a/modules/tools/DRM/modelCreator.py +++ b/modules/tools/DRM/modelCreator.py @@ -13,12 +13,24 @@ parser = argparse.ArgumentParser(description='Create a model for the DRM') -parser.add_argument('--soilwidth_x', type=float, help='width of the soil in x direction') -parser.add_argument('--soilwidth_y', type=float, help='width of the soil in y direction') -parser.add_argument('--soilwidth_z', type=float, help='width of the soil in z direction') -parser.add_argument('--soilNumEle_x', type=int, help='number of elements in x direction') -parser.add_argument('--soilNumEle_y', type=int, help='number of elements in y direction') -parser.add_argument('--soilNumEle_z', type=int, help='number of elements in z direction') +parser.add_argument( + '--soilwidth_x', type=float, help='width of the soil in x direction' +) +parser.add_argument( + '--soilwidth_y', type=float, help='width of the soil in y direction' +) +parser.add_argument( + '--soilwidth_z', type=float, help='width of the soil in z direction' +) +parser.add_argument( + '--soilNumEle_x', type=int, help='number of elements in x direction' +) +parser.add_argument( + '--soilNumEle_y', type=int, help='number of elements in y direction' +) +parser.add_argument( + '--soilNumEle_z', type=int, help='number of elements in z direction' +) parser.add_argument('--Vs', type=float, help='shear wave velocity') parser.add_argument('--nu', type=float, help='poisson ratio') parser.add_argument('--rho', type=float, help='density') @@ -40,9 +52,15 @@ parser.add_argument('--DRM_originY', type=float, help='DRM origin Y') parser.add_argument('--DRM_originZ', type=float, help='DRM origin Z') parser.add_argument('--DRM_Software', type=str, help='DRM software') -parser.add_argument('--DRM_CoordinateTransformation', type=bool, help='DRM coordinate transformation') -parser.add_argument('--Absorb_HaveAbsorbingElements', type=str, help='Absorbing elements') -parser.add_argument('--Absorb_NumAbsorbingElements', type=int, help='Number of absorbing elements') +parser.add_argument( + '--DRM_CoordinateTransformation', type=bool, help='DRM coordinate transformation' +) +parser.add_argument( + '--Absorb_HaveAbsorbingElements', type=str, help='Absorbing elements' +) +parser.add_argument( + '--Absorb_NumAbsorbingElements', type=int, help='Number of absorbing elements' +) parser.add_argument('--Absorb_rayleighAlpha', type=float, help='Rayleigh alpha') parser.add_argument('--Absorb_rayleighBeta', type=float, help='Rayleigh beta') parser.add_argument('--Absorb_type', type=str, help='Absorbing type') @@ -61,108 +79,110 @@ # ============================================================================ # Cores information # ============================================================================ -regcores = args.soilcores -pmlcores = args.absorbingcores -drmcores = args.drmcores +regcores = args.soilcores +pmlcores = args.absorbingcores +drmcores = args.drmcores structurecores = 0 -AnalysisType = "PML" # options: "DRMFIXED", PMLDRM", "ASDADRM" -Target = "Soil-with-structure" # options: "Soil", "Structure", "Soil-with-structure" -PartitionAlgorithm = args.PartitionAlgorithm # options: "kd-tree", "metis" +AnalysisType = 'PML' # options: "DRMFIXED", PMLDRM", "ASDADRM" +Target = 'Soil-with-structure' # options: "Soil", "Structure", "Soil-with-structure" +PartitionAlgorithm = args.PartitionAlgorithm # options: "kd-tree", "metis" # ============================================================================ # Structure information # 
============================================================================ -StructureType = "STEEL" # Options: STEEL, CONCRETE, Custom -NStory = 10 # number of stories above ground level -NBay = 4 # number of bays in X direction -NBayZ = 4 # number of bays in Y direction -StartNodeX = -9.0 # X coordinate of the first node -StartNodeY = -9.0 # Y coordinate of the first node -StartNodeZ = -1.5 # Z coordinate of the first node -meter = 1.0 # meter to specified unit conversion (SI unit) -ft = 0.3048 # feet to meter conversion (SI unit) -LCol = 3*meter # column height (parallel to Z axis) -LBeam = 4.5*meter # beam length (parallel to X axis) -LGird = 4.5*meter # girder length (parallel to Y axis) -SectionType = "Elastic" # options: Elastic, FiberSection -HaveStructure = "NO" # options: "YES", "NO" +StructureType = 'STEEL' # Options: STEEL, CONCRETE, Custom +NStory = 10 # number of stories above ground level +NBay = 4 # number of bays in X direction +NBayZ = 4 # number of bays in Y direction +StartNodeX = -9.0 # X coordinate of the first node +StartNodeY = -9.0 # Y coordinate of the first node +StartNodeZ = -1.5 # Z coordinate of the first node +meter = 1.0 # meter to specified unit conversion (SI unit) +ft = 0.3048 # feet to meter conversion (SI unit) +LCol = 3 * meter # column height (parallel to Z axis) +LBeam = 4.5 * meter # beam length (parallel to X axis) +LGird = 4.5 * meter # girder length (parallel to Y axis) +SectionType = 'Elastic' # options: Elastic, FiberSection +HaveStructure = 'NO' # options: "YES", "NO" # ============================================================================ # Soil information # ============================================================================ -dx = args.soilwidth_x/args.soilNumEle_x -dy = args.soilwidth_y/args.soilNumEle_y -dz = args.soilwidth_z/args.soilNumEle_z -lx = args.soilwidth_x -ly = args.soilwidth_y -lz = args.soilwidth_z +dx = args.soilwidth_x / args.soilNumEle_x +dy = args.soilwidth_y / args.soilNumEle_y +dz = args.soilwidth_z / args.soilNumEle_z +lx = args.soilwidth_x +ly = args.soilwidth_y +lz = args.soilwidth_z drmthicknessx = dx drmthicknessy = dy drmthicknessz = dz -numdrmlayers = args.DRM_numLayers -llx = lx + 2*numdrmlayers*drmthicknessx -lly = ly + 2*numdrmlayers*drmthicknessy -llz = lz + 1*numdrmlayers*drmthicknessz +numdrmlayers = args.DRM_numLayers +llx = lx + 2 * numdrmlayers * drmthicknessx +lly = ly + 2 * numdrmlayers * drmthicknessy +llz = lz + 1 * numdrmlayers * drmthicknessz -nx = args.soilNumEle_x -ny = args.soilNumEle_y -nz = args.soilNumEle_z +nx = args.soilNumEle_x +ny = args.soilNumEle_y +nz = args.soilNumEle_z # modelCreator.py: error: unrecognized arguments: --soilwidth_x 100 --soilwidth_y 100 --soilwidth_z 30 --soilNumEle_x 40 --soilNumEle_y 40 --soilNumEle_z 12 --Vs 200 --nu 0.3 --rho 2000 --DRM_filePath /mnt/d/Projects/OpenSeesProjects/EEUQ_DRMBackend/DRMload/DRMLoad.h5drm --DRM_numLayers 3 --DRM_laodFactor 1 --DRM_crdScale 1 --DRM_tolernace 1 --DRM_T00 0.0 --DRM_T01 1.0 --DRM_T02 0.0 --DRM_T10 1.0 --DRM_T11 0.0 --DRM_T12 0.0 --DRM_T20 0.0 --DRM_T21 0.0 --DRM_T22 -1.0 --DRM_originX 0 --DRM_originY 0 --DRM_originZ 0 --DRM_Sofrware ShakerMaker --Absorb_HaveAbsorbingElements NO --Absorb_NumAbsorbingElements 2 --Absorb_type PML --Absorb_rayleighAlpha 0.0 --Absorb_rayleighBeta 0.0 --dt 0.005 --t_final 60 --recording_dt 0.005 --outputDir /home/amnp95/Documents/EE-UQ/LocalWorkDir/DRM_Model # ============================================================================ # PML information # 
============================================================================ -AbsorbingElements = args.Absorb_type # could be "ASDA" or "NORMAL" -numpmllayers = args.Absorb_NumAbsorbingElements -pmlthicknessx = dx -pmlthicknessy = dy -pmlthicknessz = dz -pmltotalthickness = numpmllayers*pmlthicknessx -HaveAbsorbingElements = args.Absorb_HaveAbsorbingElements # options: "YES", "NO" +AbsorbingElements = args.Absorb_type # could be "ASDA" or "NORMAL" +numpmllayers = args.Absorb_NumAbsorbingElements +pmlthicknessx = dx +pmlthicknessy = dy +pmlthicknessz = dz +pmltotalthickness = numpmllayers * pmlthicknessx +HaveAbsorbingElements = args.Absorb_HaveAbsorbingElements # options: "YES", "NO" Absorbing_rayleigh_alpha = args.Absorb_rayleighAlpha -Absorbing_rayleigh_beta = args.Absorb_rayleighBeta +Absorbing_rayleigh_beta = args.Absorb_rayleighBeta # ============================================================================ # General information # ============================================================================ -tmpdir = args.outputDir -meshdir = f"{tmpdir}/Mesh" -outputdir = f"{tmpdir}/Results" -DRMFile = args.DRM_filePath +tmpdir = args.outputDir +meshdir = f'{tmpdir}/Mesh' +outputdir = f'{tmpdir}/Results' +DRMFile = args.DRM_filePath # ============================================================================ # Embedding foundation # ============================================================================ -EmbeddingFoundation = "NO" # options: "YES", "NO" +EmbeddingFoundation = 'NO' # options: "YES", "NO" EmbeddedFoundation = { - "xmax": 10.0, - "xmin": -10.0, - "ymax": 10.0, - "ymin": -10.0, - "zmax": 0.0, - "zmin": -5.0, + 'xmax': 10.0, + 'xmin': -10.0, + 'ymax': 10.0, + 'ymin': -10.0, + 'zmax': 0.0, + 'zmin': -5.0, } # ============================================================================ # Fondation information # ============================================================================ -HaveFoundation = "NO" -AttachFoundation = "NO" +HaveFoundation = 'NO' +AttachFoundation = 'NO' foundationBlocks = [] # adding a block -foundationBlocks.append({ - "matTag": 1, - "xmax": 10.0, - "xmin": -10.0, - "ymax": 10.0, - "ymin": -10.0, - "zmax": -1.5, - "zmin": -4.5, - "Xmeshsize" : 1.0, - "Ymeshsize" : 1.0, - "Zmeshsize" : 1.0, -}) +foundationBlocks.append( + { + 'matTag': 1, + 'xmax': 10.0, + 'xmin': -10.0, + 'ymax': 10.0, + 'ymin': -10.0, + 'zmax': -1.5, + 'zmin': -4.5, + 'Xmeshsize': 1.0, + 'Ymeshsize': 1.0, + 'Zmeshsize': 1.0, + } +) # ============================================================================ @@ -170,48 +190,50 @@ # ============================================================================ pilelist = [] -x = np.arange(-7.0, 7.0+1e-6, 3.5) -y = np.arange(-7.0, 7.0+1e-6, 3.5) +x = np.arange(-7.0, 7.0 + 1e-6, 3.5) +y = np.arange(-7.0, 7.0 + 1e-6, 3.5) x, y = np.meshgrid(x, y) x = x.flatten() y = y.flatten() -HavePiles = "NO" +HavePiles = 'NO' for i in range(len(x)): - pilelist.append({ - "xtop": x[i], - "ytop": y[i], - "ztop": -3.0, - "xbottom": x[i], - "ybottom": y[i], - "zbottom": -10.0, - "numberofElements": 6, - }) + pilelist.append( + { + 'xtop': x[i], + 'ytop': y[i], + 'ztop': -3.0, + 'xbottom': x[i], + 'ybottom': y[i], + 'zbottom': -10.0, + 'numberofElements': 6, + } + ) # ============================================================================ # DRM information # ============================================================================ # DRM information DRMinformation = { - "DRM_Provider_Software" : args.DRM_Software, - "factor": 
args.DRM_loadFactor, - "crd_scale": args.DRM_crdScale, - "distance_tolerance": args.DRM_tolerance, - "do_coordinate_transformation": 1, - "T00":args.DRM_T00, - "T01":args.DRM_T01, - "T02":args.DRM_T02, - "T10":args.DRM_T10, - "T11":args.DRM_T11, - "T12":args.DRM_T12, - "T20":args.DRM_T20, - "T21":args.DRM_T21, - "T22":args.DRM_T22, - "originX": args.DRM_originX, - "originY": args.DRM_originY, - "originZ": args.DRM_originZ, - "DRM_Location": args.DRM_Location, + 'DRM_Provider_Software': args.DRM_Software, + 'factor': args.DRM_loadFactor, + 'crd_scale': args.DRM_crdScale, + 'distance_tolerance': args.DRM_tolerance, + 'do_coordinate_transformation': 1, + 'T00': args.DRM_T00, + 'T01': args.DRM_T01, + 'T02': args.DRM_T02, + 'T10': args.DRM_T10, + 'T11': args.DRM_T11, + 'T12': args.DRM_T12, + 'T20': args.DRM_T20, + 'T21': args.DRM_T21, + 'T22': args.DRM_T22, + 'originX': args.DRM_originX, + 'originY': args.DRM_originY, + 'originZ': args.DRM_originZ, + 'DRM_Location': args.DRM_Location, } @@ -223,9 +245,9 @@ recording_dt = args.recording_dt analysisinfo = { - "dt": dt, - "t_final": t_final, - "recording_dt": recording_dt, + 'dt': dt, + 't_final': t_final, + 'recording_dt': recording_dt, } @@ -234,9 +256,9 @@ # ============================================================================ # creating the material information materialInfo = { - "Vs": args.Vs, - "nu": args.nu, - "rho": args.rho, + 'Vs': args.Vs, + 'nu': args.nu, + 'rho': args.rho, } # ============================================================================ @@ -248,136 +270,137 @@ shutil.rmtree(meshdir, ignore_errors=False) if os.path.exists(outputdir): - shutil.rmtree(outputdir, ignore_errors=False) + shutil.rmtree(outputdir, ignore_errors=False) # creating directories using shutil os.makedirs(meshdir) os.makedirs(outputdir) - # ============================================================================ # Creating the mesh # ============================================================================ info = { - "xwidth": lx, - "ywidth": ly, - "zwidth": lz, - "Xmeshsize": dx, - "Ymeshsize": dy, - "Zmeshsize": dz, - "PMLThickness": [pmlthicknessx, pmlthicknessy, pmlthicknessz], - "numPMLLayers": numpmllayers, - "DRMThickness": [drmthicknessx, drmthicknessy, drmthicknessz], - "numDrmLayers": numdrmlayers, - "reg_num_cores": regcores, - "DRM_num_cores": drmcores, - "PML_num_cores": pmlcores, - "Structure_num_cores": structurecores, - "Dir": meshdir, - "OutputDir": outputdir, - "AbsorbingElements": AbsorbingElements, - "DRMfile": DRMFile, - "EmbeddingFoundation": EmbeddingFoundation, - "EmbeddedFoundationDict": EmbeddedFoundation, - "HaveFoundation": HaveFoundation, - "foundationBlocks": foundationBlocks, - "pilelist": pilelist, - "HavePiles": HavePiles, - "HaveStructure": HaveStructure, - "AttachFoundation": AttachFoundation, - "DRMinformation": DRMinformation, - "AnalysisInfo": analysisinfo, - "PartitionAlgorithm": PartitionAlgorithm, - "Absorbing_rayleigh_alpha": Absorbing_rayleigh_alpha, - "Absorbing_rayleigh_beta": Absorbing_rayleigh_beta, + 'xwidth': lx, + 'ywidth': ly, + 'zwidth': lz, + 'Xmeshsize': dx, + 'Ymeshsize': dy, + 'Zmeshsize': dz, + 'PMLThickness': [pmlthicknessx, pmlthicknessy, pmlthicknessz], + 'numPMLLayers': numpmllayers, + 'DRMThickness': [drmthicknessx, drmthicknessy, drmthicknessz], + 'numDrmLayers': numdrmlayers, + 'reg_num_cores': regcores, + 'DRM_num_cores': drmcores, + 'PML_num_cores': pmlcores, + 'Structure_num_cores': structurecores, + 'Dir': meshdir, + 'OutputDir': outputdir, + 'AbsorbingElements': 
AbsorbingElements, + 'DRMfile': DRMFile, + 'EmbeddingFoundation': EmbeddingFoundation, + 'EmbeddedFoundationDict': EmbeddedFoundation, + 'HaveFoundation': HaveFoundation, + 'foundationBlocks': foundationBlocks, + 'pilelist': pilelist, + 'HavePiles': HavePiles, + 'HaveStructure': HaveStructure, + 'AttachFoundation': AttachFoundation, + 'DRMinformation': DRMinformation, + 'AnalysisInfo': analysisinfo, + 'PartitionAlgorithm': PartitionAlgorithm, + 'Absorbing_rayleigh_alpha': Absorbing_rayleigh_alpha, + 'Absorbing_rayleigh_beta': Absorbing_rayleigh_beta, } numcells, numpoints = DRM_PML_Foundation_Meshgenrator(info) -print(f"Number of cells: {numcells}") -print(f"Number of points: {numpoints}") +print(f'Number of cells: {numcells}') +print(f'Number of points: {numpoints}') # ============================================================================ # Writing the information file # ============================================================================ info = { - "soilfoundation_num_cells": numcells, - "soilfoundation_num_points": numpoints, - "AnalysisType": AnalysisType, - "regcores": regcores, - "pmlcores": pmlcores, - "drmcores": drmcores, - "structurecores": structurecores, - "StructureType": StructureType, - "NStory": NStory, - "NBay": NBay, - "NBayZ": NBayZ, - "StartNodeX": StartNodeX, - "StartNodeY": StartNodeY, - "StartNodeZ": StartNodeZ, - "meter": meter, - "ft": ft, - "LCol": LCol, - "LBeam": LBeam, - "LGird": LGird, - "SectionType": SectionType, - "dx": dx, - "dy": dy, - "dz": dz, - "llx": llx, - "lly": lly, - "llz": llz, - "drmthicknessx": drmthicknessx, - "drmthicknessy": drmthicknessy, - "drmthicknessz": drmthicknessz, - "numdrmlayers": numdrmlayers, - "lx": lx, - "ly": ly, - "lz": lz, - "nx": nx, - "ny": ny, - "nz": nz, - "AbsorbingElements": AbsorbingElements, - "numpmllayers": numpmllayers, - "pmlthicknessx": pmlthicknessx, - "pmlthicknessy": pmlthicknessy, - "pmlthicknessz": pmlthicknessz, - "pmltotalthickness": pmltotalthickness, - "HaveAbsorbingElements": HaveAbsorbingElements, - "meshdir": meshdir, - "outputdir": outputdir, - "DRMFile": args.DRM_filePath.split("/")[-1], - "EmbeddingFoundation": EmbeddingFoundation, - "EmbeddedFoundation": EmbeddedFoundation, - "HaveFoundation": HaveFoundation, - "foundationBlocks": foundationBlocks, - "pilelist": pilelist, - "HavePiles": HavePiles, - "HaveStructure": HaveStructure, - "DRMinformation": DRMinformation, - "Absorbing_rayleigh_alpha": Absorbing_rayleigh_alpha, - "Absorbing_rayleigh_beta": Absorbing_rayleigh_beta, - "AnalysisInfo": analysisinfo, - "MaterialInfo": materialInfo, - + 'soilfoundation_num_cells': numcells, + 'soilfoundation_num_points': numpoints, + 'AnalysisType': AnalysisType, + 'regcores': regcores, + 'pmlcores': pmlcores, + 'drmcores': drmcores, + 'structurecores': structurecores, + 'StructureType': StructureType, + 'NStory': NStory, + 'NBay': NBay, + 'NBayZ': NBayZ, + 'StartNodeX': StartNodeX, + 'StartNodeY': StartNodeY, + 'StartNodeZ': StartNodeZ, + 'meter': meter, + 'ft': ft, + 'LCol': LCol, + 'LBeam': LBeam, + 'LGird': LGird, + 'SectionType': SectionType, + 'dx': dx, + 'dy': dy, + 'dz': dz, + 'llx': llx, + 'lly': lly, + 'llz': llz, + 'drmthicknessx': drmthicknessx, + 'drmthicknessy': drmthicknessy, + 'drmthicknessz': drmthicknessz, + 'numdrmlayers': numdrmlayers, + 'lx': lx, + 'ly': ly, + 'lz': lz, + 'nx': nx, + 'ny': ny, + 'nz': nz, + 'AbsorbingElements': AbsorbingElements, + 'numpmllayers': numpmllayers, + 'pmlthicknessx': pmlthicknessx, + 'pmlthicknessy': pmlthicknessy, + 'pmlthicknessz': 
pmlthicknessz, + 'pmltotalthickness': pmltotalthickness, + 'HaveAbsorbingElements': HaveAbsorbingElements, + 'meshdir': meshdir, + 'outputdir': outputdir, + 'DRMFile': args.DRM_filePath.split('/')[-1], + 'EmbeddingFoundation': EmbeddingFoundation, + 'EmbeddedFoundation': EmbeddedFoundation, + 'HaveFoundation': HaveFoundation, + 'foundationBlocks': foundationBlocks, + 'pilelist': pilelist, + 'HavePiles': HavePiles, + 'HaveStructure': HaveStructure, + 'DRMinformation': DRMinformation, + 'Absorbing_rayleigh_alpha': Absorbing_rayleigh_alpha, + 'Absorbing_rayleigh_beta': Absorbing_rayleigh_beta, + 'AnalysisInfo': analysisinfo, + 'MaterialInfo': materialInfo, } -infowriter(info,meshdir) +infowriter(info, meshdir) + # ============================================================================ # copy the related file as model.tcl to the current directory # ============================================================================ def copy_file(source_path, destination_path): with open(destination_path, 'wb') as dst_file: - with open(f"{meshdir}/Modelinfo.tcl", 'rb') as src_file: + with open(f'{meshdir}/Modelinfo.tcl', 'rb') as src_file: dst_file.write(src_file.read()) with open(source_path, 'rb') as src_file: dst_file.write(src_file.read()) # delete the model file if exists -if os.path.exists("./model.tcl"): - os.remove("./model.tcl") +if os.path.exists('./model.tcl'): + os.remove('./model.tcl') -if Target == "Soil-with-structure": - copy_file(f"MeshGenerator/models/Soil_with_structure.tcl", f"{meshdir}/model.tcl") +if Target == 'Soil-with-structure': + copy_file( + f'MeshGenerator/models/Soil_with_structure.tcl', f'{meshdir}/model.tcl' + ) # %% diff --git a/modules/tools/ShakerMaker/ShakerMakersubmitjob.py b/modules/tools/ShakerMaker/ShakerMakersubmitjob.py index 47f9a0cd9..24d3b2355 100644 --- a/modules/tools/ShakerMaker/ShakerMakersubmitjob.py +++ b/modules/tools/ShakerMaker/ShakerMakersubmitjob.py @@ -40,7 +40,7 @@ systemcodepath = args.tapisfolder -allocation = f"-A {args.allocation}" +allocation = f'-A {args.allocation}' faultinfo = metadata['faultdata'] faultfiles = faultinfo['Faultfilenames'] @@ -214,7 +214,7 @@ print('please wait for the job to finish') # noqa: T201 print('you can check the job status through the designsafe portal') # noqa: T201 if job_info.status == 'BLOCKED': - print('Job is blocked for now') + print('Job is blocked for now') print('Please wait for the job to be staged') print('This can take several hours') print('you can check the job status through the designsafe portal')
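A compact way to read the long nested if/else in the absorbing-element hunk earlier in this patch: the element centre is tested once per axis against the half-widths in x and y and against -ASDA_zwidth in z, and the three outcomes are concatenated into the short region codes ('BLK', 'RF', 'B', ...) that key into ASDAelem_type. The sketch below is an editorial illustration under that reading, not code from the patch; the helper name asda_region_code is hypothetical, while ASDA_xwidth, ASDA_ywidth, ASDA_zwidth, ele_center, mesh and ASDAelem_type are the names the script itself uses.

def asda_region_code(ele_center, ASDA_xwidth, ASDA_ywidth, ASDA_zwidth):
    """Compose the ASDA region code from three independent axis tests."""
    x, y, z = ele_center
    code = ''
    if z < -ASDA_zwidth:
        code += 'B'                      # bottom layer
    if x < -ASDA_xwidth / 2.0:
        code += 'L'                      # left side
    elif x > ASDA_xwidth / 2.0:
        code += 'R'                      # right side
    if y < -ASDA_ywidth / 2.0:
        code += 'K'                      # back side
    elif y > ASDA_ywidth / 2.0:
        code += 'F'                      # front side
    return code                          # '' is the top-middle-middle case, which the hunk above leaves unassigned


# usage, equivalent to one pass of the classification loop shown in the hunk:
# mesh['ASDA_type'][i] = ASDAelem_type[
#     asda_region_code(ele_center, ASDA_xwidth, ASDA_ywidth, ASDA_zwidth)
# ]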