diff --git a/modules/Workflow/WorkflowApplications.json b/modules/Workflow/WorkflowApplications.json
index c9fe2029d..a45611a02 100644
--- a/modules/Workflow/WorkflowApplications.json
+++ b/modules/Workflow/WorkflowApplications.json
@@ -286,8 +286,23 @@
},
{
"Name": "UserInputShakeMap",
- "ExecutablePath": null,
+ "ExecutablePath": "applications/createEVENT/shakeMapEvent/shakeMapEvent.py",
"ApplicationSpecificInputs": [
+ {
+ "id": "Directory",
+ "type": "path",
+        "description": "Path to the folder containing the shake map files"
+ },
+ {
+ "id": "EventPath",
+ "type": "string",
+ "description": "Path to the shake map event"
+ },
+ {
+ "id": "IntensityMeasureType",
+ "type": "list",
+ "description": "List of intensity measures"
+ }
]
},
{
@@ -360,6 +375,18 @@
{
"Name": "SiteSpecifiedEvents",
"ExecutablePath": "applications/performRegionalMapping/SiteSpecifiedEvents/SSE.py",
+ "RunsParallel": true,
+ "ApplicationSpecificInputs": [
+ {
+ "id": "filenameEVENTgrid",
+ "type": "path",
+ "description": "Path to file containing location information on each event file"
+ }
+ ]
+ },
+ {
+ "Name": "GISSpecifiedEvents",
+ "ExecutablePath": "applications/performRegionalMapping/GISSpecifiedEvents/GISSpecifiedEvent.py",
"RunsParallel": true,
"ApplicationSpecificInputs": [
{
diff --git a/modules/createEVENT/CMakeLists.txt b/modules/createEVENT/CMakeLists.txt
index 8d5d0043b..516cd0baf 100644
--- a/modules/createEVENT/CMakeLists.txt
+++ b/modules/createEVENT/CMakeLists.txt
@@ -1,34 +1,33 @@
-add_subdirectory(common)
-add_subdirectory(CFDEvent)
-add_subdirectory(GeoClawOpenFOAM)
-add_subdirectory(DEDM_HRP)
-add_subdirectory(hazardBasedEvent)
-add_subdirectory(multiplePEER)
-add_subdirectory(multipleSimCenter)
-add_subdirectory(siteResponse)
-add_subdirectory(windTunnelExperiment)
-add_subdirectory(LowRiseTPU)
-add_subdirectory(HighRiseTPU)
-add_subdirectory(pointWindSpeed)
-add_subdirectory(LLNL_SW4)
-add_subdirectory(SimCenterEvent)
-add_subdirectory(ASCE7_WindSpeed)
-add_subdirectory(stochasticGroundMotion)
-add_subdirectory(stochasticWind)
-add_subdirectory(groundMotionIM)
-add_subdirectory(uniformPEER)
-add_subdirectory(experimentalWindForces)
-add_subdirectory(experimentalWindPressures)
-add_subdirectory(EmptyDomainCFD)
-add_subdirectory(IsolatedBuildingCFD)
-add_subdirectory(SurroundedBuildingCFD)
-add_subdirectory(coupledDigitalTwin)
-add_subdirectory(physicsBasedMotion)
-add_subdirectory(M9)
-add_subdirectory(Istanbul)
-add_subdirectory(MPM)
-add_subdirectory(stochasticWave)
-add_subdirectory(TaichiEvent)
-add_subdirectory(CelerisTaichiEvent)
-
-
+add_subdirectory(common)
+add_subdirectory(CFDEvent)
+add_subdirectory(GeoClawOpenFOAM)
+add_subdirectory(DEDM_HRP)
+add_subdirectory(hazardBasedEvent)
+add_subdirectory(multiplePEER)
+add_subdirectory(multipleSimCenter)
+add_subdirectory(siteResponse)
+add_subdirectory(windTunnelExperiment)
+add_subdirectory(LowRiseTPU)
+add_subdirectory(HighRiseTPU)
+add_subdirectory(pointWindSpeed)
+add_subdirectory(LLNL_SW4)
+add_subdirectory(SimCenterEvent)
+add_subdirectory(ASCE7_WindSpeed)
+add_subdirectory(stochasticGroundMotion)
+add_subdirectory(stochasticWind)
+add_subdirectory(groundMotionIM)
+add_subdirectory(uniformPEER)
+add_subdirectory(experimentalWindForces)
+add_subdirectory(experimentalWindPressures)
+add_subdirectory(EmptyDomainCFD)
+add_subdirectory(IsolatedBuildingCFD)
+add_subdirectory(SurroundedBuildingCFD)
+add_subdirectory(coupledDigitalTwin)
+add_subdirectory(physicsBasedMotion)
+add_subdirectory(M9)
+add_subdirectory(Istanbul)
+add_subdirectory(MPM)
+add_subdirectory(stochasticWave)
+add_subdirectory(TaichiEvent)
+add_subdirectory(CelerisTaichiEvent)
+
diff --git a/modules/createEVENT/shakeMapEvent/CMakeLists.txt b/modules/createEVENT/shakeMapEvent/CMakeLists.txt
new file mode 100644
index 000000000..88460802e
--- /dev/null
+++ b/modules/createEVENT/shakeMapEvent/CMakeLists.txt
@@ -0,0 +1 @@
+simcenter_add_python_script(SCRIPT shakeMapEvent.py)
diff --git a/modules/createEVENT/shakeMapEvent/shakeMapEvent.py b/modules/createEVENT/shakeMapEvent/shakeMapEvent.py
new file mode 100644
index 000000000..7956743a4
--- /dev/null
+++ b/modules/createEVENT/shakeMapEvent/shakeMapEvent.py
@@ -0,0 +1,114 @@
+# # noqa: INP001, D100
+# Copyright (c) 2018 Leland Stanford Junior University
+# Copyright (c) 2018 The Regents of the University of California
+#
+# This file is part of the SimCenter Backend Applications
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# You should have received a copy of the BSD 3-Clause License along with
+# this file. If not, see .
+#
+# Contributors:
+# Stevan Gavrilovic
+#
+
+import argparse
+import ast
+import xml.etree.ElementTree as ET
+import geopandas as gpd
+from shapely.geometry import Point
+from pathlib import Path
+
+
+def create_shakemap_event(eventDirectory, eventPath, IMTypes): # noqa: D103
+    IMTypesList = ast.literal_eval(IMTypes)
+
+ print("Creating shakemap event")
+
+ xml_file_path = Path(eventDirectory) / eventPath / 'grid.xml'
+
+ # Parse the XML file
+ tree = ET.parse(xml_file_path)
+ root = tree.getroot()
+
+ # Find the grid_data element
+ grid_data = root.find('{http://earthquake.usgs.gov/eqcenter/shakemap}grid_data')
+
+ # Prepare lists to store data
+ points = []
+ attributes = []
+
+    # Column index of each supported intensity measure in a grid_data row
+    attribute_mapping = {
+        'PGA': 2,
+        'PGV': 3,
+        'MMI': 4,
+        'PSA03': 5,
+        'PSA10': 6,
+        'PSA30': 7,
+    }
+
+    # Parse the grid data
+    for line in grid_data.text.strip().split('\n'):
+        values = line.split()
+        lon, lat = float(values[0]), float(values[1])
+        point = Point(lon, lat)
+        points.append(point)
+
+        # Store only the specified attributes
+        attr = {}
+        for im_type in IMTypesList:
+            if im_type in attribute_mapping:
+                attr[im_type] = float(values[attribute_mapping[im_type]])
+        attributes.append(attr)
+
+ # Create GeoDataFrame
+ gdf = gpd.GeoDataFrame(attributes, geometry=points, crs="EPSG:4326")
+
+ # Display the first few rows
+ print("Saving shakemap to gpkg")
+
+ # Save as a GeoPackage file
+ gdf_path = Path(eventDirectory) / 'EventGrid.gpkg'
+ gdf.to_file(gdf_path, driver="GPKG")
+
+ return
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input', help='Input file')
+ parser.add_argument('--Directory', help='Directory path')
+ parser.add_argument('--EventPath', help='Event path')
+ parser.add_argument('--IntensityMeasureType', help='types of intensity measures')
+
+
+ args = parser.parse_args()
+
+ create_shakemap_event(args.Directory, args.EventPath, args.IntensityMeasureType)
diff --git a/modules/performRegionalMapping/CMakeLists.txt b/modules/performRegionalMapping/CMakeLists.txt
index d2e2520dd..000a8382e 100644
--- a/modules/performRegionalMapping/CMakeLists.txt
+++ b/modules/performRegionalMapping/CMakeLists.txt
@@ -1,2 +1,3 @@
add_subdirectory(NearestNeighborEvents)
add_subdirectory(SiteSpecifiedEvents)
+add_subdirectory(GISSpecifiedEvents)
diff --git a/modules/performRegionalMapping/GISSpecifiedEvents/CMakeLists.txt b/modules/performRegionalMapping/GISSpecifiedEvents/CMakeLists.txt
new file mode 100644
index 000000000..bc922b273
--- /dev/null
+++ b/modules/performRegionalMapping/GISSpecifiedEvents/CMakeLists.txt
@@ -0,0 +1,2 @@
+simcenter_add_python_script(SCRIPT GISSpecifiedEvent.py)
+simcenter_add_python_script(SCRIPT RasterEvent.py)
diff --git a/modules/performRegionalMapping/GISSpecifiedEvents/GISSpecifiedEvent.py b/modules/performRegionalMapping/GISSpecifiedEvents/GISSpecifiedEvent.py
new file mode 100644
index 000000000..67aaef869
--- /dev/null
+++ b/modules/performRegionalMapping/GISSpecifiedEvents/GISSpecifiedEvent.py
@@ -0,0 +1,89 @@
+# # noqa: INP001, D100
+# Copyright (c) 2018 Leland Stanford Junior University
+# Copyright (c) 2018 The Regents of the University of California
+#
+# This file is part of the SimCenter Backend Applications
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# You should have received a copy of the BSD 3-Clause License along with
+# this file. If not, see .
+#
+# Contributors:
+# Stevan Gavrilovic
+#
+
+import argparse
+from pathlib import Path
+import xml.etree.ElementTree as ET
+
+from RasterEvent import create_event as create_raster_event
+
+def is_raster_file(filename):
+ # Define a set of common raster file extensions
+ raster_extensions = {'.jpg', '.jpeg', '.png', '.bmp', '.gif', '.tiff', '.tif'}
+
+ # Create a Path object from the filename
+ file_path = Path(filename)
+
+ # Extract the file extension and check if it is in the set of raster extensions
+ return file_path.suffix.lower() in raster_extensions
+
+def is_xml_file(filename):
+ # Check if the file has an .xml extension
+ if not filename.lower().endswith('.xml'):
+ return False
+
+ # Try to parse the file as XML
+ try:
+ ET.parse(filename)
+ return True
+    except (ET.ParseError, OSError):
+ return False
+
+def create_event(asset_file : str, event_grid_file: str): # noqa: C901, N803, D103
+
+ if is_raster_file(event_grid_file):
+ return create_raster_event(asset_file, event_grid_file)
+ elif is_xml_file(event_grid_file):
+ # Here you would call a function to handle XML files
+ # For now, we'll just raise a NotImplementedError
+ raise NotImplementedError("XML file handling is not yet implemented.")
+ else:
+ raise ValueError(f"{event_grid_file} is not a raster. Only rasters are currently supported.")
+
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--assetFile')
+ parser.add_argument('--filenameEVENTgrid')
+ args = parser.parse_args()
+
+ create_event(
+ args.assetFile, args.filenameEVENTgrid
+ )
diff --git a/modules/performRegionalMapping/GISSpecifiedEvents/RasterEvent.py b/modules/performRegionalMapping/GISSpecifiedEvents/RasterEvent.py
new file mode 100644
index 000000000..a7dcddbc0
--- /dev/null
+++ b/modules/performRegionalMapping/GISSpecifiedEvents/RasterEvent.py
@@ -0,0 +1,190 @@
+# # noqa: INP001, D100
+# Copyright (c) 2018 Leland Stanford Junior University
+# Copyright (c) 2018 The Regents of the University of California
+#
+# This file is part of the SimCenter Backend Applications
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# You should have received a copy of the BSD 3-Clause License along with
+# this file. If not, see .
+#
+# Contributors:
+# Stevan Gavrilovic
+#
+
+import argparse
+import json, csv
+from pathlib import Path
+import rasterio
+import pyproj
+from rasterio.transform import rowcol
+
+
+def sample_raster_at_latlon(src, lat, lon):
+ # Get the row and column indices in the raster
+ row, col = rowcol(src.transform, lon, lat) # Note the order: lon, lat
+
+ # Ensure the indices are within the bounds of the raster
+ if row < 0 or row >= src.height or col < 0 or col >= src.width:
+ raise IndexError("Transformed coordinates are out of raster bounds")
+
+ # Read the raster value at the given row and column
+    raster_value = src.read(1, window=((row, row + 1), (col, col + 1)))[0, 0]
+
+ return raster_value
+
+def create_event(asset_file, event_grid_file): # noqa: C901, N803, D103
+
+
+ # read the event grid data file
+ event_grid_path = Path(event_grid_file).resolve()
+ event_dir = event_grid_path.parent
+ event_grid_file = event_grid_path.name
+
+ src = rasterio.open(event_grid_path)
+
+ # Get the raster's CRS
+ raster_crs = pyproj.CRS.from_wkt(src.crs.to_wkt())
+
+ # Define the source CRS (EPSG:4326)
+ src_crs = pyproj.CRS('EPSG:4326')
+
+ # Transform the lat/lon to the raster's coordinate system
+ transformer = pyproj.Transformer.from_crs(src_crs, raster_crs, always_xy=True)
+
+ # iterate through the assets and store the selected events in the AIM
+ with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
+ asset_dict = json.load(f)
+
+ data_final = [
+ ['GP_file','Latitude','Longitude'],
+ ]
+
+ # Iterate through each asset
+ for asset in asset_dict:
+ asset_id = asset['id']
+ asset_file_path = asset['file']
+
+ # Load the corresponding file for each asset
+        with open(asset_file_path, encoding='utf-8') as asset_fd:
+
+            # Load the asset data
+            asset_data = json.load(asset_fd)
+
+ im_tag = asset_data['RegionalEvent']['intensityMeasures'][0]
+
+ # Extract the latitude and longitude
+ lat = float(asset_data['GeneralInformation']['location']['latitude'])
+ lon = float(asset_data['GeneralInformation']['location']['longitude'])
+
+ # Transform the coordinates
+ lon_transformed, lat_transformed = transformer.transform(lon, lat)
+
+ # Check if the transformed coordinates are within the raster bounds
+ bounds = src.bounds
+ if (bounds.left <= lon_transformed <= bounds.right and
+ bounds.bottom <= lat_transformed <= bounds.top):
+ try:
+ val = sample_raster_at_latlon(src=src,
+ lat=lat_transformed,
+ lon=lon_transformed)
+
+ data = [
+ [im_tag],
+ [val]
+ ]
+
+ # Save the simcenter file name
+ file_name = f'Site_{asset_id}.csvx{0}x{int(asset_id):05d}'
+
+ data_final.append([file_name,lat,lon])
+
+ csv_save_path = event_dir / f'Site_{asset_id}.csv'
+ with open(csv_save_path, 'w', newline='') as file:
+ # Create a CSV writer object
+ writer = csv.writer(file)
+
+ # Write the data to the CSV file
+ writer.writerows(data)
+
+ # prepare a dictionary of events
+ event_list_json = [[file_name, 1.0]]
+
+ asset_data['Events'] = [{}]
+ asset_data['Events'][0] = {
+ 'EventFolderPath': str(event_dir),
+ 'Events': event_list_json,
+ 'type': 'intensityMeasure',
+ }
+
+ with open(asset_file_path, 'w', encoding='utf-8') as f: # noqa: PTH123
+ json.dump(asset_data, f, indent=2)
+
+
+ except IndexError as e:
+ print(f"Error for asset ID {asset_id}: {e}")
+ else:
+ print(f"Asset ID: {asset_id} is outside the raster bounds")
+
+
+ # # save the event dictionary to the BIM
+ # asset_data['Events'] = [{}]
+ # asset_data['Events'][0] = {
+ # # "EventClassification": "Earthquake",
+ # 'EventFolderPath': str(event_dir),
+ # 'Events': event_list_json,
+ # 'type': event_type,
+ # # "type": "SimCenterEvents"
+ # }
+
+ # with open(asset_file, 'w', encoding='utf-8') as f: # noqa: PTH123
+ # json.dump(asset_data, f, indent=2)
+
+
+ # Save the final event grid
+ csv_save_path = event_dir / 'EventGrid.csv'
+ with open(csv_save_path, 'w', newline='') as file:
+ # Create a CSV writer object
+ writer = csv.writer(file)
+
+ # Write the data to the CSV file
+ writer.writerows(data_final)
+
+ # Perform cleanup
+ src.close()
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--assetFile')
+ parser.add_argument('--filenameEVENTgrid')
+ args = parser.parse_args()
+
+ create_event(
+ args.assetFile, args.filenameEVENTgrid
+ )
diff --git a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
index a297e191d..bd4e953e5 100644
--- a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
+++ b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
@@ -46,7 +46,7 @@
import numpy as np
import pandas as pd
from sklearn.neighbors import NearestNeighbors
-
+import geopandas as gpd
def find_neighbors( # noqa: C901, D103
asset_file,
@@ -83,26 +83,55 @@ def find_neighbors( # noqa: C901, D103
event_dir = event_grid_path.parent
event_grid_file = event_grid_path.name
- grid_df = pd.read_csv(event_dir / event_grid_file, header=0)
-
- # store the locations of the grid points in X
- lat_E = grid_df['Latitude'] # noqa: N806
- lon_E = grid_df['Longitude'] # noqa: N806
- X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+ # Check if the file is a CSV or a GIS file
+ file_extension = Path(event_grid_file).suffix.lower()
+
+ if file_extension == '.csv':
+ # Existing code for CSV files
+ grid_df = pd.read_csv(event_dir / event_grid_file, header=0)
+
+ # store the locations of the grid points in X
+ lat_E = grid_df['Latitude'] # noqa: N806
+ lon_E = grid_df['Longitude'] # noqa: N806
+ X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+
+ if filter_label == '':
+ grid_extra_keys = list(
+ grid_df.drop(['GP_file', 'Longitude', 'Latitude'], axis=1).columns
+ )
- if filter_label == '':
- grid_extra_keys = list(
- grid_df.drop(['GP_file', 'Longitude', 'Latitude'], axis=1).columns
- )
+ else:
+ # Else assume GIS files - works will all gis files that geopandas supports
+ gdf = gpd.read_file(event_dir / event_grid_file)
+
+ # Ensure the GIS file is in a geographic coordinate system
+ if not gdf.crs.is_geographic:
+ gdf = gdf.to_crs(epsg=4326) # Convert to WGS84
+
+ # Extract coordinates from the geometry
+ gdf['Longitude'] = gdf.geometry.x
+ gdf['Latitude'] = gdf.geometry.y
+
+ # store the locations of the grid points in X
+ lat_E = gdf['Latitude'] # noqa: N806
+ lon_E = gdf['Longitude'] # noqa: N806
+ X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+
+ if filter_label == '':
+ grid_extra_keys = list(
+ gdf.drop(['geometry', 'Longitude', 'Latitude'], axis=1).columns
+ )
+
+ # Convert GeoDataFrame to regular DataFrame for consistency with the rest of the code
+ grid_df = pd.DataFrame(gdf.drop(columns='geometry'))
# prepare the tree for the nearest neighbor search
if filter_label != '' or len(grid_extra_keys) > 0:
neighbors_to_get = min(neighbors * 10, len(lon_E))
else:
neighbors_to_get = neighbors
- nbrs = NearestNeighbors(n_neighbors=neighbors_to_get, algorithm='ball_tree').fit(
- X
- )
+
+ nbrs = NearestNeighbors(n_neighbors=neighbors_to_get, algorithm='ball_tree').fit(X)
# load the building data file
with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
@@ -211,72 +240,108 @@ def find_neighbors( # noqa: C901, D103
nbr_samples = np.where(rng.multinomial(1, weights, samples) == 1)[1]
# this is the preferred behavior, the else clause is left for legacy inputs
- if grid_df.iloc[0]['GP_file'][-3:] == 'csv':
- # We assume that every grid point has the same type and number of
- # event data. That is, you cannot mix ground motion records and
- # intensity measures and you cannot assign 10 records to one point
- # and 15 records to another.
-
- # Load the first file and identify if this is a grid of IM or GM
- # information. GM grids have GM record filenames defined in the
- # grid point files.
- first_file = pd.read_csv(
- event_dir / grid_df.iloc[0]['GP_file'], header=0
- )
- if first_file.columns[0] == 'TH_file':
- event_type = 'timeHistory'
- else:
- event_type = 'intensityMeasure'
- event_count = first_file.shape[0]
+ if file_extension == '.csv':
+ if grid_df.iloc[0]['GP_file'][-3:] == 'csv':
+ # We assume that every grid point has the same type and number of
+ # event data. That is, you cannot mix ground motion records and
+ # intensity measures and you cannot assign 10 records to one point
+ # and 15 records to another.
+
+ # Load the first file and identify if this is a grid of IM or GM
+ # information. GM grids have GM record filenames defined in the
+ # grid point files.
+ first_file = pd.read_csv(
+ event_dir / grid_df.iloc[0]['GP_file'], header=0
+ )
+ if first_file.columns[0] == 'TH_file':
+ event_type = 'timeHistory'
+ else:
+ event_type = 'intensityMeasure'
+ event_count = first_file.shape[0]
+
+ # collect the list of events and scale factors
+ event_list = []
+ scale_list = []
+
+ # for each neighbor
+ for sample_j, nbr in enumerate(nbr_samples):
+ # make sure we resample events if samples > event_count
+ event_j = sample_j % event_count
+
+ # get the index of the nth neighbor
+ nbr_index = ind_list[nbr]
+
+ # if the grid has ground motion records...
+ if event_type == 'timeHistory':
+ # load the file for the selected grid point
+ event_collection_file = grid_df.iloc[nbr_index]['GP_file']
+ event_df = pd.read_csv(
+ event_dir / event_collection_file, header=0
+ )
+
+ # append the GM record name to the event list
+ event_list.append(event_df.iloc[event_j, 0])
+
+ # append the scale factor (or 1.0) to the scale list
+ if len(event_df.columns) > 1:
+ scale_list.append(float(event_df.iloc[event_j, 1]))
+ else:
+ scale_list.append(1.0)
+
+ # if the grid has intensity measures
+ elif event_type == 'intensityMeasure':
+ # save the collection file name and the IM row id
+ event_list.append(
+ grid_df.iloc[nbr_index]['GP_file'] + f'x{event_j}'
+ )
+
+ # IM collections are not scaled
+ scale_list.append(1.0)
- # collect the list of events and scale factors
+ # TODO: update the LLNL input data and remove this clause # noqa: TD002
+ else:
+ event_list = []
+ for e, i in zip(nbr_samples, ind_list):
+ event_list += [
+ grid_df.iloc[i]['GP_file'],
+ ] * e
+
+ scale_list = np.ones(len(event_list))
+ else :
event_list = []
scale_list = []
+ event_type = 'intensityMeasure'
+
+ # Determine event_count (number of IMs per grid point)
+ im_columns = [col for col in grid_df.columns if col not in ['geometry', 'Longitude', 'Latitude']]
+ event_count = len(im_columns)
# for each neighbor
for sample_j, nbr in enumerate(nbr_samples):
+
# make sure we resample events if samples > event_count
event_j = sample_j % event_count
# get the index of the nth neighbor
nbr_index = ind_list[nbr]
-
- # if the grid has ground motion records...
- if event_type == 'timeHistory':
- # load the file for the selected grid point
- event_collection_file = grid_df.iloc[nbr_index]['GP_file']
- event_df = pd.read_csv(
- event_dir / event_collection_file, header=0
- )
-
- # append the GM record name to the event list
- event_list.append(event_df.iloc[event_j, 0])
-
- # append the scale factor (or 1.0) to the scale list
- if len(event_df.columns) > 1:
- scale_list.append(float(event_df.iloc[event_j, 1]))
- else:
- scale_list.append(1.0)
-
- # if the grid has intensity measures
- elif event_type == 'intensityMeasure':
- # save the collection file name and the IM row id
- event_list.append(
- grid_df.iloc[nbr_index]['GP_file'] + f'x{event_j}'
- )
-
- # IM collections are not scaled
- scale_list.append(1.0)
-
- # TODO: update the LLNL input data and remove this clause # noqa: TD002
- else:
- event_list = []
- for e, i in zip(nbr_samples, ind_list):
- event_list += [
- grid_df.iloc[i]['GP_file'],
- ] * e
-
- scale_list = np.ones(len(event_list))
+
+ # For GIS files, create a new CSV file
+ csv_filename = f'Site_{sample_j}.csv'
+
+ csv_path = event_dir / csv_filename
+
+ # Create a CSV file with data from the GIS file
+ # Use actual data from the GIS file if available, otherwise use dummy data
+ im_columns = [col for col in grid_df.columns if col not in ['geometry', 'Longitude', 'Latitude']]
+
+ im_data = pd.DataFrame({col: [grid_df.iloc[nbr_index][col]] * event_count for col in im_columns})
+
+ im_data.to_csv(csv_path, index=False)
+ # save the collection file name and the IM row id
+ event_list.append(csv_filename + f'x{event_j}')
+
+ # IM collections are not scaled
+ scale_list.append(1.0)
# prepare a dictionary of events
event_list_json = []