From 4c6e6a4e9662ad5b454624c0d04323924eca7857 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Mon, 29 Jan 2024 13:01:58 -0800
Subject: [PATCH 01/63] TranspInventoryGenerator now displays combined inventory file name and location

---
 brails/TranspInventoryGenerator.py | 22 +++++++++++----------
 1 file changed, 9 insertions(+), 13 deletions(-)

diff --git a/brails/TranspInventoryGenerator.py b/brails/TranspInventoryGenerator.py
index 0b40fad..ff32d44 100644
--- a/brails/TranspInventoryGenerator.py
+++ b/brails/TranspInventoryGenerator.py
@@ -40,7 +40,7 @@
 #
 # Last updated:
-# 01-26-2024
+# 01-29-2024
 
 import geopandas as gpd
@@ -52,7 +52,7 @@
 import warnings
 import numpy as np
 from datetime import datetime
-import brails
+from importlib.metadata import version
 from brails.workflow.TransportationElementHandler import TransportationElementHandler
 
 # The map defines the default values according to MTFCC code
@@ -71,7 +71,6 @@
              "S1710":25, "S1720":25, "S1730":25, "S1740":10, "S1750":10,
              "S1780":10, "S1810":10, "S1820":10, "S1830":10}
-
 class TranspInventoryGenerator:
 
     def __init__(self, location='Berkeley, CA'):
@@ -112,7 +111,7 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
         bnodeDF = gpd.GeoDataFrame(columns = ["nodeID", "geometry"], crs = "epsg:4326")
         bridgesDict = {'type':'FeatureCollection',
                        'generated':str(datetime.now()),
-                       'brails_version': brails.__version__,
+                       'brails_version': version('BRAILS'),
                        "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
                        'units': {"length": lengthUnit},
                        'features':[]}
@@ -132,7 +131,7 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
         rnodeDF = gpd.GeoDataFrame(columns = ["nodeID", "geometry"], crs = "epsg:4326")
         roadsDict = {'type':'FeatureCollection',
                      'generated':str(datetime.now()),
-                     'brails_version': brails.__version__,
+                     'brails_version': version('BRAILS'),
                      "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
                      'units': {"length": lengthUnit},
                      'features':[]}
@@ -146,7 +145,7 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
         tnodeDF = gpd.GeoDataFrame(columns = ["nodeID", "geometry"], crs = "epsg:4326")
         tunnelsDict = {'type':'FeatureCollection',
                        'generated':str(datetime.now()),
-                       'brails_version': brails.__version__,
+                       'brails_version': version('BRAILS'),
                        "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
                        'units': {"length": lengthUnit},
                        'features':[]}
@@ -158,10 +157,10 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
         # Dump to json file
         with open("hwy_inventory.geojson", "w") as f:
             json.dump(combinedGeoJSON, f, indent = 2)
-        
+        print('Combined transportation inventory saved in hwy_inventory.geojson'
+              'This file is suitable for R2D use and is available in {os.getcwd()}')
         return
-
 # Convert common length units
 def convertUnits(value, unit_in, unit_out):
     aval_types = ['m', 'mm', 'cm', 'km', 'inch', 'ft', 'mile']
@@ -180,7 +179,6 @@ def convertUnits(value, unit_in, unit_out):
     value = value*scale_map[unit_in]/scale_map[unit_out]
     return value
-
 # Break down long roads according to delta
 def breakDownLongEdges(edges, delta, nodes = None, tolerance = 10e-3):
     dropedEdges = []
@@ -401,7 +399,6 @@ def explodeLineString(roads_gdf):
         expandedRoads.append(newRoad)
     expandedRoads = gpd.GeoDataFrame(expandedRoads, crs=crs)
     return expandedRoads
-
 def formatRoads(minimumHAZUS, connectivity, maxRoadLength, roads_gdf):
     if roads_gdf.shape[0] == 0:
@@ -494,7 +491,13 @@ def formatTunnels(minimumHAZUS, connectivity, tunnels_gdf):
     tunnels_gdf["type"] = "Tunnel"
     tunnels_gdf["assetSubtype"] = "HwyTunnel"
     tunnelDict = json.loads(tunnels_gdf.to_json())
-    return tnodeDF, tunnelDict
-    
+    return tnodeDF, tunnelDict
 
 def combineDict(bnodeDF, bridgesDict, rnodeDF, roadsDict, tnodeDF, tunnelsDict,
                 connectivity, lengthUnit, crs="epsg:4326"):
     combinedDict = {'type':'FeatureCollection',
                     'generated':str(datetime.now()),
-                    'brails_version': brails.__version__,
+                    'brails_version': version('BRAILS'),
                     "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
                     'units': {"length": lengthUnit},
                     'features':[]}

From d6676c2659bd719e618b4145e203baa9fa798e49 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 1 Feb 2024 04:08:41 -0800
Subject: [PATCH 02/63] TranspInventoryGenerator now displays location of combined inventory file

---
 brails/TranspInventoryGenerator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/brails/TranspInventoryGenerator.py b/brails/TranspInventoryGenerator.py
index ff32d44..f7234c8 100644
--- a/brails/TranspInventoryGenerator.py
+++ b/brails/TranspInventoryGenerator.py
@@ -157,8 +157,8 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
         # Dump to json file
         with open("hwy_inventory.geojson", "w") as f:
             json.dump(combinedGeoJSON, f, indent = 2)
-        print('Combined transportation inventory saved in hwy_inventory.geojson'
-              'This file is suitable for R2D use and is available in {os.getcwd()}')
+        print('Combined transportation inventory saved in hwy_inventory.geojson.'
+              f' This file is suitable for R2D use and is available in {os.getcwd()}')
         return
 
 # Convert common length units

From 70aead190e320788bd19677b18ca844d36eeae4c Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 1 Feb 2024 04:09:49 -0800
Subject: [PATCH 03/63] FootprintHandler now accepts point/footprint combo input

---
 brails/workflow/FootprintHandler.py | 216 +++++++++++++++++++---------
 1 file changed, 148 insertions(+), 68 deletions(-)

diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py
index 044c12f..bf79f79 100644
--- a/brails/workflow/FootprintHandler.py
+++ b/brails/workflow/FootprintHandler.py
@@ -37,7 +37,7 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 01-24-2024
+# 02-01-2024
 
 import math
 import json
@@ -58,7 +58,6 @@ def __init__(self):
         self.availableDataSources = ['osm','ms','usastr']
         self.fpSource = 'osm'
         self.footprints = []
-        self.bldgheights = []
 
     def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None):
         """
@@ -494,7 +493,7 @@ def polygon_area(lats, lons):
             else: #return in ratio of sphere total area
                 return area
 
-        def load_footprint_data(fpfile,attrmapfile,fpSource):
+        def load_footprint_data(fpfile,fpSource,attrmapfile):
             """
             Function that loads footprint data from a GeoJSON file
 
@@ -544,26 +543,70 @@ def fp_download(bbox,fpSource):
             elif fpSource=='usastr':
                 footprints = get_usastruct_footprints(bbox)
             return footprints
+
+        def parse_fp_geojson(data, attrmap, attrkeys, fpfile):
+            # Create the attribute fields that will be extracted from the
+            # GeoJSON file:
+            attributes = {}
+            for attr in attrmap.values():
+                attributes[attr] = []
+
+            footprints_out = []
+            discardedfp_count = 0
+            correctedfp_count = 0
+            for loc in data:
+                # If the footprint is a polygon:
+                if loc['geometry']['type']=='Polygon':
+                    # Read footprint coordinates:
+                    temp_fp = loc['geometry']['coordinates']
 
-        with open(attrmapfile) as f:
-            lines = [line.rstrip() for line in f]
-            attrmap = {}
-            for line in lines:
-                lout = line.split(':')
-                if lout[1]!='':
-                    attrmap[lout[1]] = lout[0]
-
-        attributes = {}
-        for attr in attrmap.values():
-            attributes[attr] = []
-
-        with open(fpfile) as f:
-            data = json.load(f)['features']
-
-        attrkeys = list(data[0]['properties'].keys())
-        print(attrkeys)
-        ptdata = all(loc['geometry']['type']=='Point' for loc in data)
-        if ptdata:
+                    # Check down to two levels deep into extracted JSON
+                    # structure to account for inconsistencies in the
+                    # provided footprint data
+                    if len(temp_fp)>1:
+                        fp = temp_fp[:]
+                    elif len(temp_fp[0])>1:
+                        fp = temp_fp[0][:]
+                    elif len(temp_fp[0][0])>1:
+                        fp = temp_fp[0][0][:]
+
+                    # If multiple polygons are detected for a location,
+                    # take the outermost polygon:
+                    if len(fp)==2:
+                        list_len = [len(i) for i in fp]
+                        fp = fp[list_len.index(max(list_len))]
+                        correctedfp_count+=1
+
+                    # Add the footprint and attributes to the output
+                    # variables
+                    footprints_out.append(fp)
+                    if attrkeys:
+                        for key in attrkeys:
+                            try:
+                                attributes[attrmap[key]].append(loc['properties'][key])
+                            except:
+                                pass
+                # If the footprint is a multi-polygon, discard the footprint:
+                elif loc['geometry']['type']=='MultiPolygon':
+                    discardedfp_count+=1
+
+            # Print the results of the footprint extraction:
+            if discardedfp_count==0:
+                print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
+            else:
+                print(f"Corrected {correctedfp_count} building footprint{pluralsuffix(correctedfp_count)} with invalid geometry")
+                print(f"Discarded {discardedfp_count} building footprint{pluralsuffix(discardedfp_count)} with invalid geometry")
+                print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
+
+            return (footprints_out, attributes)
+
+        def parse_pt_geojson(data, attrmap, attrkeys, fpSource):
+            # Create the attribute fields that will be extracted from the
+            # GeoJSON file:
+            attributes = {}
+            for attr in attrmap.values():
+                attributes[attr] = []
+
             # Write the data in datalist into a dictionary for better data access,
             # and filtering the duplicate entries:
             datadict = {}
@@ -575,9 +618,21 @@ def fp_download(bbox,fpSource):
                 datadict[pt] = loc['properties']
 
             points = list(datadict.keys())
-            # Determine the coordinates of the bounding box including the points:
+
+            # Determine the coordinates of the bounding box containing the
+            # points:
             bbox = get_bbox(ptcoords)
-            footprints = fp_download(bbox,fpSource)
+
+            # Get the footprint data corresponding to the point GeoJSON
+            # input:
+            if 'geojson' in fpSource.lower():
+                with open(fpSource) as f:
+                    data = json.load(f)['features']
+                (footprints,_) = parse_fp_geojson(data, {}, {}, fpSource)
+            else:
+                footprints = fp_download(bbox,fpSource)
+
+            # Create an STR tree for efficient parsing of point coordinates:
             pttree = STRtree(points)
 
             # Find the data points that are enclosed in each footprint:
@@ -601,44 +656,64 @@ def fp_download(bbox,fpSource):
                             attributes[attrmap[key]].append(ptres[key])
                         except:
                             pass
-
+
+            return (footprints_out, attributes)
+
+        if attrmapfile:
+            # Create a dictionary for mapping the attributes in the GeoJSON
+            # file to BRAILS inventory naming conventions:
+            with open(attrmapfile) as f:
+                lines = [line.rstrip() for line in f]
+                attrmap = {}
+                for line in lines:
+                    lout = line.split(':')
+                    if lout[1]!='':
+                        attrmap[lout[1]] = lout[0]
+
+            # Read the GeoJSON file and check if all the data in the file is
+            # point data:
+            with open(fpfile) as f:
+                data = json.load(f)['features']
+            ptdata = all(loc['geometry']['type']=='Point' for loc in data)
+
+            # Identify the attribute keys in the GeoJSON file:
+            attrkeys0 = list(data[0]['properties'].keys())
+            if attrkeys0:
+                print('Building attributes detected in the input GeoJSON: ' +
+                      ', '.join(attrkeys0))
+
+            # Check if all of the attribute keys in the GeoJSON have
+            # correspondence in the map. Ignore the keys that do not have
+            # correspondence:
+            attrkeys = set()
+            for key in attrkeys0:
+                try:
+                    attrmap[key]
+                    attrkeys.add(key)
+                except:
+                    pass
+            ignored_Attr = set(attrkeys0) - attrkeys
+            if ignored_Attr:
+                print('Attribute mapping does not cover all attributes detected in '
+                      'the input GeoJSON. Ignoring detected attributes: ' +
+                      ', '.join(ignored_Attr))
+        else:
+            attrmap = {}
+            attrkeys = {}
+
+        if ptdata:
+            (footprints_out, attributes) = parse_pt_geojson(data,
+                                                            attrmap,
+                                                            attrkeys,
+                                                            fpSource)
         else:
-            footprints_out = []
-            discardedfp_count = 0
-            correctedfp_count = 0
-            for loc in data:
-                if loc['geometry']['type']=='Polygon':
-                    temp_fp = loc['geometry']['coordinates']
-                    if len(temp_fp)>1:
-                        fp = temp_fp[:]
-                    elif len(temp_fp[0])>1:
-                        fp = temp_fp[0][:]
-                    elif len(temp_fp[0][0])>1:
-                        fp = temp_fp[0][0][:]
-
-                    if len(fp)==2:
-                        list_len = [len(i) for i in fp]
-                        fp = fp[list_len.index(max(list_len))]
-                        correctedfp_count+=1
-
-                    footprints_out.append(fp)
-                    for key in attrkeys:
-                        try:
-                            attributes[attrmap[key]].append(loc['properties'][key])
-                        except:
-                            pass
-
-                elif loc['geometry']['type']=='MultiPolygon':
-                    discardedfp_count+=1
-
-            if discardedfp_count==0:
-                print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
-            else:
-                print(f"Corrected {correctedfp_count} building footprint{pluralsuffix(correctedfp_count)} with invalid geometry")
-                print(f"Discarded {discardedfp_count} building footprint{pluralsuffix(discardedfp_count)} with invalid geometry")
-                print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
+            (footprints_out, attributes) = parse_fp_geojson(data,
+                                                            attrmap,
+                                                            attrkeys,
+                                                            fpfile)
+            fpSource = fpfile
 
-        return (footprints_out, attributes)
+        return (footprints_out, attributes, fpSource)
 
         def fp_source_selector(self):
             if self.fpSource=='osm':
@@ -655,20 +730,22 @@ def fp_source_selector(self):
         self.fpSource = fpSource
         if isinstance(self.queryarea,str):
            if 'geojson' in queryarea.lower():
-               (self.footprints,self.attributes) = load_footprint_data(
+               (self.footprints,self.attributes,self.fpSource) = load_footprint_data(
                    self.queryarea,
-                   attrmap,
-                   self.fpSource)
+                   self.fpSource,
+                   attrmap)
+               bldgheights = []
            else:
-               (self.footprints,self.bldgheights) = fp_source_selector(self)
+               (self.footprints,bldgheights) = fp_source_selector(self)
        elif isinstance(queryarea,tuple):
-           (self.footprints,self.bldgheights) = fp_source_selector(self)
+           (self.footprints,bldgheights) = fp_source_selector(self)
        elif isinstance(queryarea,list):
            self.footprints = []
+           bldgheights = []
           for query in self.queryarea:
               (fps, bldghts) = fp_source_selector(query)
               self.footprints.extend(fps)
-              self.bldgheights.extend(bldghts)
+              bldgheights.extend(bldghts)
        else:
            sys.exit('Incorrect location entry. The location entry must be defined as' +
                     ' 1) a string or a list of strings containing the name(s) of the query areas,' +
                     ' 2) a tuple containing the coordinates for two opposite vertices of the' +
                     ' bounding box of interest in (lon1, lat1, lon2, lat2) format.' +
                     ' For defining a bounding box, longitude and latitude values' +
                     ' shall be entered for the vertex pairs of any of the two' +
-                    ' diagonals of the rectangular bounding box.')
-
+                    ' diagonals of the rectangular bounding box.')
+
+       if bldgheights!=[]:
+           self.attributes['buildingheight'] = bldgheights.copy()
+
        self.fpAreas = []
        for fp in self.footprints:
            lons = []
            lats = []
            for pt in fp:
                lons.append(pt[0])
                lats.append(pt[1])
            self.fpAreas.append(polygon_area(lats, lons))
\ No newline at end of file
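A minimal usage sketch for the point/footprint combo input added in the patch above. The file names are hypothetical placeholders, and the attribute map is the colon-separated text file that load_footprint_data expects; fpSource falls back to its 'osm' default when omitted:

    from brails.workflow.FootprintHandler import FootprintHandler

    # Load a GeoJSON file of building points; footprints are pulled from the
    # source named in fpSource, and each point's attributes are matched to
    # the footprint that encloses it:
    fpHandler = FootprintHandler()
    fpHandler.fetch_footprint_data('BuildingPoints.geojson',
                                   fpSource='ms',
                                   attrmap='AttributeMap.txt')

    # Matched footprints and their attribute dictionary:
    print(len(fpHandler.footprints))
    print(list(fpHandler.attributes.keys()))
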
From fdd270b454b64078486c58aceb1701db4a9f7919 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 1 Feb 2024 04:48:58 -0800
Subject: [PATCH 04/63] Updated data outputting to better interface with InventoryGenerator

---
 brails/workflow/NSIParser.py | 38 +++++++++++++++++++++++++-----------
 1 file changed, 27 insertions(+), 11 deletions(-)

diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py
index 4555a65..29735fc 100644
--- a/brails/workflow/NSIParser.py
+++ b/brails/workflow/NSIParser.py
@@ -37,7 +37,7 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 01-24-2024
+# 02-01-2024
 
 import requests
 import numpy as np
@@ -51,7 +51,7 @@ class NSIParser:
     def __init__(self):
         self.inventory = []
 
-    def GenerateBldgInventory(self,footprints):
+    def GenerateBldgInventory(self,footprints,outFile=None):
         """
         Function that reads NSI building points and matches the data to a set
         of footprints.
@@ -122,7 +122,8 @@ def get_nbi_data(box):
 
         # Download NSI data using the defined retry strategy, read downloaded
         # GeoJSON data into a list:
-        print('\nGetting National Structure Inventory (NSI) building points for the building footprints...')
+        print('\nGetting National Structure Inventory (NSI) building data'
+              ' for the building footprints...')
         response = s.get(url)
         datalist = response.json()['features']
@@ -149,6 +150,7 @@ def get_inv_from_datadict(datadict,footprints):
                 ress.append(np.empty(shape=(0, 0)))
 
         # Match NSI data to each footprint:
+        footprints_out_json = []
         footprints_out = []
         lat = []
         lon = []
@@ -160,7 +162,8 @@ def get_inv_from_datadict(datadict,footprints):
         occ = []
         for ind, fp in enumerate(footprints):
             if ress[ind].size!=0:
-                footprints_out.append(('{"type":"Feature","geometry":' +
+                footprints_out.append(fp)
+                footprints_out_json.append(('{"type":"Feature","geometry":' +
                                        '{"type":"Polygon","coordinates":[' +
                                        f"""{fp}""" +
                                        ']},"properties":{}}'))
@@ -172,8 +175,16 @@ def get_inv_from_datadict(datadict,footprints):
                 nstories.append(ptres['num_story'])
                 yearbuilt.append(ptres['med_yr_blt'])
                 repcost.append(ptres['val_struct'])
-                strtype.append(ptres['bldgtype'])
-                occ.append(ptres['occtype'])
+                bldgtype = ptres['bldgtype'] + '1'
+                if bldgtype=='M1':
+                    strtype.append('RM1')
+                else:
+                    strtype.append(bldgtype)
+                if '-' in ptres['occtype']:
+                    occ.append(ptres['occtype'].split('-')[0])
+                else:
+                    occ.append(ptres['occtype'])
+
 
         # Display the number of footprints that can be matched to NSI points:
         print(f'Found a total of {len(footprints_out)} building points in'
@@ -189,7 +200,7 @@ def get_inv_from_datadict(datadict,footprints):
         inventory['StructureType'] = strtype
         inventory['OccupancyClass'] = occ
         inventory['Footprint'] = footprints_out
-        return inventory
+        return (inventory, footprints_out_json)
 
     # Determine the coordinates of the bounding box including the footprints:
     bbox = get_bbox(footprints)
@@ -198,8 +209,13 @@ def get_inv_from_datadict(datadict,footprints):
     datadict = get_nbi_data(bbox)
 
     # Create a footprint-merged building inventory from extracted NSI data:
-    self.inventory = get_inv_from_datadict(datadict,footprints)
+    (self.inventory, footprints_out_json) = get_inv_from_datadict(datadict,footprints)
 
-    # Write the created inventory in R2D-compatible CSV format:
-    self.inventory.to_csv('inventory.csv', index=True, index_label='id')
-    print(f'\nFinal inventory data available in {os.getcwd()}/inventory.csv')
\ No newline at end of file
+    # If requested, write the created inventory in R2D-compatible CSV format:
+    if outFile:
+        inventory = self.inventory.copy(deep=True)
+        n = inventory.columns[1]
+        inventory.drop(n, axis = 1, inplace = True)
+        inventory[n] = footprints_out_json
+        inventory.to_csv(outFile, index=True, index_label='id')
+        print(f'\nFinal inventory data available in {os.getcwd()}/{outFile}')
\ No newline at end of file
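A short sketch of the updated NSIParser interface. The bounding box below is an arbitrary example, and the R2D-compatible CSV is written only because outFile is supplied:

    from brails.workflow.FootprintHandler import FootprintHandler
    from brails.workflow.NSIParser import NSIParser

    # Get footprints for a small bounding box (lon1, lat1, lon2, lat2):
    fpHandler = FootprintHandler()
    fpHandler.fetch_footprint_data((-122.30, 37.85, -122.28, 37.87))

    # Match NSI points to the footprints and write the matched inventory:
    nsiParser = NSIParser()
    nsiParser.GenerateBldgInventory(fpHandler.footprints,
                                    outFile='inventory.csv')
    print(nsiParser.inventory.head())  # inventory is a pandas DataFrame
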
From b3810d18da0c9eb56e5da41f34ebbe1393810c5a Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 1 Feb 2024 04:55:49 -0800
Subject: [PATCH 05/63] Added list of attributes that can be extracted from NSI using NSIParser

---
 brails/workflow/NSIParser.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py
index 29735fc..efc7956 100644
--- a/brails/workflow/NSIParser.py
+++ b/brails/workflow/NSIParser.py
@@ -50,6 +50,7 @@ class NSIParser:
     def __init__(self):
         self.inventory = []
+        self.attributes = ['erabuilt','numstories','occupancy','constype']
 
     def GenerateBldgInventory(self,footprints,outFile=None):
         """

From 6813edd1260f731c106841c1f2cfae31b233f9a4 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 1 Feb 2024 05:25:06 -0800
Subject: [PATCH 06/63] Enabled using NSI as baseline inventory in InventoryGenerator

---
 brails/InventoryGenerator.py | 28 +++++++++++++++++++---------
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py
index cc07cf2..fc2e337 100644
--- a/brails/InventoryGenerator.py
+++ b/brails/InventoryGenerator.py
@@ -40,7 +40,7 @@
 # Satish Rao
 #
 # Last updated:
-# 01-29-2024
+# 02-01-2024
 
 import random
 import sys
@@ -58,6 +58,7 @@
                       YearBuiltClassifier)
 from .workflow.ImHandler import ImageHandler
 from .workflow.FootprintHandler import FootprintHandler
+from brails.workflow.NSIParser import NSIParser
 
 class InventoryGenerator:
@@ -136,7 +137,8 @@ def enabled_attributes(self):
               "InventoryGenerator.generate, simply set attributes='all'")
 
     def generate(self,attributes=['numstories','occupancy','roofshape'],
-                 outFile='inventory.csv', lengthUnit='ft'):
+                 useNSIBaseline= True, outFile='inventory.csv',
+                 lengthUnit='ft'):
 
         def write_to_dataframe(df,predictions,column,imtype='street_images'):
             """
@@ -266,7 +268,7 @@ def write_inventory_output(inventorydf,outFile):
             # Rearrange the column order of dfout such that the Footprint field is
             # the last:
             cols = [col for col in dfout.columns if col!='Footprint']
-            new_cols = ['Latitude','Longitude'] + cols[:-2] + ['Footprint']
+            new_cols = ['Latitude','Longitude'] + cols + ['Footprint']
             dfout = dfout[new_cols]
 
             # If the inventory is desired in CSV format, write dfout to a CSV:
@@ -305,17 +307,25 @@ def write_inventory_output(inventorydf,outFile):
             json.dump(geojson, output_file, indent=2)
             print(f'\nFinal inventory data available in {outFile} in {os.getcwd()}')
 
-        # Parse/correct the list of user requested building attributes:
+        # Parse/correct the list of user requested building attributes:
         self.attributes = parse_attribute_input(attributes, self.enabledAttributes)
 
         # Create a list of footprints for easier module calls:
         footprints = self.inventory['Footprint'].values.tolist()
+
+        if useNSIBaseline:
+            nsi = NSIParser()
+            nsi.GenerateBldgInventory(footprints)
+            attributes_process = set(self.attributes) - set(nsi.attributes)
+            self.inventory = nsi.inventory.copy(deep=True)
+            footprints = self.inventory['Footprint'].values.tolist()
+        else:
+            attributes_process = self.attributes.copy()
 
         # Download the images required for the requested attributes:
         image_handler = ImageHandler(self.apiKey)
 
-        if 'roofshape' in self.attributes: #or 'roofcover' in self.attributes:
+        if 'roofshape' in attributes_process: #or 'roofcover' in attributes_process:
             image_handler.GetGoogleSatelliteImage(footprints)
             imsat = [im for im in image_handler.satellite_images if im is not None]
             self.inventory['satellite_images'] = image_handler.satellite_images
@@ -323,12 +333,12 @@ def write_inventory_output(inventorydf,outFile):
         streetAttributes = self.enabledAttributes[:]
         streetAttributes.remove('roofshape')
         #streetAttributes.remove('roofcover')
-        if set.intersection(set(streetAttributes),set(self.attributes))!=set():
+        if set.intersection(set(streetAttributes),set(attributes_process))!=set():
             image_handler.GetGoogleStreetImage(footprints)
             imstreet = [im for im in image_handler.street_images if im is not None]
             self.inventory['street_images'] = image_handler.street_images
 
-        for attribute in self.attributes:
+        for attribute in attributes_process:
 
             if attribute=='chimney':
                 # Initialize the chimney detector object:
@@ -449,9 +459,9 @@ def write_inventory_output(inventorydf,outFile):
         # Bring the attribute values to the desired length unit:
         if lengthUnit.lower()=='m':
             self.inventory['PlanArea'] = self.inventory['PlanArea'].apply(lambda x: x*0.0929)
-            if 'buildingheight' in self.attributes:
+            if 'buildingheight' in attributes_process:
                 self.inventory['buildingheight'] = self.inventory['buildingheight'].apply(lambda x: x*0.3048)
-            if 'roofeaveheight' in self.attributes:
+            if 'roofeaveheight' in attributes_process:
                 self.inventory['roofeaveheight'] = self.inventory['roofeaveheight'].apply(lambda x: x*0.3048)
 
         # Write the generated inventory in outFile:
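A sketch of generate() with the new useNSIBaseline option. The constructor arguments follow the standard BRAILS examples (location, nbldgs, randomSelection, GoogleAPIKey) and are an assumption here; the API key is a placeholder:

    from brails.InventoryGenerator import InventoryGenerator

    invGenerator = InventoryGenerator(location='Berkeley, CA', nbldgs=100,
                                      randomSelection=True,
                                      GoogleAPIKey='PROVIDE-YOUR-KEY')

    # With useNSIBaseline=True, NSI supplies the baseline attributes
    # (erabuilt, numstories, occupancy, constype), and image-based models
    # run only for the remaining requested attributes:
    invGenerator.generate(attributes=['numstories','occupancy','roofshape'],
                          useNSIBaseline=True, outFile='inventory.csv',
                          lengthUnit='ft')
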
From 89839b7e516aa536f77e40c030b3906d2ebaf112 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Wed, 7 Feb 2024 14:50:13 -0800
Subject: [PATCH 07/63] Updated street-level imagery extraction to mark unattainable images as None

---
 brails/workflow/ImHandler.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/brails/workflow/ImHandler.py b/brails/workflow/ImHandler.py
index c26c03d..3fd68b0 100644
--- a/brails/workflow/ImHandler.py
+++ b/brails/workflow/ImHandler.py
@@ -37,7 +37,7 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 01-09-2024
+# 02-07-2024
 
 import os
@@ -707,7 +707,7 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
             try:
                 pano['id'] = get_pano_id(pano['queryLatLon'],apikey)
             except:
-                return
+                return None
 
             # Get the metadata for the pano:
             pano = get_pano_meta(pano, savedmap = True, dmapoutname = depthmap_name)
@@ -740,7 +740,7 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
         # street-level imagery:
         self.footprints = footprints
         self.centroids = []
-        self.street_images = []
+        street_images = []
         inps = []
         for footprint in footprints:
             fp = np.fliplr(np.squeeze(np.array(footprint))).tolist()
@@ -750,7 +750,7 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
             imName.replace(".","")
             im_name = f"tmp/images/street/imstreet_{imName}.jpg"
             depthmap_name = f"tmp/images/depthmap/dmstreet_{imName}.txt"
-            self.street_images.append(im_name)
+            street_images.append(im_name)
             inps.append((fp,(fp_cent.y,fp_cent.x),im_name,depthmap_name))
 
         # Download building-wise street-level imagery and depthmap strings:
@@ -778,11 +778,12 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
         if save_all_cam_metadata==False:
             self.cam_elevs = []
             self.depthmaps = []
-            for im in self.street_images:
+            for (ind,im) in enumerate(street_images):
                 if results[im] is not None:
                     self.cam_elevs.append(results[im][0])
                     self.depthmaps.append(results[im][1])
                 else:
+                    street_images[ind] = None
                     self.cam_elevs.append(None)
                     self.depthmaps.append(None)
         else:
@@ -793,7 +794,7 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
             self.headings = []
             self.pitch = []
             self.zoom_levels = []
-            for im in self.street_images:
+            for (ind,im) in enumerate(street_images):
                 if results[im] is not None:
                     self.cam_elevs.append(results[im][0])
                     self.cam_latlons.append(results[im][1])
@@ -810,7 +811,9 @@ def download_streetlev_image(fp, fpcent, im_name, depthmap_name, apikey,
                     self.headings.append(None)
                     self.pitch.append(None)
                     self.zoom_levels.append(None)
-
+                    street_images[ind] = None
+        self.street_images = street_images.copy()
+
     def GetGoogleStreetImageAPI(self,footprints):
         self.footprints = footprints[:]
         # Function that downloads a file given its URL and the desired path to save it:

From 8a817377d124d436e2bcc8b80422f72ecd8451 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Wed, 7 Feb 2024 14:51:31 -0800
Subject: [PATCH 08/63] Updated InventoryGenerator for more robust street-level image handling

---
 brails/InventoryGenerator.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py
index fc2e337..6b94f2f 100644
--- a/brails/InventoryGenerator.py
+++ b/brails/InventoryGenerator.py
@@ -40,7 +40,7 @@
 # Satish Rao
 #
 # Last updated:
-# 02-01-2024
+# 02-07-2024
 
 import random
 import sys
@@ -229,7 +229,7 @@ def parse_attribute_input(attrIn,attrEnabled):
 
             return attrOut
 
-        def write_inventory_output(inventorydf,outFile):
+        def write_inventory_output(inventorydf,outFile,lengthUnit):
             """
             Function that writes the data in inventorydf DataFrame into a CSV
             or GeoJSON file based on the file name defined in the outFile.
@@ -254,7 +254,7 @@ def write_inventory_output(inventorydf,outFile):
             dfout = inventorydf.copy(deep=True)
             dfout = dfout.drop(columns=['satellite_images', 'street_images'],
                                errors='ignore')
-            
+
             for index, row in inventorydf.iterrows():
                 dfout.loc[index, 'Footprint'] = ('{"type":"Feature","geometry":' +
@@ -267,14 +267,14 @@ def write_inventory_output(inventorydf,outFile):
 
             # Rearrange the column order of dfout such that the Footprint field is
             # the last:
-            cols = [col for col in dfout.columns if col!='Footprint']
+            cols = [col for col in dfout.columns if col not in ['Footprint','Latitude','Longitude']]
             new_cols = ['Latitude','Longitude'] + cols + ['Footprint']
             dfout = dfout[new_cols]
 
             # If the inventory is desired in CSV format, write dfout to a CSV:
             if '.csv' in outFile.lower():
                 dfout.to_csv(outFile, index=True, index_label='id')
-            
+
             # Else write the inventory into a GeoJSON file:
             else:
                 if '.geojson' not in outFile.lower():
@@ -287,7 +287,7 @@ def write_inventory_output(inventorydf,outFile):
                            "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84" }},
                            'units': {"length": lengthUnit},
                            'features':[]}
-                
+
                 attrs = dfout.columns.values.tolist()
                 attrs.remove('Footprint')
                 for index, row in dfout.iterrows():
@@ -300,13 +300,18 @@ def write_inventory_output(inventorydf,outFile):
                     feature['geometry']['coordinates'] = json.loads(fp)
                     feature['properties']['id'] = index
                     for attr in attrs:
-                        feature['properties'][attr] = row[attr]
+                        res = row[attr]
+                        if pd.isnull(res):
+                            feature['properties'][attr] = 'NA'
+                        else:
+                            feature['properties'][attr] = res
                     geojson['features'].append(feature)
-                
+
                 with open(outFile, 'w') as output_file:
                     json.dump(geojson, output_file, indent=2)
             print(f'\nFinal inventory data available in {outFile} in {os.getcwd()}')
 
+
         # Parse/correct the list of user requested building attributes:
         self.attributes = parse_attribute_input(attributes, self.enabledAttributes)
@@ -465,7 +470,7 @@ def write_inventory_output(inventorydf,outFile):
             self.inventory['roofeaveheight'] = self.inventory['roofeaveheight'].apply(lambda x: x*0.3048)
 
         # Write the generated inventory in outFile:
-        write_inventory_output(self.inventory,outFile)
+        write_inventory_output(self.inventory,outFile,lengthUnit)
 
         # Merge the DataFrame of predicted attributes with the DataFrame of
         # incomplete inventory and print the resulting table to the output file

From 8a817377d124d436e2bcc8b80422f72ecd8451 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 15 Feb 2024 19:03:36 -0800
Subject: [PATCH 09/63] Fixed incorrect footprint formatting issue in output files

---
 brails/workflow/NSIParser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py
index efc7956..0a22cb7 100644
--- a/brails/workflow/NSIParser.py
+++ b/brails/workflow/NSIParser.py
@@ -215,7 +215,7 @@ def get_inv_from_datadict(datadict,footprints):
     # If requested, write the created inventory in R2D-compatible CSV format:
     if outFile:
         inventory = self.inventory.copy(deep=True)
-        n = inventory.columns[1]
+        n = inventory.columns[-1]
         inventory.drop(n, axis = 1, inplace = True)
         inventory[n] = footprints_out_json
         inventory.to_csv(outFile, index=True, index_label='id')

From 13f5e4f3a0319d960e13db4cab34d8ad4b662771 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Fri, 16 Feb 2024 04:39:07 -0800
Subject: [PATCH 10/63] Preallocated attributes fields for more consistent extraction behavior

---
 brails/workflow/FootprintHandler.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py
index bf79f79..9a52f60 100644
--- a/brails/workflow/FootprintHandler.py
+++ b/brails/workflow/FootprintHandler.py
@@ -37,7 +37,7 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 02-01-2024
+# 02-16-2024
 
 import math
 import json
@@ -58,6 +58,7 @@ def __init__(self):
         self.availableDataSources = ['osm','ms','usastr']
         self.fpSource = 'osm'
         self.footprints = []
+        self.attributes = {}
 
     def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None):
         """

From ce068e365b057b377237a19b5690c1bc476d2f1e Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Thu, 7 Mar 2024 08:01:02 -0800
Subject: [PATCH 11/63] FootprintHandler now reads OSM, MS and USA Structures attributes

---
 brails/workflow/FootprintHandler.py | 531 +++++++++++++---------------
 1 file changed, 248 insertions(+), 283 deletions(-)

diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py
index 9a52f60..7bf6469 100644
--- a/brails/workflow/FootprintHandler.py
+++ b/brails/workflow/FootprintHandler.py
@@ -37,7 +37,7 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 02-16-2024
+# 03-07-2024
 
 import math
 import json
@@ -51,7 +51,6 @@
 from shapely.ops import linemerge, unary_union, polygonize
 from shapely.strtree import STRtree
 
-
 class FootprintHandler:
     def __init__(self):
         self.queryarea = []
@@ -59,8 +58,126 @@ def __init__(self):
         self.fpSource = 'osm'
         self.footprints = []
         self.attributes = {}
+
+    def __fetch_roi(self,queryarea):
+        # Search for the query area using Nominatim API:
+        print(f"\nSearching for {queryarea}...")
+        queryarea = queryarea.replace(" ", "+").replace(',','+')
+
+        queryarea_formatted = ""
+        for i, j in groupby(queryarea):
+            if i=='+':
+                queryarea_formatted += i
+            else:
+                queryarea_formatted += ''.join(list(j))
+
+        nominatimquery = ('https://nominatim.openstreetmap.org/search?' +
+                          f"q={queryarea_formatted}&format=jsonv2")
+        r = requests.get(nominatimquery)
+        datalist = r.json()
+
+        areafound = False
+        for data in datalist:
+            queryarea_osmid = data['osm_id']
+            queryarea_name = data['display_name']
+            if(data['osm_type']=='relation' and
+               'university' in queryarea.lower() and
+               data['type']=='university'):
+                areafound = True
+                break
+            elif (data['osm_type']=='relation' and
+                  data['type']=='administrative'):
+                areafound = True
+                break
+
+        if areafound==True:
+            print(f"Found {queryarea_name}")
+        else:
+            sys.exit(f"Could not locate an area named {queryarea}. " +
+                     'Please check your location query to make sure ' +
+                     'it was entered correctly.')
+
+        queryarea_printname = queryarea_name.split(",")[0]
+
+        url = 'http://overpass-api.de/api/interpreter'
+
+        # Get the polygon boundary for the query area:
+        query = f"""
+        [out:json][timeout:5000];
+        rel({queryarea_osmid});
+        out geom;
+        """
+
+        r = requests.get(url, params={'data': query})
+
+        datastruct = r.json()['elements'][0]
+        if datastruct['tags']['type'] in ['boundary','multipolygon']:
+            lss = []
+            for coorddict in datastruct['members']:
+                if coorddict['role']=='outer':
+                    ls = []
+                    for coord in coorddict['geometry']:
+                        ls.append([coord['lon'],coord['lat']])
+                    lss.append(LineString(ls))
+
+            merged = linemerge([*lss])
+            borders = unary_union(merged) # linestrings to a MultiLineString
+            polygons = list(polygonize(borders))
+
+            if len(polygons)==1:
+                bpoly = polygons[0]
+            else:
+                bpoly = MultiPolygon(polygons)
+
+        else:
+            sys.exit(f"Could not retrieve the boundary for {queryarea}. " +
+                     'Please check your location query to make sure ' +
+                     'it was entered correctly.')
+        return bpoly, queryarea_printname, queryarea_osmid
+
+    def __bbox2poly(self,queryarea):
+        # Parse the entered bounding box into a polygon:
+        if len(queryarea)%2==0 and len(queryarea)!=0:
+            if len(queryarea)==4:
+                bpoly = box(*queryarea)
+                queryarea_printname = (f"the bounding box: {list(queryarea)}")
+            elif len(queryarea)>4:
+                queryarea_printname = 'the bounding box: ['
+                bpolycoords = []
+                for i in range(int(len(queryarea)/2)):
+                    bpolycoords.append([queryarea[2*i], queryarea[2*i+1]])
+                    queryarea_printname+= f'{queryarea[2*i]}, {queryarea[2*i+1]}, '
+                bpoly = Polygon(bpolycoords)
+                queryarea_printname = queryarea_printname[:-2]+']'
+            else:
+                raise ValueError('Less than two longitude/latitude pairs were entered to define the bounding box entry. ' +
+                                 'A bounding box can be defined by using at least two longitude/latitude pairs.')
+        else:
+            raise ValueError('Incorrect number of elements detected in the tuple for the bounding box. '
+                             'Please check to see if you are missing a longitude or latitude value.')
+        return bpoly, queryarea_printname
+
+    def __write_fp2geojson(self,footprints,attributes,outputFilename):
+        attrkeys = list(attributes.keys())
+        geojson = {'type':'FeatureCollection',
+                   "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
+                   'features':[]}
+        for ind,fp in enumerate(footprints):
+            feature = {'type':'Feature',
+                       'properties':{},
+                       'geometry':{'type':'Polygon',
+                                   'coordinates':[]}}
+            feature['geometry']['coordinates'] = [fp]
+            for key in attrkeys:
+                attr = attributes[key][ind]
+                feature['properties'][key] = 'NA' if attr is None else attr
+            geojson['features'].append(feature)
+
+        with open(outputFilename, 'w') as outputFile:
+            json.dump(geojson, outputFile, indent=2)
 
-    def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None):
+    def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None,
+                             outputFile='footprints.geojson'):
         """
         Function that loads footprint data from OpenStreetMap, Microsoft, USA
         Structures, user-defined data
@@ -73,94 +190,11 @@ def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None):
         Output: Footprint information parsed as a list of lists with each
                 coordinate described in longitude and latitude pairs
         """
-        def get_osm_footprints(queryarea):
-            def write_fp2geojson(footprints, output_filename='footprints.geojson'):
-                geojson = {'type':'FeatureCollection',
-                           "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
-                           'features':[]}
-                for fp in footprints:
-                    feature = {'type':'Feature',
-                               'properties':{},
-                               'geometry':{'type':'Polygon',
-                                           'coordinates':[]}}
-                    feature['geometry']['coordinates'] = [fp]
-                    geojson['features'].append(feature)
-
-                with open(output_filename, 'w') as output_file:
-                    json.dump(geojson, output_file, indent=2)
-
-            if isinstance(queryarea,str):
-                # Search for the query area using Nominatim API:
-                print(f"\nSearching for {queryarea}...")
-                queryarea = queryarea.replace(" ", "+").replace(',','+')
-
-                queryarea_formatted = ""
-                for i, j in groupby(queryarea):
-                    if i=='+':
-                        queryarea_formatted += i
-                    else:
-                        queryarea_formatted += ''.join(list(j))
-
-                nominatimquery = ('https://nominatim.openstreetmap.org/search?' +
-                                  f"q={queryarea_formatted}&format=jsonv2")
-
-                r = requests.get(nominatimquery)
-                datalist = r.json()
-
-                areafound = False
-                for data in datalist:
-                    queryarea_turboid = data['osm_id'] + 3600000000
-                    queryarea_name = data['display_name']
-                    if(data['osm_type']=='relation' and
-                       'university' in queryarea.lower() and
-                       data['type']=='university'):
-                        areafound = True
-                        break
-                    elif (data['osm_type']=='relation' and
-                          data['type']=='administrative'):
-                        areafound = True
-                        break
-
-                if areafound==True:
-                    print(f"Found {queryarea_name}")
-                else:
-                    sys.exit(f"Could not locate an area named {queryarea}. " +
-                             'Please check your location query to make sure ' +
-                             'it was entered correctly.')
-
-                queryarea_printname = queryarea_name.split(",")[0]
-
-            elif isinstance(queryarea,tuple):
-                if len(queryarea)%2==0 and len(queryarea)!=0:
-                    if len(queryarea)==4:
-                        bpoly = [min(queryarea[1],queryarea[3]),
-                                 min(queryarea[0],queryarea[2]),
-                                 max(queryarea[1],queryarea[3]),
-                                 max(queryarea[0],queryarea[2])]
-                        bpoly = f'{bpoly[0]},{bpoly[1]},{bpoly[2]},{bpoly[3]}'
-                        queryarea_printname = (f"the bounding box: {list(queryarea)}")
-                    elif len(queryarea)>4:
-                        bpoly = 'poly:"'
-                        queryarea_printname = 'the bounding box: ['
-                        for i in range(int(len(queryarea)/2)):
-                            bpoly+=f'{queryarea[2*i+1]} {queryarea[2*i]} '
-                            queryarea_printname+= f'{queryarea[2*i]}, {queryarea[2*i+1]}, '
-                        bpoly = bpoly[:-1]+'"'
-                        queryarea_printname = queryarea_printname[:-2]+']'
-                    else:
-                        raise ValueError('Less than two latitude longitude pairs were entered to define the bounding box entry. ' +
-                                         'A bounding box can be defined by using at least two longitude/latitude pairs.')
-                else:
-                    raise ValueError('Incorrect number of elements detected in the tuple for the bounding box. '
-                                     'Please check to see if you are missing a longitude or latitude value.')
-
-            # Obtain and parse the footprint data for the determined area using Overpass API:
-
-            print(f"\nFetching OSM footprint data for {queryarea_printname}...")
-            url = 'http://overpass-api.de/api/interpreter'
-
+        def get_osm_footprints(queryarea):
             if isinstance(queryarea,str):
+                bpoly, queryarea_printname, osmid = self.__fetch_roi(queryarea)
+                queryarea_turboid = osmid + 3600000000
                 query = f"""
                 [out:json][timeout:5000][maxsize:2000000000];
                 area({queryarea_turboid})->.searchArea;
                 way["building"](area.searchArea);
                 out body;
                 >;
                 out skel qt;
                 """
             elif isinstance(queryarea,tuple):
+                bpoly, queryarea_printname = self.__bbox2poly(queryarea)
+                if len(queryarea)==4:
+                    bbox = [min(queryarea[1],queryarea[3]),
+                            min(queryarea[0],queryarea[2]),
+                            max(queryarea[1],queryarea[3]),
+                            max(queryarea[0],queryarea[2])]
+                    bbox = f'{bbox[0]},{bbox[1]},{bbox[2]},{bbox[3]}'
+                elif len(queryarea)>4:
+                    bbox = 'poly:"'
+                    for i in range(int(len(queryarea)/2)):
+                        bbox+=f'{queryarea[2*i+1]} {queryarea[2*i]} '
+                    bbox = bbox[:-1]+'"'
+
                 query = f"""
                 [out:json][timeout:5000][maxsize:2000000000];
-                way["building"]({bpoly});
+                way["building"]({bbox});
                 out body;
                 >;
                 out skel qt;
                 """
-
+
+            url = 'http://overpass-api.de/api/interpreter'
             r = requests.get(url, params={'data': query})
 
             datalist = r.json()['elements']
             nodedict = {}
             for data in datalist:
                 if data['type']=='node':
                    nodedict[data['id']] = [data['lon'],data['lat']]
-
+
+            attrmap = {'start_date':'erabuilt',
+                       'building:start_date':'erabuilt',
+                       'construction_date':'erabuilt',
+                       'roof:shape':'roofshape',
+                       'height':'buildingheight',
+                       }
+
+            levelkeys = {'building:levels','roof:levels',
+                         'building:levels:underground'}
+            otherattrkeys = set(attrmap.keys())
+            datakeys = levelkeys.union(otherattrkeys)
+
+            attrkeys = ['buildingheight','erabuilt','numstories','roofshape']
+            attributes = {key: [] for key in attrkeys}
+            fpcount = 0
             footprints = []
             for data in datalist:
                 if data['type']=='way':
                     nodes = data['nodes']
                     footprint = []
                     for node in nodes:
                         footprint.append(nodedict[node])
                     footprints.append(footprint)
+                    fpcount+=1
+                    availableTags = set(data['tags'].keys()).intersection(datakeys)
+                    for tag in availableTags:
+                        nstory = 0
+                        if tag in otherattrkeys:
+                            attributes[attrmap[tag]].append(data['tags'][tag])
+                        elif tag in levelkeys:
+                            try:
+                                nstory+=int(data['tags'][tag])
+                            except:
+                                pass
+                        if nstory>0:
+                            attributes['numstories'].append(nstory)
+                    for attr in attrkeys:
+                        if len(attributes[attr])!=fpcount:
+                            attributes[attr].append('NA')
+            attributes['buildingheight'] = [round(float(height)*3.28084,1) if height!='NA' else None
+                                            for height in attributes['buildingheight']]
+            attributes['erabuilt'] = [int(year[:4]) if year!='NA' else None
+                                      for year in attributes['erabuilt']]
+            attributes['numstories'] = [nstories if nstories!='NA' else None
+                                        for nstories in attributes['numstories']]
 
-            print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}")
-
-            write_fp2geojson(footprints)
-
-            return footprints
-
+            print(f"\nFound a total of {fpcount} building footprints in {queryarea_printname}")
+            return footprints, attributes
+
         def get_ms_footprints(queryarea):
-            def fetch_roi(queryarea):
-                # Search for the query area using Nominatim API:
-                print(f"\nSearching for {queryarea}...")
-                queryarea = queryarea.replace(" ", "+").replace(',','+')
-
-                queryarea_formatted = ""
-                for i, j in groupby(queryarea):
-                    if i=='+':
-                        queryarea_formatted += i
-                    else:
-                        queryarea_formatted += ''.join(list(j))
-
-                nominatimquery = ('https://nominatim.openstreetmap.org/search?' +
-                                  f"q={queryarea_formatted}&format=jsonv2")
-                r = requests.get(nominatimquery)
-                datalist = r.json()
-
-                areafound = False
-                for data in datalist:
-                    queryarea_osmid = data['osm_id']
-                    queryarea_name = data['display_name']
-                    if(data['osm_type']=='relation' and
-                       'university' in queryarea.lower() and
-                       data['type']=='university'):
-                        areafound = True
-                        break
-                    elif (data['osm_type']=='relation' and
-                          data['type']=='administrative'):
-                        areafound = True
-                        break
-
-                if areafound==True:
-                    print(f"Found {queryarea_name}")
-                else:
-                    sys.exit(f"Could not locate an area named {queryarea}. " +
-                             'Please check your location query to make sure ' +
-                             'it was entered correctly.')
-
-                queryarea_printname = queryarea_name.split(",")[0]
-
-                print(f"\nFetching Microsoft building footprints for {queryarea_printname}...")
-                url = 'http://overpass-api.de/api/interpreter'
-
-                if isinstance(queryarea,str):
-                    query = f"""
-                    [out:json][timeout:5000];
-                    rel({queryarea_osmid});
-                    out geom;
-                    """
-                    r = requests.get(url, params={'data': query})
-
-                    datastruct = r.json()['elements'][0]
-
-                    if datastruct['tags']['type'] in ['boundary','multipolygon']:
-                        lss = []
-                        for coorddict in datastruct['members']:
-                            if coorddict['role']=='outer':
-                                ls = []
-                                for coord in coorddict['geometry']:
-                                    ls.append([coord['lon'],coord['lat']])
-                                lss.append(LineString(ls))
-
-                        merged = linemerge([*lss])
-                        borders = unary_union(merged) # linestrings to a MultiLineString
-                        polygons = list(polygonize(borders))
-
-                        if len(polygons)==1:
-                            bpoly = polygons[0]
-                        else:
-                            bpoly = MultiPolygon(polygons)
-
-                    else:
-                        sys.exit(f"Could not retrieve the boundary for {queryarea}. " +
-                                 'Please check your location query to make sure ' +
-                                 'it was entered correctly.')
-                return bpoly, queryarea_printname
-
             def deg2num(lat, lon, zoom):
                 lat_rad = math.radians(lat)
                 n = 2**zoom
@@ -328,30 +333,6 @@ def bbox2quadkeys(bpoly):
                 quadkeys = list(set(quadkeys))
                 return quadkeys
 
-            def bbox2poly(queryarea):
-                if len(queryarea)%2==0 and len(queryarea)!=0:
-                    if len(queryarea)==4:
-                        bpoly = box(*queryarea)
-                        queryarea_printname = (f"the bounding box: {list(queryarea)}")
-                    elif len(queryarea)>4:
-                        queryarea_printname = 'the bounding box: ['
-                        bpolycoords = []
-                        for i in range(int(len(queryarea)/2)):
-                            bpolycoords = bpolycoords.append([queryarea[2*i], queryarea[2*i+1]])
-                            queryarea_printname+= f'{queryarea[2*i]}, {queryarea[2*i+1]}, '
-                        bpoly = Polygon(bpolycoords)
-                        queryarea_printname = queryarea_printname[:-2]+']'
-                    else:
-                        raise ValueError('Less than two latitude longitude pairs were entered to define the bounding box entry. ' +
-                                         'A bounding box can be defined by using at least two longitude/latitude pairs.')
-                else:
-                    raise ValueError('Incorrect number of elements detected in the tuple for the bounding box. '
-                                     'Please check to see if you are missing a longitude or latitude value.')
-
-                print(f"\nFetching Microsoft building footprints for {queryarea_printname}...")
-
-                return bpoly, queryarea_printname
-
             def parse_file_size(strsize):
                 strsize = strsize.lower()
                 if 'gb' in strsize:
@@ -392,38 +373,24 @@ def download_ms_tiles(quadkeys,bpoly):
                             footprints.append(row['geometry']['coordinates'][0])
                             height = row['properties']['height']
                             if height!=-1:
-                                bldgheights.append(height*3.28084)
+                                bldgheights.append(round(height*3.28084,1))
                             else:
-                                bldgheights.append(0)
+                                bldgheights.append(None)
 
                 return (footprints, bldgheights)
-
-            def write_fp2geojson(footprints, bldgheights, output_filename='footprints.geojson'):
-                geojson = {'type':'FeatureCollection',
-                           "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
-                           'features':[]}
-                for fp,bldgheight in zip(footprints,bldgheights):
-                    feature = {'type':'Feature',
-                               'properties':{},
-                               'geometry':{'type':'Polygon',
-                                           'coordinates':[]}}
-                    feature['geometry']['coordinates'] = [fp]
-                    feature['properties']['bldgheight'] = bldgheight
-                    geojson['features'].append(feature)
-
-                with open(output_filename, 'w') as output_file:
-                    json.dump(geojson, output_file, indent=2)
 
             if isinstance(queryarea,tuple):
-                bpoly, queryarea_printname = bbox2poly(queryarea)
+                bpoly, queryarea_printname = self.__bbox2poly(queryarea)
             elif isinstance(queryarea,str):
-                bpoly, queryarea_printname = fetch_roi(queryarea)
+                bpoly, queryarea_printname, _ = self.__fetch_roi(queryarea)
 
+            print(f"\nFetching Microsoft building footprints for {queryarea_printname}...")
             quadkeys = bbox2quadkeys(bpoly)
-            (footprints, bldgheights) = download_ms_tiles(quadkeys, bpoly)
-            write_fp2geojson(footprints,bldgheights)
+            attributes = {'buildingheight':[]}
+            (footprints, attributes['buildingheight']) = download_ms_tiles(quadkeys, bpoly)
 
             print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}")
-            return (footprints,bldgheights)
+            return footprints,attributes
 
         def get_usastruct_footprints(queryarea):
             if isinstance(queryarea,tuple):
@@ -451,48 +418,6 @@ def get_usastruct_footprints(queryarea):
 
             print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}")
             return footprints
-
-        def polygon_area(lats, lons):
-
-            radius = 20925721.784777 # Earth's radius in feet
-
-            from numpy import arctan2, cos, sin, sqrt, pi, append, diff, deg2rad
-            lats = deg2rad(lats)
-            lons = deg2rad(lons)
-
-            # Line integral based on Green's Theorem, assumes spherical Earth
-
-            #close polygon
-            if lats[0]!=lats[-1]:
-                lats = append(lats, lats[0])
-                lons = append(lons, lons[0])
-
-            #colatitudes relative to (0,0)
-            a = sin(lats/2)**2 + cos(lats)* sin(lons/2)**2
-            colat = 2*arctan2( sqrt(a), sqrt(1-a) )
-
-            #azimuths relative to (0,0)
-            az = arctan2(cos(lats) * sin(lons), sin(lats)) % (2*pi)
-
-            # Calculate diffs
-            # daz = diff(az) % (2*pi)
-            daz = diff(az)
-            daz = (daz + pi) % (2 * pi) - pi
-
-            deltas=diff(colat)/2
-            colat=colat[0:-1]+deltas
-
-            # Perform integral
-            integrands = (1-cos(colat)) * daz
-
-            # Integrate
-            area = abs(sum(integrands))/(4*pi)
-
-            area = min(area,1-area)
-            if radius is not None: #return in units of radius
-                return area * 4*pi*radius**2
-            else: #return in ratio of sphere total area
-                return area
 
         def load_footprint_data(fpfile,fpSource,attrmapfile):
             """
@@ -639,39 +564,79 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource):
                     fpfile)
                 fpSource = fpfile
 
-            return (footprints_out, attributes, fpSource)
+            return footprints_out, attributes, fpSource
+
+        def polygon_area(lats, lons):
+
+            radius = 20925721.784777 # Earth's radius in feet
+
+            from numpy import arctan2, cos, sin, sqrt, pi, append, diff, deg2rad
+            lats = deg2rad(lats)
+            lons = deg2rad(lons)
+
+            # Line integral based on Green's Theorem, assumes spherical Earth
+
+            #close polygon
+            if lats[0]!=lats[-1]:
+                lats = append(lats, lats[0])
+                lons = append(lons, lons[0])
+
+            #colatitudes relative to (0,0)
+            a = sin(lats/2)**2 + cos(lats)* sin(lons/2)**2
+            colat = 2*arctan2( sqrt(a), sqrt(1-a) )
+
+            #azimuths relative to (0,0)
+            az = arctan2(cos(lats) * sin(lons), sin(lats)) % (2*pi)
+
+            # Calculate diffs
+            # daz = diff(az) % (2*pi)
+            daz = diff(az)
+            daz = (daz + pi) % (2 * pi) - pi
+
+            deltas=diff(colat)/2
+            colat=colat[0:-1]+deltas
+
+            # Perform integral
+            integrands = (1-cos(colat)) * daz
+
+            # Integrate
+            area = abs(sum(integrands))/(4*pi)
+
+            area = min(area,1-area)
+            if radius is not None: #return in units of radius
+                return area * 4*pi*radius**2
+            else: #return in ratio of sphere total area
+                return area
 
         def fp_source_selector(self):
             if self.fpSource=='osm':
-                footprints = get_osm_footprints(self.queryarea)
-                bldgheights = []
+                footprints, attributes = get_osm_footprints(self.queryarea)
             elif self.fpSource=='ms':
-                (footprints,bldgheights) = get_ms_footprints(self.queryarea)
+                footprints,attributes = get_ms_footprints(self.queryarea)
            elif self.fpSource=='usastr':
                footprints = get_usastruct_footprints(self.queryarea)
-               bldgheights = []
-           return (footprints,bldgheights)
+               attributes = {}
+           return footprints, attributes
 
        self.queryarea = queryarea
        self.fpSource = fpSource
        if isinstance(self.queryarea,str):
            if 'geojson' in queryarea.lower():
-               (self.footprints,self.attributes,self.fpSource) = load_footprint_data(
+               self.footprints,self.attributes,self.fpSource = load_footprint_data(
                    self.queryarea,
                    self.fpSource,
                    attrmap)
            else:
-               (self.footprints,bldgheights) = fp_source_selector(self)
+               self.footprints,self.attributes = fp_source_selector(self)
        elif isinstance(queryarea,tuple):
-           (self.footprints,bldgheights) = fp_source_selector(self)
+           self.footprints,self.attributes = fp_source_selector(self)
        elif isinstance(queryarea,list):
-           self.footprints = []
-           bldgheights = []
           for query in self.queryarea:
-               (fps, bldghts) = fp_source_selector(query)
+               (fps, attributes) = fp_source_selector(query)
+               attrkeys = list(attributes.keys())
               self.footprints.extend(fps)
-               bldgheights.extend(bldghts)
+               for key in attrkeys:
+                   self.attributes[key].extend(attributes[key])
        else:
            sys.exit('Incorrect location entry. The location entry must be defined as' +
                     ' 1) a string or a list of strings containing the name(s) of the query areas,' +
                     ' 2) a tuple containing the coordinates for two opposite vertices of the' +
                     ' bounding box of interest in (lon1, lat1, lon2, lat2) format.' +
                     ' For defining a bounding box, longitude and latitude values' +
                     ' shall be entered for the vertex pairs of any of the two' +
                     ' diagonals of the rectangular bounding box.')
-
-       if bldgheights!=[]:
-           self.attributes['buildingheight'] = bldgheights.copy()
 
-       self.fpAreas = []
+       self.attributes['fpAreas'] = []
        for fp in self.footprints:
           lons = []
          lats = []
          for pt in fp:
              lons.append(pt[0])
              lats.append(pt[1])
-         self.fpAreas.append(polygon_area(lats, lons))
+         self.attributes['fpAreas'].append(int(polygon_area(lats, lons)))
+
+       # Write the footprint data into a GeoJSON file:
+       self.__write_fp2geojson(self.footprints, self.attributes, outputFile)
\ No newline at end of file
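With the attribute support added in this patch, a plain footprint query now also returns source attributes. A small sketch (the location is an arbitrary example):

    from brails.workflow.FootprintHandler import FootprintHandler

    fpHandler = FootprintHandler()
    # OSM footprints now come back with buildingheight, erabuilt, numstories
    # and roofshape entries (None where the source carries no data); fpAreas
    # is appended for every source, and the combined result is also written
    # to footprints.geojson by default:
    fpHandler.fetch_footprint_data('Alameda, CA', fpSource='osm')
    for key, values in fpHandler.attributes.items():
        print(key, values[:5])
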
The location entry must be defined as' + ' 1) a string or a list of strings containing the name(s) of the query areas,' + @@ -756,15 +721,15 @@ def fp_source_selector(self): ' For defining a bounding box, longitude and latitude values' + ' shall be entered for the vertex pairs of any of the two' + ' diagonals of the rectangular bounding box.') - - if bldgheights!=[]: - self.attributes['buildingheight'] = bldgheights.copy() - self.fpAreas = [] + self.attributes['fpAreas'] = [] for fp in self.footprints: lons = [] lats = [] for pt in fp: lons.append(pt[0]) lats.append(pt[1]) - self.fpAreas.append(polygon_area(lats, lons)) \ No newline at end of file + self.attributes['fpAreas'].append(int(polygon_area(lats, lons))) + + # Write the footprint data into a GeoJSON file: + self.__write_fp2geojson(self.footprints, self.attributes, outputFile) \ No newline at end of file From 80f78be0ba7603774a8beefee55a3abe1142fee7 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 8 Mar 2024 11:32:59 -0800 Subject: [PATCH 12/63] Added error rectification for OSM data & a method to remove bad footprints --- brails/workflow/FootprintHandler.py | 82 +++++++++++++++++++++++------ 1 file changed, 65 insertions(+), 17 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 7bf6469..380bddd 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-07-2024 +# 03-08-2024 import math import json @@ -192,6 +192,35 @@ def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None, """ def get_osm_footprints(queryarea): + def cleanstr(inpstr): + return ''.join(char for char in inpstr if not char.isalpha() + and not char.isspace() and + (char == '.' 
or not char.isalnum())) + def yearstr2int(inpstr): + if inpstr!='NA': + yearout = cleanstr(inpstr) + yearout = yearout[:4] + if len(yearout)==4: + try: + yearout = int(yearout) + except: + yearout = None + else: + yearout = None + else: + yearout = None + return yearout + def height2float(inpstr): + if inpstr!='NA': + heightout = cleanstr(inpstr) + try: + heightout = round(float(heightout)*3.28084,1) + except: + heightout = None + else: + heightout = None + return heightout + if isinstance(queryarea,str): bpoly, queryarea_printname, osmid = self.__fetch_roi(queryarea) queryarea_turboid = osmid + 3600000000 @@ -273,10 +302,9 @@ def get_osm_footprints(queryarea): for attr in attrkeys: if len(attributes[attr])!=fpcount: attributes[attr].append('NA') - attributes['buildingheight'] = [round(float(height)*3.28084,1) if height!='NA' else None + attributes['buildingheight'] = [height2float(height) for height in attributes['buildingheight']] - attributes['erabuilt'] = [int(year[:4]) if year!='NA' else None - for year in attributes['erabuilt']] + attributes['erabuilt'] = [yearstr2int(year) for year in attributes['erabuilt']] attributes['numstories'] = [nstories if nstories!='NA' else None for nstories in attributes['numstories']] @@ -403,7 +431,10 @@ def get_usastruct_footprints(queryarea): print(f"\nFetching FEMA USA Structures footprint data for {queryarea_printname}...") - query = f'https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/rest/services/USA_Structures_View/FeatureServer/0/query?geometry={queryarea[0]},{queryarea[1]},{queryarea[2]},{queryarea[3]}&geometryType=esriGeometryEnvelope&inSR=4326&spatialRel=esriSpatialRelIntersects&f=pjson' + query = ('https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/' + + 'rest/services/USA_Structures_View/FeatureServer/0/query?' 
+ + f'geometry={queryarea[0]},{queryarea[1]},{queryarea[2]},{queryarea[3]}' + '&geometryType=esriGeometryEnvelope&inSR=4326&spatialRel=esriSpatialRelIntersects&f=pjson') r = requests.get(query) if 'error' in r.text: @@ -584,6 +615,12 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): pass return (footprints_out, attributes) + + # Read the GeoJSON file and check if all the data in the file is + # point data: + with open(fpfile) as f: + data = json.load(f)['features'] + ptdata = all(loc['geometry']['type']=='Point' for loc in data) if attrmapfile: # Create a dictionary for mapping the attributes in the GeoJSON @@ -594,13 +631,7 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): for line in lines: lout = line.split(':') if lout[1]!='': - attrmap[lout[1]] = lout[0] - - # Read the GeoJSON file and check if all the data in the file is - # point data: - with open(fpfile) as f: - data = json.load(f)['features'] - ptdata = all(loc['geometry']['type']=='Point' for loc in data) + attrmap[lout[1]] = lout[0] # Identify the attribute keys in the GeoJSON file: attrkeys0 = list(data[0]['properties'].keys()) @@ -697,19 +728,19 @@ def fp_source_selector(self): self.fpSource = fpSource if isinstance(self.queryarea,str): if 'geojson' in queryarea.lower(): - self.footprints,self.attributes,self.fpSource = load_footprint_data( + footprints,self.attributes,self.fpSource = load_footprint_data( self.queryarea, self.fpSource, attrmap) else: - self.footprints,self.attributes = fp_source_selector(self) + footprints,self.attributes = fp_source_selector(self) elif isinstance(queryarea,tuple): - self.footprints,self.attributes = fp_source_selector(self) + footprints,self.attributes = fp_source_selector(self) elif isinstance(queryarea,list): for query in self.queryarea: (fps, attributes) = fp_source_selector(query) attrkeys = list(attributes.keys()) - self.footprints.extend(fps) + footprints.extend(fps) for key in attrkeys: self.attributes[key].extend(attributes[key]) else: @@ -723,13 +754,30 @@ def fp_source_selector(self): ' diagonals of the rectangular bounding box.') self.attributes['fpAreas'] = [] - for fp in self.footprints: + for fp in footprints: lons = [] lats = [] for pt in fp: lons.append(pt[0]) lats.append(pt[1]) self.attributes['fpAreas'].append(int(polygon_area(lats, lons))) + + # Calculate centroids of the footprints and remove footprint data that + # does not form a polygon: + self.centroids = [] + ind_remove = [] + for (ind,footprint) in enumerate(footprints): + try: + self.centroids.append(Polygon(footprint).centroid) + self.footprints.append(footprint) + except: + ind_remove.append(ind) + pass + # Remove attribute corresponding to the removed footprints: + for i in sorted(ind_remove, reverse=True): + for key in self.attributes.keys(): + del self.attributes[key][i] + # Write the footprint data into a GeoJSON file: self.__write_fp2geojson(self.footprints, self.attributes, outputFile) \ No newline at end of file From 909270b5502e6b9aa2ae9f5d0ee84f1fdad30eda Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 8 Mar 2024 13:09:47 -0800 Subject: [PATCH 13/63] ImHandler now checks the validity of API keys cost-free --- brails/workflow/ImHandler.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/brails/workflow/ImHandler.py b/brails/workflow/ImHandler.py index 3fd68b0..88047fd 100644 --- a/brails/workflow/ImHandler.py +++ b/brails/workflow/ImHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 02-07-2024 +# 03-08-2024 import os @@ 
-60,24 +60,20 @@ class ImageHandler: def __init__(self,apikey: str): - # Check if the provided Google API Key successfully obtains street-level - # imagery for Doe Memorial Library of UC Berkeley: - responseStreet = requests.get('https://maps.googleapis.com/maps/api/streetview?' + - 'size=600x600&location=37.87251874078189,' + - '-122.25960286494328&heading=280&fov=120&' + - f"pitch=20&key={apikey}").ok + # Check if the provided Google API Key successfully obtains street view + # imagery metadata for Doe Memorial Library of UC Berkeley: + responseStreet = requests.get('https://maps.googleapis.com/maps/api/streetview/metadata?' + + 'location=37.8725187407,-122.2596028649' + + '&source=outdoor' + + f'&key={apikey}') # If the requested image cannot be downloaded, notify the user of the # error and stop program execution: - if responseStreet==False: + if 'error' in responseStreet.text.lower(): error_message = ('Google API key error. The entered API key is valid ' + 'but does not have Street View Static API enabled. ' - + 'Please enter a key that has both Maps Static API ' - + 'and Street View Static API enabled.') - else: - error_message = None - - if error_message!=None: + + 'Please enter a key that has the Street View ' + + 'Static API enabled.') sys.exit(error_message) self.apikey = apikey From d2c427aee619e3e262a9752e25194d696b81d071 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 12 Mar 2024 21:34:31 -0700 Subject: [PATCH 14/63] Added common coordinate operations to utils to reduce code repetition --- brails/utils/geoTools.py | 111 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 brails/utils/geoTools.py diff --git a/brails/utils/geoTools.py b/brails/utils/geoTools.py new file mode 100644 index 0000000..ef2fc07 --- /dev/null +++ b/brails/utils/geoTools.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +# +# Copyright (c) 2024 The Regents of the University of California +# +# This file is part of BRAILS. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE.
+# +# You should have received a copy of the BSD 3-Clause License along with +# BRAILS. If not, see <http://www.opensource.org/licenses/>. +# +# Contributors: +# Barbaros Cetiner +# +# Last updated: +# 03-12-2024 + +from math import radians, sin, cos, atan2, sqrt +from shapely.geometry import Polygon + +def haversine_dist(p1,p2): + """ + Function that calculates the Haversine distance between two points. + + Input: Two points with coordinates defined as latitude and longitude + with each point defined as a list of two floating-point values + Output: Distance between the input points in feet + """ + + # Define mean radius of the Earth in kilometers: + R = 6371.0 + + # Convert coordinate values from degrees to radians + lat1 = radians(p1[0]); lon1 = radians(p1[1]) + lat2 = radians(p2[0]); lon2 = radians(p2[1]) + + # Compute the differences between the latitude and longitude values: + dlon = lon2 - lon1 + dlat = lat2 - lat1 + + # Compute the distance between two points as a proportion of Earth's + # mean radius: + a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 + c = 2 * atan2(sqrt(a), sqrt(1 - a)) + + # Compute distance between the two points in feet: + distance = R*c*3280.84 + return distance + +def mesh_polygon(polygon, rows:int, cols:int): + """ + Function that splits a polygon into individual rectangular polygons. + + Inputs: A Shapely polygon and the number of rows and columns of rectangular + cells requested to mesh the area of the polygon + Output: Individual rectangular cells stored as Shapely polygons + """ + + # Get bounds of the polygon: + min_x, min_y, max_x, max_y = polygon.bounds + + # Calculate dimensions of each cell: + width = (max_x - min_x) / cols + height = (max_y - min_y) / rows + + rectangles = [] + # For each cell: + for i in range(rows): + for j in range(cols): + # Calculate coordinates of the cell vertices: + x1 = min_x + j * width + y1 = min_y + i * height + x2 = x1 + width + y2 = y1 + height + + # Convert the computed geometry into a polygon: + poly = Polygon([(x1, y1), (x2, y1), (x2, y2), (x1, y2)]) + + # Check if the obtained cell intersects with the polygon: + if poly.intersects(polygon): + poly = poly.intersection(polygon) + # If the intersection is a finite geometry keep its envelope as + # a valid cell: + if poly.area > 0: + rectangles.append(poly.envelope) + return rectangles \ No newline at end of file From 7072b56cc5b6d1f15b75c6b5f1681429fa1af8b1 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 08:30:42 -0700 Subject: [PATCH 15/63] Added rst2pdf to dependencies to cover Linux systems --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index 47f0389..e0f006c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -10,3 +10,4 @@ sphinx_panels jsonpath2 sphinxcontrib.spelling==7.2.1 sphinx_toolbox +rst2pdf From 65a3b053c9fb636f126a815084bef87c8ff12f13 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:30:23 -0700 Subject: [PATCH 16/63] Updated the version number to the current --- docs/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index c1ed6c6..20f0440 100755 --- a/docs/index.rst +++ b/docs/index.rst @@ -5,7 +5,7 @@ |app| (|full tool name|) provides a set of Python modules that utilize deep learning (DL), and computer vision (CV) techniques to extract information from satellite and street level images.
The |app| framework also provides turn-key applications allowing users to put individual modules together to determine multiple attributes in a single pass or train general-purpose image classification, object detection, or semantic segmentation models. -This document covers the features and capabilities of Version 3.0 of the tool. Users are encouraged to comment on what additional features and capabilities they would like to see in future versions of the application through the |messageBoard|. +This document covers the features and capabilities of Version 3.1.1 of the tool. Users are encouraged to comment on what additional features and capabilities they would like to see in future versions of the application through the |messageBoard|. .. _lbl-front-matter: @@ -33,7 +33,7 @@ This document covers the features and capabilities of Version 3.0 of the tool. U common/user_manual/userGuide common/user_manual/troubleshooting common/user_manual/examples - common/user_manual/bugs + common/user_manual/bugs .. _lbl-technical-manual: From 48c4abe8d530b3db54f981950186306de63808ed Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:31:00 -0700 Subject: [PATCH 17/63] Populated the release notes for all version 3 releases --- docs/common/about/releasenotes.rst | 35 +++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/docs/common/about/releasenotes.rst b/docs/common/about/releasenotes.rst index 3bf6a98..82472c9 100755 --- a/docs/common/about/releasenotes.rst +++ b/docs/common/about/releasenotes.rst @@ -10,9 +10,38 @@ Major Version 3 The major version number was increased from 2 to 3 as changes were made to input and output formats of |app|. This means old examples will not be loaded in this version of the tool. - .. dropdown:: Version 3.1.0 (:blue:`Current`) :open: - **Release date:** September. 2022 + .. dropdown:: **Version 3.1.1** (:blue:`Current`) :open: + **Release date:** February 2024 - #. Coming soon + **Highlights** + + #. Enabled rapidtools feature to extract building-level imagery from NHERI RAPID aerial and street-level image data + #. Added the capability to pull National Structure Inventory as baseline building inventory + #. Updated TranspInventoryGenerator for more stable API queries + #. Updated FacadeParser for more effective memory utilization + + .. dropdown:: **Version 3.1.0** + + **Release date:** December 2023 + + **Highlights** + + #. Enabled Transportation inventory generation for roadways, bridges, tunnels, and railroads, + #. Revised FacadeParser to calculate dimensions from depth maps (as opposed to ray tracing from camera coordinates) for more robust dimension predictions, + #. Added FEMA USA Structures as an additional building footprint source. + + + .. dropdown:: **Version 3.0.0** + + **Release date:** September 2022 + + **Highlights** + + #. Added new modules for predicting building height, building window area, first-floor height, existence of chimneys, existence of garages, roof cover type, roof eave height, and roof pitch, + #. Streamlined location inputs by resolving the input through Nominatim API, + #. Enabled on-the-fly footprint parsing from Microsoft Footprint Database and OpenStreetMaps, + #. Added the capability to remove neighboring buildings from street-level and satellite imagery, + #. Enabled inventory output creation in a CSV format compatible with R2D, + #. Added pipelines for generalized image classification and semantic segmentation models.
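For readers trying these patches out, here is a minimal usage sketch of the two geoTools helpers introduced in PATCH 14 (haversine_dist and mesh_polygon). It is illustrative only and not part of the patch series: the coordinates below are hypothetical values chosen for illustration, and the snippet assumes BRAILS is installed so that brails.utils.geoTools resolves.

```python
# Minimal sketch (not part of the patch series). Assumes BRAILS is installed;
# the coordinates below are hypothetical values picked for illustration.
from shapely.geometry import Polygon
from brails.utils.geoTools import haversine_dist, mesh_polygon

# haversine_dist takes two [latitude, longitude] points and returns feet:
dist = haversine_dist([37.8716, -122.2727], [37.8725, -122.2596])

# mesh_polygon splits a polygon into a rows-by-cols grid of rectangular cells:
bpoly = Polygon([(-122.30, 37.85), (-122.20, 37.85),
                 (-122.20, 37.90), (-122.30, 37.90)])
cells = mesh_polygon(bpoly, rows=2, cols=3)

print(f'{dist:.1f} ft between points; polygon meshed into {len(cells)} cells')
```

Because mesh_polygon returns the envelope of each non-empty intersection, every cell it yields is rectangular, which is what the later USA Structures patches in this series rely on when issuing per-cell bounding-box queries.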
From 8448a56d2d4713f807600840645718072c4bb2c9 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:32:09 -0700 Subject: [PATCH 18/63] Updated the BRAILS Zenodo citation to latest --- docs/common/about/cite.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/common/about/cite.rst b/docs/common/about/cite.rst index 9af99c3..e9cf093 100644 --- a/docs/common/about/cite.rst +++ b/docs/common/about/cite.rst @@ -4,10 +4,10 @@ How to Cite *********** -To cite BRAILS, please make reference to both 1) the BRAILS repository published on Zenodo and 2) the SimCenter paper that provides an outline of the existing SimCenter tools listed below. +To cite BRAILS, please make reference to both 1) the BRAILS repository published on Zenodo and 2) the SimCenter paper that provides an outline of the existing SimCenter tools listed below. + + 1. Barbaros Cetiner, Charles Wang, Frank McKenna, Sascha Hornauer, Jinyan Zhao, Yunhui Guo, Stella X. Yu, Ertugrul Taciroglu & Kincho H. Law. (2024). NHERI-SimCenter/BRAILS: Version v3.1.1. Zenodo. `DOI: 10.5281/zenodo.10606032 <https://doi.org/10.5281/zenodo.10606032>`_ - 1. Barbaros Cetiner, Charles Wang, Frank McKenna, Sascha Hornauer, & Yunhui Guo. (2022). NHERI-SimCenter/BRAILS: Version v3.0.0. Zenodo. `DOI: 10.5281/zenodo.7132010 <https://doi.org/10.5281/zenodo.7132010>`_ - 2. Gregory G. Deierlein, Frank McKenna, Adam Zsarnóczay, Tracy Kijewski-Correa, Ahsan Kareem, Wael Elhaddad, Laura Lowes, Matt J. Schoettler, and Sanjay Govindjee (2020) A Cloud-Enabled Application Framework for Simulating Regional-Scale Impacts of From b6ebf16b35d1d309d5dd603a8d46755b5091236d Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:35:17 -0700 Subject: [PATCH 19/63] Updated the BRAILS citation --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index f7533de..0bd319b 100644 --- a/README.md +++ b/README.md @@ -66,12 +66,12 @@ NHERI-SimCenter nheri-simcenter@berkeley.edu Stella X. Yu and Ertugrul Taciroglu and Kincho H. Law}, - title = {BRAILS Release v3.1.0}, - month = jan, + title = {BRAILS Release v3.1.1}, + month = feb, year = 2024, publisher = {Zenodo}, - version = {v3.1.0}, - doi = {10.5281/zenodo.10448047}, - url = {https://doi.org/10.5281/zenodo.10448047} + version = {v3.1.1}, + doi = {10.5281/zenodo.10606032}, + url = {https://doi.org/10.5281/zenodo.10606032} } ``` From 87b263a7c2c1d5df92e0c9500d9c32295c7ae21f Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:45:26 -0700 Subject: [PATCH 20/63] Updated the version number and list of authors --- docs/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6df57d5..b8ea780 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,11 +9,11 @@ project = 'BRAILS' copyright = f"{str(datetime.today().year)}, The Regents of the University of California" -author = 'Barbaros Cetiner, Chaofeng Wang, Frank McKenna, Sascha Hornauer, Yunhui Guo' +author = 'Barbaros Cetiner, Chaofeng Wang, Frank McKenna, Sascha Hornauer, Jinyan Zhao, Yunhui Guo' # The short X.Y version #version = '1.0' # The full version, including alpha/beta/rc tags -release = '3.0.0' +release = '3.1.1' rst_prolog = """ .. |app| replace:: BRAILS @@ -28,7 +28,7 @@ .. |short tool id| replace:: BRAILS .. |tool github link| replace:: `BRAILS Github page`_ .. _brails Github page: https://github.com/NHERI-SimCenter/BRAILS -.. |tool version| replace:: 3.0.0 +.. |tool version| replace:: 3.1.1 .. |SimCenter| replace:: `SimCenter`_ .. 
_SimCenter: https://simcenter.designsafe-ci.org/ From 6f341f80f1fb89e4e71ec6879b42641122f02cec Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 09:46:02 -0700 Subject: [PATCH 21/63] Updated to pull version number from conf.py --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 20f0440..489b645 100755 --- a/docs/index.rst +++ b/docs/index.rst @@ -5,7 +5,7 @@ |app| (|full tool name|) provides a set of Python modules that utilize deep learning (DL), and computer vision (CV) techniques to extract information from satellite and street level images. The |app| framework also provides turn-key applications allowing users to put individual modules together to determine multiple attributes in a single pass or train general-purpose image classification, object detection, or semantic segmentation models. -This document covers the features and capabilities of Version 3.1.1 of the tool. Users are encouraged to comment on what additional features and capabilities they would like to see in future versions of the application through the |messageBoard|. +This document covers the features and capabilities of Version |release| of the tool. Users are encouraged to comment on what additional features and capabilities they would like to see in future versions of the application through the |messageBoard|. .. _lbl-front-matter: From 6e44a30285c4cf22b700cb2a65862986894e2f5b Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 10:57:43 -0700 Subject: [PATCH 22/63] Updated the tool name --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index b8ea780..de27b11 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,7 +21,7 @@ .. |s3harkName| replace:: SURF .. |surfName| replace:: SURF .. |brailsName| replace:: BRAILS -.. |full tool name| replace:: Building Recognition using AI at Large-Scale +.. |full tool name| replace:: Building and Infrastructure Recognition using AI at Large-Scale .. _MessageBoard: https://simcenter-messageboard.designsafe-ci.org/smf/index.php?board=10.0 .. |messageBoard| replace:: `MessageBoard`_ .. 
|short tool name| replace:: BRAILS From 615bd368d2b690b5022f8f7857442b6a1acdb54e Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 18:50:52 -0700 Subject: [PATCH 23/63] USA Struct downloads are now auto-grided to stay under API download limits --- brails/workflow/FootprintHandler.py | 289 +++++++++++++++++++++++++--- 1 file changed, 259 insertions(+), 30 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 380bddd..5f3768d 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-08-2024 +# 03-13-2024 import math import json @@ -50,6 +50,9 @@ from shapely.geometry import Point, Polygon, LineString, MultiPolygon, box from shapely.ops import linemerge, unary_union, polygonize from shapely.strtree import STRtree +from brails.utils.geoTools import haversine_dist, mesh_polygon, plot_polygon_cells +import concurrent.futures +from requests.adapters import HTTPAdapter, Retry class FootprintHandler: def __init__(self): @@ -420,35 +423,258 @@ def download_ms_tiles(quadkeys,bpoly): print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}") return footprints,attributes - def get_usastruct_footprints(queryarea): - if isinstance(queryarea,tuple): - queryarea_printname = (f"the bounding box: [{queryarea[0]}," - f"{queryarea[1]}, {queryarea[2]}, " - f"{queryarea[3]}]") + def get_usastruct_footprints(queryarea): + def get_usastruct_bldg_counts(bpoly): + # Get the coordinates of the bounding box for input polygon bpoly: + bbox = bpoly.bounds + + # Get the number of buildings in the computed bounding box: + query = ('https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/' + + 'rest/services/USA_Structures_View/FeatureServer/0/query?' 
+ + f'geometry={bbox[0]},{bbox[1]},{bbox[2]},{bbox[3]}' + + '&geometryType=esriGeometryEnvelope&inSR=4326' + + '&spatialRel=esriSpatialRelIntersects' + + '&returnCountOnly=true&f=json') + + s = requests.Session() + retries = Retry(total=5, + backoff_factor=0.1, + status_forcelist=[500, 502, 503, 504]) + s.mount('https://', HTTPAdapter(max_retries=retries)) - elif isinstance(queryarea,str): - sys.exit("This option is not yet available for FEMA USA Structures footprint data") + r = s.get(query) + totalbldgs = r.json()['count'] + + return totalbldgs + + def get_polygon_cells(bpoly, totalbldgs = None, nfeaturesInCell=4000, + plotfout=False): + if totalbldgs is None: + # Get the number of buildings in the input polygon bpoly: + totalbldgs = get_usastruct_bldg_counts(bpoly) + + if totalbldgs>nfeaturesInCell: + # Calculate the number of cells required to cover the polygon area with + # 20 percent margin of error: + ncellsRequired = round(1.2*totalbldgs/nfeaturesInCell) + + # Get the coordinates of the bounding box for input polygon bpoly: + bbox = bpoly.bounds + + # Calculate the horizontal and vertical dimensions of the bounding box: + xdist = haversine_dist((bbox[0],bbox[1]),(bbox[2],bbox[1])) + ydist = haversine_dist((bbox[0],bbox[1]),(bbox[0],bbox[3])) + + # Determine the bounding box aspect ratio defined (as a number greater + # than 1) and the long direction of the bounding box: + if xdist>ydist: + bboxAspectRatio = math.ceil(xdist/ydist) + longSide = 1 + else: + bboxAspectRatio = math.ceil(ydist/xdist) + longSide = 2 + + # Calculate the cells required on the short side of the bounding box (n) + # using the relationship ncellsRequired = bboxAspectRatio*n^2: + n = math.ceil(math.sqrt(ncellsRequired/bboxAspectRatio)) + + # Based on the calculated n value determined the number of rows and + # columns of cells required: + if longSide==1: + rows = bboxAspectRatio*n + cols = n + else: + rows = n + cols = bboxAspectRatio*n + + # Determine the coordinates of each cell covering bpoly: + rectangles = mesh_polygon(bpoly, rows, cols) + else: + rectangles = [bpoly.envelope] + # Plot the generated mesh: + if plotfout: + plot_polygon_cells(bpoly, rectangles, plotfout) + + return rectangles - print(f"\nFetching FEMA USA Structures footprint data for {queryarea_printname}...") + def refine_polygon_cells(premCells, nfeaturesInCell=4000): + # Download the building count for each cell: + pbar = tqdm(total=len(premCells), desc='Obtaining the number of buildings in each cell') + results = {} + with concurrent.futures.ThreadPoolExecutor() as executor: + future_to_url = { + executor.submit(get_usastruct_bldg_counts, rect): rect + for rect in premCells + } + for future in concurrent.futures.as_completed(future_to_url): + rect = future_to_url[future] + pbar.update(n=1) + try: + results[rect] = future.result() + except Exception as exc: + results[rect] = None + print("%r generated an exception: %s" % (rect, exc)) + + indRemove = [] + cells2split = [] + cellsKeep = premCells.copy() + for ind,rect in enumerate(premCells): + totalbldgs = results[rect] + if totalbldgs is not None: + if totalbldgs==0: + indRemove.append(ind) + elif totalbldgs>nfeaturesInCell: + indRemove.append(ind) + cells2split.append(rect) + + for i in sorted(indRemove, reverse=True): + del cellsKeep[i] + + cellsSplit = [] + for rect in cells2split: + rectangles = get_polygon_cells(rect, totalbldgs=results[rect]) + cellsSplit+=rectangles + + return cellsKeep, cellsSplit - query = ('https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/' + - 
'rest/services/USA_Structures_View/FeatureServer/0/query?' + - f'geometry={queryarea[0]},{queryarea[1]},{queryarea[2]},{queryarea[3]}' - '&geometryType=esriGeometryEnvelope&inSR=4326&spatialRel=esriSpatialRelIntersects&f=pjson') + def download_ustruct_bldgattr(cell): + rect = cell.bounds + s = requests.Session() + retries = Retry(total=5, + backoff_factor=0.1, + status_forcelist=[500, 502, 503, 504]) + s.mount('https://', HTTPAdapter(max_retries=retries)) + query = ('https://services2.arcgis.com/FiaPA4ga0iQKduv3/ArcGIS/' + + 'rest/services/USA_Structures_View/FeatureServer/0/query?' + + f'geometry={rect[0]},{rect[1]},{rect[2]},{rect[3]}' + + '&outFields=BUILD_ID,HEIGHT'+ + '&geometryType=esriGeometryEnvelope&inSR=4326' + + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + r = s.get(query) + datalist = r.json()['features'] + ids = [] + footprints = [] + bldgheight = [] + for data in datalist: + footprint = data['geometry']['rings'][0] + bldgid = data['attributes']['BUILD_ID'] + if bldgid not in ids: + ids.append(bldgid) + footprints.append(footprint) + height = data['attributes']['HEIGHT'] + try: + height = round(float(height*3.28084),1) + except: + height = None + bldgheight.append(height) + + return (ids, footprints, bldgheight) - r = requests.get(query) - if 'error' in r.text: - sys.exit("Data server is currently unresponsive." + - " Please try again later.") + def download_ustruct_bldgattr4region(cellsFinal,bpoly): + # Download building attribute data for each cell: + pbar = tqdm(total=len(cellsFinal), desc='Obtaining the building attributes for each cell') + results = {} + with concurrent.futures.ThreadPoolExecutor() as executor: + future_to_url = { + executor.submit(download_ustruct_bldgattr, cell): cell + for cell in cellsFinal + } + for future in concurrent.futures.as_completed(future_to_url): + cell = future_to_url[future] + pbar.update(n=1) + try: + results[cell] = future.result() + except Exception as exc: + results[cell] = None + print("%r generated an exception: %s" % (cell, exc)) + pbar.close() + + # Parse the API results into building id, footprints and height information: + ids = [] + footprints = [] + bldgheight = [] + for cell in tqdm(cellsFinal): + res = results[cell] + ids+=res[0] + footprints+=res[1] + bldgheight+=res[2] - datalist = r.json()['features'] - footprints = [] - for data in datalist: - footprint = data['geometry']['rings'][0] - footprints.append(footprint) + # Remove the duplicate footprint data by recording the API outputs to a + # dictionary: + data = {} + for ind,bldgid in enumerate(ids): + data[bldgid] = [footprints[ind],bldgheight[ind]] + + # Calculate building centroids and save the API outputs into their + # corresponding variables: + footprints = [] + attributes = {'buildingheight':[]} + centroids = [] + for value in data.values(): + fp = value[0] + centroids.append(Polygon(fp).centroid) + footprints.append(fp) + attributes['buildingheight'].append(value[1]) + + # Identify the building centroids that fall outside of bpoly: + results = {} + with concurrent.futures.ThreadPoolExecutor() as executor: + future_to_url = { + executor.submit(bpoly.contains, cent): cent + for cent in centroids + } + for future in concurrent.futures.as_completed(future_to_url): + cent = future_to_url[future] + try: + results[cent] = future.result() + except Exception as exc: + results[cent] = None + print("%r generated an exception: %s" % (cent, exc)) + indRemove = [] + for ind, cent in enumerate(centroids): + if not results[cent]: + indRemove.append(ind) + + # 
Remove data corresponding to centroids that fall outside bpoly: + for i in sorted(indRemove, reverse=True): + del footprints[i] + del attributes['buildingheight'][i] + + return footprints, attributes + + if isinstance(queryarea,tuple): + bpoly, queryarea_printname = self.__bbox2poly(queryarea) + elif isinstance(queryarea,str): + bpoly, queryarea_printname, _ = self.__fetch_roi(queryarea) + + plotCells = True + + if plotCells: + meshInitialfout = queryarea_printname.replace(' ','_') + '_Mesh_Initial.png' + meshFinalfout = queryarea_printname.replace(' ','_') + '_Mesh_Final.png' - print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}") - return footprints + print('\nMeshing the defined area...') + cellsPrem = get_polygon_cells(bpoly, plotfout=meshInitialfout) + + if len(cellsPrem)>1: + cellsFinal = [] + cellsSplit = cellsPrem.copy() + while len(cellsSplit)!=0: + cellsKeep, cellsSplit = refine_polygon_cells(cellsSplit) + cellsFinal+=cellsKeep + print(f'\nMeshing complete. Split {queryarea_printname} into {len(cellsFinal)} cells') + else: + cellsFinal = cellsPrem.copy() + print(f'\nMeshing complete. Covered {queryarea_printname} with a rectangular cell') + + if plotCells: + plot_polygon_cells(bpoly, cellsFinal, meshFinalfout) + + footprints, attributes = download_ustruct_bldgattr4region(cellsFinal,bpoly) + print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}") + + return footprints, attributes def load_footprint_data(fpfile,fpSource,attrmapfile): """ @@ -718,10 +944,12 @@ def fp_source_selector(self): if self.fpSource=='osm': footprints, attributes = get_osm_footprints(self.queryarea) elif self.fpSource=='ms': - footprints,attributes = get_ms_footprints(self.queryarea) + footprints, attributes = get_ms_footprints(self.queryarea) elif self.fpSource=='usastr': - footprints = get_usastruct_footprints(self.queryarea) - attributes = {} + footprints, attributes = get_usastruct_footprints(self.queryarea) + else: + print('Unimplemented footprint source. 
Setting footprint source to OSM') + footprints, attributes = get_osm_footprints(self.queryarea) return footprints, attributes self.queryarea = queryarea @@ -764,18 +992,19 @@ def fp_source_selector(self): # Calculate centroids of the footprints and remove footprint data that # does not form a polygon: + self.footprints = [] self.centroids = [] - ind_remove = [] + indRemove = [] for (ind,footprint) in enumerate(footprints): try: self.centroids.append(Polygon(footprint).centroid) self.footprints.append(footprint) except: - ind_remove.append(ind) + indRemove.append(ind) pass # Remove attribute corresponding to the removed footprints: - for i in sorted(ind_remove, reverse=True): + for i in sorted(indRemove, reverse=True): for key in self.attributes.keys(): del self.attributes[key][i] From c1f43de26fa09d900c40e588da935b7061673ad9 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 13 Mar 2024 18:55:47 -0700 Subject: [PATCH 24/63] Added a plotting function to display created geo meshes --- brails/utils/geoTools.py | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/brails/utils/geoTools.py b/brails/utils/geoTools.py index ef2fc07..337600a 100644 --- a/brails/utils/geoTools.py +++ b/brails/utils/geoTools.py @@ -37,10 +37,11 @@ # Barbaros Cetiner # # Last updated: -# 03-12-2024 +# 03-13-2024 from math import radians, sin, cos, atan2, sqrt from shapely.geometry import Polygon +import matplotlib.pyplot as plt def haversine_dist(p1,p2): """ @@ -108,4 +109,27 @@ def mesh_polygon(polygon, rows:int, cols:int): # a valid cell: if poly.area > 0: rectangles.append(poly.envelope) - return rectangles \ No newline at end of file + return rectangles + +def plot_polygon_cells(bpoly, rectangles, fout=False): + """ + Function that plots the mesh for a polygon and saves the plot into a PNG image + + Inputs: A Shapely polygon and a list of rectangular mesh cells saved as + Shapely polygons. 
Optional input is a string containing the name of the + output file + """ + + if bpoly.geom_type == 'MultiPolygon': + for poly in bpoly.geoms: + plt.plot(*poly.exterior.xy,'k') + else: + plt.plot(*bpoly.exterior.xy,'k') + for rect in rectangles: + try: + plt.plot(*rect.exterior.xy) + except: + pass + if fout: + plt.savefig(fout, dpi=600, bbox_inches="tight") + plt.show() \ No newline at end of file From 52ca37242e5756e211884668f0f1c0d17e6d3b48 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Thu, 14 Mar 2024 19:23:27 -0700 Subject: [PATCH 25/63] Relaxed nominatim restrictions to include small towns --- brails/workflow/FootprintHandler.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 5f3768d..fb5d3bc 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-13-2024 +# 03-14-2024 import math import json @@ -83,13 +83,7 @@ def __fetch_roi(self,queryarea): for data in datalist: queryarea_osmid = data['osm_id'] queryarea_name = data['display_name'] - if(data['osm_type']=='relation' and - 'university' in queryarea.lower() and - data['type']=='university'): - areafound = True - break - elif (data['osm_type']=='relation' and - data['type']=='administrative'): + if data['osm_type']=='relation': areafound = True break From ddf9993f1180342679102d9ba6dbcb1c9eab4359 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 15 Mar 2024 17:39:51 -0700 Subject: [PATCH 26/63] Added function that writes a Shapely polygon into a GeoJSON file --- brails/utils/geoTools.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/brails/utils/geoTools.py b/brails/utils/geoTools.py index 337600a..551c88e 100644 --- a/brails/utils/geoTools.py +++ b/brails/utils/geoTools.py @@ -37,11 +37,13 @@ # Barbaros Cetiner # # Last updated: -# 03-13-2024 +# 03-15-2024 from math import radians, sin, cos, atan2, sqrt +from shapely import to_geojson from shapely.geometry import Polygon import matplotlib.pyplot as plt +import json def haversine_dist(p1,p2): """ @@ -132,4 +134,27 @@ def plot_polygon_cells(bpoly, rectangles, fout=False): pass if fout: plt.savefig(fout, dpi=600, bbox_inches="tight") - plt.show() \ No newline at end of file + plt.show() + +def write_polygon2geojson(poly,outfile): + """ + Function that writes a single Shapely polygon into a GeoJSON file + + Inputs: A Shapely polygon or multipolygon and the name of the output file + """ + + if 'geojson' not in outfile.lower(): + outfile = outfile.replace(outfile.split('.')[-1],'geojson') + geojson = {'type':'FeatureCollection', + "crs": {"type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, + 'features':[]} + feature = {'type':'Feature', + 'properties':{}, + 'geometry':{'type':'Polygon', + 'coordinates':[]}} + feature['geometry']['coordinates'] = json.loads( + to_geojson(poly).split('"coordinates":')[-1][:-1]) + geojson['features'].append(feature) + with open(outfile, 'w') as outputFile: + json.dump(geojson, outputFile, indent=2) \ No newline at end of file From a0fb0ca0c85b71f60cd83d2a8d16f72a90634ad0 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 15 Mar 2024 17:42:16 -0700 Subject: [PATCH 27/63] fetch_roi and bbox2poly private methods now have output capability --- brails/workflow/FootprintHandler.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/brails/workflow/FootprintHandler.py 
b/brails/workflow/FootprintHandler.py index fb5d3bc..0f4568e 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-14-2024 +# 03-15-2024 import math import json @@ -50,7 +50,7 @@ from shapely.geometry import Point, Polygon, LineString, MultiPolygon, box from shapely.ops import linemerge, unary_union, polygonize from shapely.strtree import STRtree -from brails.utils.geoTools import haversine_dist, mesh_polygon, plot_polygon_cells +from brails.utils.geoTools import * import concurrent.futures from requests.adapters import HTTPAdapter, Retry @@ -62,7 +62,7 @@ def __init__(self): self.footprints = [] self.attributes = {} - def __fetch_roi(self,queryarea): + def __fetch_roi(self,queryarea,outfile=False): # Search for the query area using Nominatim API: print(f"\nSearching for {queryarea}...") queryarea = queryarea.replace(" ", "+").replace(',','+') @@ -130,9 +130,11 @@ def __fetch_roi(self,queryarea): sys.exit(f"Could not retrieve the boundary for {queryarea}. " + 'Please check your location query to make sure ' + 'it was entered correctly.') + if outfile: + write_polygon2geojson(bpoly,outfile) return bpoly, queryarea_printname, queryarea_osmid - def __bbox2poly(self,queryarea): + def __bbox2poly(self,queryarea,outfile=False): # Parse the entered bounding box into a polygon: if len(queryarea)%2==0 and len(queryarea)!=0: if len(queryarea)==4: @@ -152,6 +154,8 @@ def __bbox2poly(self,queryarea): else: raise ValueError('Incorrect number of elements detected in the tuple for the bounding box. ' 'Please check to see if you are missing a longitude or latitude value.') + if outfile: + write_polygon2geojson(bpoly,outfile) return bpoly, queryarea_printname def __write_fp2geojson(self,footprints,attributes,outputFilename): From 48f67cad799c622e8dd5049368b59a2d72115701 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 15 Mar 2024 17:51:02 -0700 Subject: [PATCH 28/63] Added support for multi polygons --- brails/utils/geoTools.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/brails/utils/geoTools.py b/brails/utils/geoTools.py index 551c88e..1dfd9d1 100644 --- a/brails/utils/geoTools.py +++ b/brails/utils/geoTools.py @@ -149,9 +149,14 @@ def write_polygon2geojson(poly,outfile): "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, 'features':[]} + if poly.geom_type == 'MultiPolygon': + polytype = 'MultiPolygon' + elif poly.geom_type == 'Polygon': + polytype = 'Polygon' + feature = {'type':'Feature', 'properties':{}, - 'geometry':{'type':'Polygon', + 'geometry':{'type': polytype, 'coordinates':[]}} feature['geometry']['coordinates'] = json.loads( to_geojson(poly).split('"coordinates":')[-1][:-1]) From 87bdecac6fcfface3776a10f1f3a06ec94a01a46 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 18 Mar 2024 20:25:44 -0700 Subject: [PATCH 29/63] Added browser headers for more stable Nominatim requests queries --- brails/workflow/FootprintHandler.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 0f4568e..74fe16a 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-15-2024 +# 03-18-2024 import math import json @@ -75,8 +75,11 @@ def __fetch_roi(self,queryarea,outfile=False): queryarea_formatted += ''.join(list(j)) nominatimquery = 
('https://nominatim.openstreetmap.org/search?' + - f"q={queryarea_formatted}&format=jsonv2") - r = requests.get(nominatimquery) + f"q={queryarea_formatted}&format=jsonv2") + headers = {'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1)'+ + ' AppleWebKit/537.36 (KHTML, like Gecko)'+ + ' Chrome/39.0.2171.95 Safari/537.36')} + r = requests.get(nominatimquery, headers=headers) datalist = r.json() areafound = False From 48f0439602581574b9428d9bffda633d54605956 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 19 Mar 2024 20:19:09 -0700 Subject: [PATCH 30/63] Added length unit conversion & addressed windows UTF-8 errors --- brails/workflow/FootprintHandler.py | 149 ++++++++++++++++++++-------- 1 file changed, 107 insertions(+), 42 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 74fe16a..1dec2b9 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-18-2024 +# 03-19-2024 import math import json @@ -53,14 +53,16 @@ from brails.utils.geoTools import * import concurrent.futures from requests.adapters import HTTPAdapter, Retry +import unicodedata class FootprintHandler: def __init__(self): - self.queryarea = [] + self.attributes = {} self.availableDataSources = ['osm','ms','usastr'] - self.fpSource = 'osm' self.footprints = [] - self.attributes = {} + self.fpSource = 'osm' + self.lengthUnit = 'ft' + self.queryarea = [] def __fetch_roi(self,queryarea,outfile=False): # Search for the query area using Nominatim API: @@ -91,7 +93,13 @@ def __fetch_roi(self,queryarea,outfile=False): break if areafound==True: - print(f"Found {queryarea_name}") + try: + print(f"Found {queryarea_name}") + except: + queryareaNameUTF = unicodedata.normalize( + 'NFKD', queryarea_name).encode('ascii', 'ignore') + queryareaNameUTF = queryareaNameUTF.decode("utf-8") + print(f"Found {queryareaNameUTF}") else: sys.exit(f"Could not locate an area named {queryarea}. " + 'Please check your location query to make sure ' + @@ -180,8 +188,8 @@ def __write_fp2geojson(self,footprints,attributes,outputFilename): with open(outputFilename, 'w') as outputFile: json.dump(geojson, outputFile, indent=2) - def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None, - outputFile='footprints.geojson'): + def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None, + lengthUnit='ft',outputFile='footprints.geojson'): """ Function that loads footprint data from OpenStreetMap, Microsoft, USA Structures, user-defined data @@ -195,11 +203,12 @@ def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None, coordinate described in longitude and latitude pairs """ - def get_osm_footprints(queryarea): + def get_osm_footprints(queryarea,lengthUnit='ft'): def cleanstr(inpstr): return ''.join(char for char in inpstr if not char.isalpha() and not char.isspace() and (char == '.' 
or not char.isalnum())) + def yearstr2int(inpstr): if inpstr!='NA': yearout = cleanstr(inpstr) @@ -214,11 +223,15 @@ def yearstr2int(inpstr): else: yearout = None return yearout - def height2float(inpstr): + + def height2float(inpstr,lengthUnit): if inpstr!='NA': heightout = cleanstr(inpstr) try: - heightout = round(float(heightout)*3.28084,1) + if lengthUnit=='ft': + heightout = round(float(heightout)*3.28084,1) + else: + heightout = round(float(heightout),1) except: heightout = None else: @@ -306,7 +319,7 @@ def height2float(inpstr): for attr in attrkeys: if len(attributes[attr])!=fpcount: attributes[attr].append('NA') - attributes['buildingheight'] = [height2float(height) + attributes['buildingheight'] = [height2float(height,lengthUnit) for height in attributes['buildingheight']] attributes['erabuilt'] = [yearstr2int(year) for year in attributes['erabuilt']] attributes['numstories'] = [nstories if nstories!='NA' else None @@ -315,7 +328,7 @@ def height2float(inpstr): print(f"\nFound a total of {fpcount} building footprints in {queryarea_printname}") return footprints, attributes - def get_ms_footprints(queryarea): + def get_ms_footprints(queryarea,lengthUnit='ft'): def deg2num(lat, lon, zoom): lat_rad = math.radians(lat) n = 2**zoom @@ -386,8 +399,15 @@ def download_ms_tiles(quadkeys,bpoly): "https://minedbuildings.blob.core.windows.net/global-buildings/dataset-links.csv" ) + + # Define length unit conversion factor: + if lengthUnit=='ft': + convFactor = 3.28084 + else: + convFactor = 1 + footprints = [] - bldgheights = [] + bldgheights = [] for quadkey in tqdm(quadkeys): rows = dftiles[dftiles['QuadKey'] == quadkey] if rows.shape[0] == 1: @@ -405,7 +425,7 @@ def download_ms_tiles(quadkeys,bpoly): footprints.append(row['geometry']['coordinates'][0]) height = row['properties']['height'] if height!=-1: - bldgheights.append(round(height*3.28084,1)) + bldgheights.append(round(height*convFactor,1)) else: bldgheights.append(None) @@ -424,7 +444,7 @@ def download_ms_tiles(quadkeys,bpoly): print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}") return footprints,attributes - def get_usastruct_footprints(queryarea): + def get_usastruct_footprints(queryarea,lengthUnit='ft'): def get_usastruct_bldg_counts(bpoly): # Get the coordinates of the bounding box for input polygon bpoly: bbox = bpoly.bounds @@ -565,7 +585,7 @@ def download_ustruct_bldgattr(cell): footprints.append(footprint) height = data['attributes']['HEIGHT'] try: - height = round(float(height*3.28084),1) + height = float(height) except: height = None bldgheight.append(height) @@ -591,7 +611,8 @@ def download_ustruct_bldgattr4region(cellsFinal,bpoly): print("%r generated an exception: %s" % (cell, exc)) pbar.close() - # Parse the API results into building id, footprints and height information: + # Parse the API results into building id, footprints and height + # information: ids = [] footprints = [] bldgheight = [] @@ -601,22 +622,33 @@ def download_ustruct_bldgattr4region(cellsFinal,bpoly): footprints+=res[1] bldgheight+=res[2] - # Remove the duplicate footprint data by recording the API outputs to a - # dictionary: + # Remove the duplicate footprint data by recording the API + # outputs to a dictionary: data = {} for ind,bldgid in enumerate(ids): data[bldgid] = [footprints[ind],bldgheight[ind]] - # Calculate building centroids and save the API outputs into their - # corresponding variables: + # Define length unit conversion factor: + if lengthUnit=='ft': + convFactor = 3.28084 + else: + convFactor = 
1 + + # Calculate building centroids and save the API outputs into + # their corresponding variables: footprints = [] attributes = {'buildingheight':[]} - centroids = [] + centroids = [] for value in data.values(): fp = value[0] centroids.append(Polygon(fp).centroid) footprints.append(fp) - attributes['buildingheight'].append(value[1]) + heightout = value[1] + if heightout is not None: + attributes['buildingheight'].append( + round(heightout*convFactor,1)) + else: + attributes['buildingheight'].append(None) # Identify building centroids and that fall outside of bpoly: results = {} @@ -677,7 +709,7 @@ def download_ustruct_bldgattr4region(cellsFinal,bpoly): return footprints, attributes - def load_footprint_data(fpfile,fpSource,attrmapfile): + def load_footprint_data(fpfile,fpSource,attrmapfile,lengthUnit): """ Function that loads footprint data from a GeoJSON file @@ -721,11 +753,11 @@ def get_bbox(points): def fp_download(bbox,fpSource): if fpSource=='osm': - footprints = get_osm_footprints(bbox) + footprints, _ = get_osm_footprints(bbox) elif fpSource=='ms': - (footprints, _) = get_ms_footprints(bbox) + footprints,_ = get_ms_footprints(bbox) elif fpSource=='usastr': - footprints = get_usastruct_footprints(bbox) + footprints, _ = get_usastruct_footprints(bbox) return footprints def parse_fp_geojson(data, attrmap, attrkeys, fpfile): @@ -857,9 +889,11 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): attrmap = {} for line in lines: lout = line.split(':') - if lout[1]!='': - attrmap[lout[1]] = lout[0] - + if lout[1]!='' and lout[0]!='LengthUnit': + attrmap[lout[1]] = lout[0] + else: + attrmap[lout[0]] = lout[1] + # Identify the attribute keys in the GeoJSON file: attrkeys0 = list(data[0]['properties'].keys()) if attrkeys0: @@ -885,6 +919,13 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): attrmap = {} attrkeys = {} + # Get the length unit for the input data: + try: + lengthUnitInp = attrmap['LengthUnit'].lower()[0] + if lengthUnitInp=='f': lengthUnitInp='ft' + except: + pass + if ptdata: (footprints_out, attributes) = parse_pt_geojson(data, attrmap, @@ -896,10 +937,27 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): attrkeys, fpfile) fpSource = fpfile + + + + + if lengthUnitInp!=lengthUnit: + if lengthUnit=='ft': + convFactor = 3.28084 + else: + convFactor = 1/3.28084 + try: + bldgheights = attributes['BldgHeight'].copy() + bldgheightsConverted = [] + for height in bldgheights: + bldgheightsConverted.append(round(height*convFactor),1) + attributes['BldgHeight'].update(bldgheightsConverted) + except: + pass return footprints_out, attributes, fpSource - def polygon_area(lats, lons): + def polygon_area(lats,lons,lengthUnit): radius = 20925721.784777 # Earth's radius in feet @@ -935,22 +993,27 @@ def polygon_area(lats, lons): # Integrate area = abs(sum(integrands))/(4*pi) + # Area in ratio of sphere total area: area = min(area,1-area) - if radius is not None: #return in units of radius - return area * 4*pi*radius**2 - else: #return in ratio of sphere total area - return area - + + # Area in sqft: + areaout = area * 4*pi*radius**2 + + # Area in sqm: + if lengthUnit=='m': areaout = areaout/(3.28084**2) + + return areaout + def fp_source_selector(self): if self.fpSource=='osm': - footprints, attributes = get_osm_footprints(self.queryarea) + footprints, attributes = get_osm_footprints(self.queryarea,lengthUnit) elif self.fpSource=='ms': - footprints, attributes = get_ms_footprints(self.queryarea) + footprints, attributes = get_ms_footprints(self.queryarea,lengthUnit) 
elif self.fpSource=='usastr': - footprints, attributes = get_usastruct_footprints(self.queryarea) + footprints, attributes = get_usastruct_footprints(self.queryarea,lengthUnit) else: print('Unimplemented footprint source. Setting footprint source to OSM') - footprints, attributes = get_osm_footprints(self.queryarea) + footprints, attributes = get_osm_footprints(self.queryarea,lengthUnit) return footprints, attributes self.queryarea = queryarea @@ -960,7 +1023,8 @@ def fp_source_selector(self): footprints,self.attributes,self.fpSource = load_footprint_data( self.queryarea, self.fpSource, - attrmap) + attrmap, + lengthUnit) else: footprints,self.attributes = fp_source_selector(self) elif isinstance(queryarea,tuple): @@ -989,7 +1053,8 @@ def fp_source_selector(self): for pt in fp: lons.append(pt[0]) lats.append(pt[1]) - self.attributes['fpAreas'].append(int(polygon_area(lats, lons))) + self.attributes['fpAreas'].append(int(polygon_area(lats,lons, + lengthUnit))) # Calculate centroids of the footprints and remove footprint data that # does not form a polygon: From 07f9128f2b0d5f6961a6727682f0f098cd746276 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 20 Mar 2024 08:21:58 -0700 Subject: [PATCH 31/63] Added capability to parse length units for user-specified footprint data --- brails/workflow/FootprintHandler.py | 60 ++++++++++++++++------------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 1dec2b9..a425ce4 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-19-2024 +# 03-20-2024 import math import json @@ -207,7 +207,7 @@ def get_osm_footprints(queryarea,lengthUnit='ft'): def cleanstr(inpstr): return ''.join(char for char in inpstr if not char.isalpha() and not char.isspace() and - (char == '.' or not char.isalnum())) + (char == '.' or char.isalnum())) def yearstr2int(inpstr): if inpstr!='NA': @@ -764,7 +764,8 @@ def parse_fp_geojson(data, attrmap, attrkeys, fpfile): # Create the attribute fields that will be extracted from the # GeoJSON file: attributes = {} - for attr in attrmap.values(): + attributestr = [attr for attr in attrmap.values() if attr!=''] + for attr in attributestr: attributes[attr] = [] footprints_out = [] @@ -807,12 +808,10 @@ def parse_fp_geojson(data, attrmap, attrkeys, fpfile): discardedfp_count+=1 # Print the results of the footprint extraction: - if discardedfp_count==0: - print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}") - else: + if discardedfp_count!=0: print(f"Corrected {correctedfp_count} building footprint{pluralsuffix(correctedfp_count)} with invalid geometry") print(f"Discarded {discardedfp_count} building footprint{pluralsuffix(discardedfp_count)} with invalid geometry") - print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}") + print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}\n") return (footprints_out, attributes) @@ -913,18 +912,19 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): ignored_Attr = set(attrkeys0) - attrkeys if ignored_Attr: print('Attribute mapping does not cover all attributes detected in' - 'the input GeoJSON. Ignoring detected attributes: ' + - ', '.join(ignored_Attr)) + ' the input GeoJSON. 
Ignoring detected attributes: ' + + ', '.join(ignored_Attr) + '\n') else: attrmap = {} attrkeys = {} - # Get the length unit for the input data: - try: - lengthUnitInp = attrmap['LengthUnit'].lower()[0] - if lengthUnitInp=='f': lengthUnitInp='ft' - except: - pass + lengthUnitInp = '' + if 'LengthUnit' in attrmap.keys(): + # Get the length unit for the input data: + if attrmap['LengthUnit']!='': + lengthUnitInp = attrmap['LengthUnit'].lower()[0] + if lengthUnitInp=='f': lengthUnitInp='ft' + del attrmap['LengthUnit'] if ptdata: (footprints_out, attributes) = parse_pt_geojson(data, @@ -938,22 +938,28 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): fpfile) fpSource = fpfile - - - - if lengthUnitInp!=lengthUnit: - if lengthUnit=='ft': - convFactor = 3.28084 + if 'BldgHeight' in attributes.keys(): + if lengthUnitInp!='' and lengthUnitInp!=lengthUnit: + if lengthUnit=='ft': + convFactor = 3.28084 + else: + convFactor = 1/3.28084 + elif lengthUnitInp!='' and lengthUnitInp==lengthUnit: + convFactor = 1 else: - convFactor = 1/3.28084 - try: + convFactor = None + + if convFactor is not None: bldgheights = attributes['BldgHeight'].copy() bldgheightsConverted = [] for height in bldgheights: - bldgheightsConverted.append(round(height*convFactor),1) - attributes['BldgHeight'].update(bldgheightsConverted) - except: - pass + try: + heightFloat = round(height*convFactor,1) + except: + heightFloat = 0 + bldgheightsConverted.append(heightFloat) + del attributes['BldgHeight'] + attributes['BldgHeight'] = bldgheightsConverted return footprints_out, attributes, fpSource From 4ed5f716bcf40f50d7ff53c701481c411e48d266 Mon Sep 17 00:00:00 2001 From: Barbaros Date: Wed, 20 Mar 2024 23:55:24 -0400 Subject: [PATCH 32/63] Final patch for Windows UTF8 conversion issues --- brails/workflow/FootprintHandler.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index a425ce4..414cb02 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -67,10 +67,10 @@ def __init__(self): def __fetch_roi(self,queryarea,outfile=False): # Search for the query area using Nominatim API: print(f"\nSearching for {queryarea}...") - queryarea = queryarea.replace(" ", "+").replace(',','+') + queryareaProcessed = queryarea.replace(" ", "+").replace(',','+') queryarea_formatted = "" - for i, j in groupby(queryarea): + for i, j in groupby(queryareaProcessed): if i=='+': queryarea_formatted += i else: @@ -95,18 +95,22 @@ def __fetch_roi(self,queryarea,outfile=False): if areafound==True: try: print(f"Found {queryarea_name}") + queryarea_printname = queryarea_name.split(",")[0] except: queryareaNameUTF = unicodedata.normalize( 'NFKD', queryarea_name).encode('ascii', 'ignore') queryareaNameUTF = queryareaNameUTF.decode("utf-8") - print(f"Found {queryareaNameUTF}") + queryarea_printname = queryareaNameUTF.split(",")[0] + if queryarea_printname=='': + queryarea_printname = queryarea.title() + print(queryarea_printname) + else: + print(f"Found {queryareaNameUTF}") else: sys.exit(f"Could not locate an area named {queryarea}. 
" + 'Please check your location query to make sure ' + 'it was entered correctly.') - queryarea_printname = queryarea_name.split(",")[0] - url = 'http://overpass-api.de/api/interpreter' # Get the polygon boundary for the query area: From 005abcfeef883651516b9c416d651d37a1c486db Mon Sep 17 00:00:00 2001 From: bacetiner Date: Thu, 21 Mar 2024 05:57:24 -0700 Subject: [PATCH 33/63] Function to get list of attributes without initializing InventoryGenerator --- brails/EnabledAttributes.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100755 brails/EnabledAttributes.py diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py new file mode 100755 index 0000000..4886967 --- /dev/null +++ b/brails/EnabledAttributes.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- + +def BldgAttributes(): + attr = ['buildingheight','chimney','erabuilt', + 'garage','numstories','occupancy', + 'roofeaveheight','roofshape','roofpitch', + 'constype', 'roofcover','constype'] + return attr \ No newline at end of file From 95c9d7b7c90eda75d68f29acd5da75a5b91f31c5 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Thu, 21 Mar 2024 05:59:57 -0700 Subject: [PATCH 34/63] Enabled attributes now pulled from EnabledAttributes function --- brails/InventoryGenerator.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py index 6b94f2f..8ee956f 100644 --- a/brails/InventoryGenerator.py +++ b/brails/InventoryGenerator.py @@ -40,7 +40,7 @@ # Satish Rao # # Last updated: -# 02-07-2024 +# 03-21-2024 import random import sys @@ -52,6 +52,7 @@ from importlib.metadata import version #import brails.models as models +from brails.EnabledAttributes import BldgAttributes from brails.modules import (ChimneyDetector, FacadeParser, GarageDetector, NFloorDetector, RoofClassifier, OccupancyClassifier, RoofCoverClassifier, @@ -65,11 +66,7 @@ class InventoryGenerator: def __init__(self, location='Berkeley', nbldgs=10, randomSelection=True, fpSource='osm', GoogleAPIKey=''): self.apiKey = GoogleAPIKey - self.enabledAttributes = ['buildingheight','chimney','erabuilt', - 'garage','numstories','occupancy', - 'roofeaveheight','roofshape','roofpitch', - 'constype'] #roofcover,'constype'] - + self.enabledAttributes = BldgAttributes() self.inventory = None self.incompleteInventory = None self.location = location @@ -129,6 +126,7 @@ def __init__(self, location='Berkeley', nbldgs=10, randomSelection=True, # is valid: image_handler = ImageHandler(self.apiKey) + @staticmethod def enabled_attributes(self): print('Here is the list of attributes currently enabled in InventoryGenerator:\n') for attribute in self.enabledAttributes: From 287c26b7a48bcc02ff89d972ab34930f451dff4b Mon Sep 17 00:00:00 2001 From: bacetiner Date: Thu, 21 Mar 2024 06:01:52 -0700 Subject: [PATCH 35/63] Added license information --- brails/EnabledAttributes.py | 39 +++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index 4886967..dd5aa93 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -1,4 +1,43 @@ # -*- coding: utf-8 -*- +# +# Copyright (c) 2024 The Regents of the University of California +# +# This file is part of BRAILS. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. 
Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# You should have received a copy of the BSD 3-Clause License along with +# BRAILS. If not, see <http://www.opensource.org/licenses/>. +# +# Contributors: +# Barbaros Cetiner +# +# Last updated: +# 03-21-2024 def BldgAttributes(): attr = ['buildingheight','chimney','erabuilt', From 572469258139a56a6ef2548d8be2940f92606e19 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 22 Mar 2024 16:37:46 -0700 Subject: [PATCH 36/63] Addressed the attribute mismatch issue with custom inventories --- brails/workflow/FootprintHandler.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 414cb02..66ba051 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-20-2024 +# 03-22-2024 import math import json @@ -67,10 +67,10 @@ def __init__(self): def __fetch_roi(self,queryarea,outfile=False): # Search for the query area using Nominatim API: print(f"\nSearching for {queryarea}...") - queryareaProcessed = queryarea.replace(" ", "+").replace(',','+') + queryarea = queryarea.replace(" ", "+").replace(',','+') queryarea_formatted = "" - for i, j in groupby(queryareaProcessed): + for i, j in groupby(queryarea): if i=='+': queryarea_formatted += i else: queryarea_formatted += ''.join(list(j)) @@ -95,22 +95,18 @@ def __fetch_roi(self,queryarea,outfile=False): if areafound==True: try: print(f"Found {queryarea_name}") - queryarea_printname = queryarea_name.split(",")[0] except: queryareaNameUTF = unicodedata.normalize( 'NFKD', queryarea_name).encode('ascii', 'ignore') queryareaNameUTF = queryareaNameUTF.decode("utf-8") - queryarea_printname = queryareaNameUTF.split(",")[0] - if queryarea_printname=='': - queryarea_printname = queryarea.title() - print(queryarea_printname) - else: - print(f"Found {queryareaNameUTF}") + print(f"Found {queryareaNameUTF}") else: sys.exit(f"Could not locate an area named {queryarea}. 
" + 'Please check your location query to make sure ' + 'it was entered correctly.') + queryarea_printname = queryarea_name.split(",")[0] + url = 'http://overpass-api.de/api/interpreter' # Get the polygon boundary for the query area: @@ -823,8 +819,10 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): # Create the attribute fields that will be extracted from the # GeoJSON file: attributes = {} - for attr in attrmap.values(): - attributes[attr] = [] + attributestr = [attr for attr in attrmap.values() if attr!=''] + for attr in attributestr: + attributes[attr] = [] + # Write the data in datalist into a dictionary for better data access, # and filtering the duplicate entries: @@ -915,8 +913,9 @@ def parse_pt_geojson(data, attrmap, attrkeys, fpSource): pass ignored_Attr = set(attrkeys0) - attrkeys if ignored_Attr: - print('Attribute mapping does not cover all attributes detected in' - ' the input GeoJSON. Ignoring detected attributes: ' + + print('\nAttribute mapping does not cover all attributes detected in' + ' the input GeoJSON. Ignoring detected attributes ' + '(building positions extracted from geometry info): ' + ', '.join(ignored_Attr) + '\n') else: attrmap = {} From d731cd1555bda0c217323cc5715a2e9171949f1c Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 25 Mar 2024 18:52:19 -0700 Subject: [PATCH 37/63] Added a method to view raw NSI data --- brails/workflow/NSIParser.py | 237 ++++++++++++++++++++++++----------- 1 file changed, 163 insertions(+), 74 deletions(-) diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index 0a22cb7..04ae1b6 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 02-01-2024 +# 03-25-2024 import requests import numpy as np @@ -46,15 +46,170 @@ from requests.adapters import HTTPAdapter, Retry from shapely.strtree import STRtree from shapely import Polygon, Point +import sys +import json class NSIParser: def __init__(self): self.inventory = [] self.attributes = ['erabuilt','numstories','occupancy','constype'] + + def __get_bbox(self,footprints:list)->tuple: + """ + Function that determines the extent of the area covered by the + footprints as a tight-fit rectangular bounding box + + Input: List of footprint data defined as a list of lists of + coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]]. 
+            Vertices are defined in [longitude,latitude] fashion
+    Output: Tuple containing the minimum and maximum latitude and
+            longitude values
+        """
+        # :
+        minlat = footprints[0][0][1]
+        minlon = footprints[0][0][0]
+        maxlat = footprints[0][0][1]
+        maxlon = footprints[0][0][0]
+        for fp in footprints:
+            for vert in fp:
+                if vert[1]>maxlat:
+                    maxlat = vert[1]
+                if vert[0]>maxlon:
+                    maxlon = vert[0]
+                if vert[1]<minlat:
+                    minlat = vert[1]
+                if vert[0]<minlon:
+                    minlon = vert[0]
+        return (minlat,minlon,maxlat,maxlon)
+
+    def __get_nbi_data(self,bbox:tuple)->dict:
         """
-        Function that reads NSI buildings points and matches the data to a set
+        Function that gets the NBI data for a bounding box entry
+        Input:  Tuple containing the minimum and maximum latitude and
+                longitude values
+        Output: Dictionary containing extracted NBI data keyed using the
+                NBI point coordinates
+        """
+
+        # Unpack the bounding box coordinates:
+        (minlat,minlon,maxlat,maxlon) = bbox
+
+        # Construct the query URL for the bounding box input
+        baseurl = "https://nsi.sec.usace.army.mil/nsiapi/structures?bbox="
+        bboxstr = (f'{minlon:.5f},{minlat:.5f},{minlon:.5f},{maxlat:.5f},'
+                   f'{maxlon:.5f},{maxlat:.5f},{maxlon:.5f},{minlat:.5f},'
+                   f'{minlon:.5f},{minlat:.5f}')
+        url = baseurl + bboxstr
+
+        # Define a retry strategy for common error codes to use in downloading
+        # NBI data:
+        s = requests.Session()
+        retries = Retry(total=5,
+                        backoff_factor=0.1,
+                        status_forcelist=[500, 502, 503, 504])
+        s.mount('https://', HTTPAdapter(max_retries=retries))
+
+        # Download NBI data using the defined retry strategy, read downloaded
+        # GeoJSON data into a list:
+        print('\nGetting National Structure Inventory (NSI) building data'
+              ' for the entered location input...')
+        response = s.get(url)
+        datalist = response.json()['features']
+
+        # Write the data in datalist into a dictionary for better data access,
+        # and filtering the duplicate entries:
+        datadict = {}
+        for data in datalist:
+            pt = Point(data['geometry']['coordinates'])
+            datadict[pt] = data['properties']
+        return datadict
+
+    def GetRawDataROI(self,roi,outfile:str)->None:
+        """
+        The method that reads NSI buildings data finds the points within a
+        bounding polygon or matches a set of footprints, then writes the
+        extracted data into a GeoJSON file.
+
+        Input: roi: Either 1) a list of footprint data defined as a list of
+               lists of coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]].
+               Vertices are defined in [longitude,latitude] fashion, or
+               2) a shapely polygon or multipolygon for the boundary polygon
+               of the region of interest.
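
For reference, a minimal sketch of the bounding-box request the `__get_nbi_data` method above issues (coordinate values here are hypothetical; the bbox string is a closed five-point ring of lon,lat pairs, exactly as built in the method body):

```python
import requests

# Hypothetical Berkeley-area bounds (minlat, minlon, maxlat, maxlon):
minlat, minlon, maxlat, maxlon = 37.85, -122.30, 37.89, -122.25
baseurl = 'https://nsi.sec.usace.army.mil/nsiapi/structures?bbox='
# Closed polygon ring: four corners plus the first corner repeated:
bboxstr = (f'{minlon:.5f},{minlat:.5f},{minlon:.5f},{maxlat:.5f},'
           f'{maxlon:.5f},{maxlat:.5f},{maxlon:.5f},{minlat:.5f},'
           f'{minlon:.5f},{minlat:.5f}')
features = requests.get(baseurl + bboxstr).json()['features']
```
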
+ + outfile: string containing the name of the output file + Currently, only GeoJSON format is supported + """ + + # If ROI is defined as a list of lists of footprint coordinates: + if type(roi) is list: + # Determine the coordinates of the bounding box including the footprints: + bbox = self.__get_bbox(roi) + footprints = roi.copy() + bpoly = None + else: + try: + roitype = roi.geom_type + bpoly = roi + # If ROI is defined as a Shapely polygon or multipolygon, + # determine the coordinates of the bounding box for the bounding + # polygon: + if roitype in ['MultiPolygon','Polygon']: + bbox = bpoly.bounds + bbox = (bbox[1],bbox[0],bbox[3],bbox[2]) + # Otherwise exit with a system error: + except: + sys.exit('Unimplemented region of interest data type') + + # Get NBI data for the computed bounding box: + datadict = self.__get_nbi_data(bbox) + + pointsKeep = set() + points = list(datadict.keys()) + if bpoly is None: + # Create an STR tree for the building points obtained from NBI: + + pttree = STRtree(points) + + # Find the data points that are enclosed in each footprint: + ptind = [] + for fp in footprints: + res = pttree.query(Polygon(fp)) + ptind.extend(res) + ptind = set(ptind) + + for ind in ptind: + pointsKeep.add(points[ind]) + else: + for pt in points: + if bpoly.contains(pt): + pointsKeep.add(pt) + + pointsKeep = list(pointsKeep) + + attrkeys = list(datadict[pointsKeep[0]].keys()) + geojson = {'type':'FeatureCollection', + "crs": {"type": "name", "properties": + {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, + 'features':[]} + for pt in pointsKeep: + feature = {'type':'Feature', + 'properties':{}, + 'geometry':{'type':'Point', + 'coordinates':[]}} + feature['geometry']['coordinates'] = [pt.x,pt.y] + attributes = datadict[pt] + for key in attrkeys: + attr = attributes[key] + feature['properties'][key] = 'NA' if attr is None else attr + geojson['features'].append(feature) + + with open(outfile, 'w') as outputFile: + json.dump(geojson, outputFile, indent=2) + + + def GenerateBldgInventory(self,footprints,bpoly=None,outFile=None): + """ + Method that reads NSI buildings points and matches the data to a set of footprints. Input: List of footprint data defined as a list of lists of @@ -66,75 +221,8 @@ def GenerateBldgInventory(self,footprints,outFile=None): type, 8)occupancy class, 9)footprint polygon """ - def get_bbox(footprints): - """ - Function that determines the extent of the area covered by the - footprints as a tight-fit rectangular bounding box - - Input: List of footprint data defined as a list of lists of - coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]]. 
-            Vertices are defined in [longitude,latitude] fashion
-    Output: Tuple containing the minimum and maximum latitude and
-            longitude values
-            """
-            # :
-            minlat = footprints[0][0][1]
-            minlon = footprints[0][0][0]
-            maxlat = footprints[0][0][1]
-            maxlon = footprints[0][0][0]
-            for fp in footprints:
-                for vert in fp:
-                    if vert[1]>maxlat:
-                        maxlat = vert[1]
-                    if vert[0]>maxlon:
-                        maxlon = vert[0]
-                    if vert[1]<minlat:
-                        minlat = vert[1]
-                    if vert[0]<minlon:
-                        minlon = vert[0]
-            return (minlat,minlon,maxlat,maxlon)

From: bacetiner
Date: Tue, 26 Mar 2024 18:32:46 -0700
Subject: [PATCH 38/63] Added STRTree search function for matching points to polygons

---
 brails/utils/geoTools.py | 67 ++++++++++++++++++++++++++++++----------
 1 file changed, 51 insertions(+), 16 deletions(-)

diff --git a/brails/utils/geoTools.py b/brails/utils/geoTools.py
index 1dfd9d1..e648c79 100644
--- a/brails/utils/geoTools.py
+++ b/brails/utils/geoTools.py
@@ -37,20 +37,21 @@
 # Barbaros Cetiner
 #
 # Last updated:
-# 03-15-2024
+# 03-26-2024
 
 from math import radians, sin, cos, atan2, sqrt
-from shapely import to_geojson
 from shapely.geometry import Polygon
+from shapely import to_geojson
+from shapely.strtree import STRtree
 import matplotlib.pyplot as plt
 import json
 
-def haversine_dist(p1,p2):
+def haversine_dist(p1:list,p2:list)->float:
     """
     Function that calculates the Haversine distance between two points.
 
-    Input: Two points with coordinates defined as latitude and longitude
-           with each point defined as a list of two floating-point values
+    Input: Two points with coordinates defined in latitude and longitude
+           order with each point defined as a list of two floating-point values
    Output: Distance between the input points in feet
    """
 
@@ -74,14 +75,14 @@ def haversine_dist(p1,p2):
     distance = R*c*3280.84
     return distance
 
-def mesh_polygon(polygon, rows:int, cols:int):
+def mesh_polygon(polygon,rows:int,cols:int)->list:
     """
     Function that splits a polygon into individual rectangular polygons.
 
     Inputs: A Shapely polygon and number of rows and columns of rectangular
            cells requested to mesh the area of the polygon.
-           with each point defined as a list of two floating-point values
-    Output: Individual rectangular cells stored as Shapely polygons
+    Output: A list containing the individual rectangular cells stored as
+            Shapely polygons
     """
 
     # Get bounds of the polygon:
@@ -113,13 +114,13 @@ def mesh_polygon(polygon, rows:int, cols:int):
         rectangles.append(poly.envelope)
     return rectangles
 
-def plot_polygon_cells(bpoly, rectangles, fout=False):
+def plot_polygon_cells(bpoly,rectangles:list,fout=False)->None:
     """
-    Function that plots the mesh for a polygon and saves the plot into a PNG image
+    Function that plots meshes for a polygon and saves the plot into a PNG image
 
     Inputs: A Shapely polygon and a list of rectangular mesh cells saved as
-            Shapely polygons. Optional input is a string containing the name of the
-            output file
+            Shapely polygons. Optional input is a string containing the name of
+            the output file
     """
 
     if bpoly.geom_type == 'MultiPolygon':
@@ -136,11 +137,12 @@ def plot_polygon_cells(bpoly, rectangles, fout=False):
         plt.savefig(fout, dpi=600, bbox_inches="tight")
     plt.show()
 
-def write_polygon2geojson(poly,outfile):
+def write_polygon2geojson(poly,outfile:str)->None:
     """
-    Function that writes a single Shapely polygon into a GeoJSON file
+    Function that writes a single Shapely polygon into a GeoJSON file.
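
A short usage sketch for the geoTools helpers edited above (all coordinates hypothetical; per the docstrings, `haversine_dist` takes [latitude, longitude] pairs and returns feet, and `mesh_polygon` returns a list of rectangular Shapely cells):

```python
from shapely.geometry import box
from brails.utils.geoTools import haversine_dist, mesh_polygon

# Distance in feet between two hypothetical East Bay points:
d_ft = haversine_dist([37.8715, -122.2730], [37.8044, -122.2712])

# Split a hypothetical bounding rectangle into a 4x4 grid of cells:
cells = mesh_polygon(box(-122.30, 37.85, -122.25, 37.89), rows=4, cols=4)
```
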
- Input: A Shapely polygon or multi polygon + Inputs: A Shapely polygon or multi polygon and a string containing the name + of the GeoJSON file """ if 'geojson' not in outfile.lower(): @@ -162,4 +164,37 @@ def write_polygon2geojson(poly,outfile): to_geojson(poly).split('"coordinates":')[-1][:-1]) geojson['features'].append(feature) with open(outfile, 'w') as outputFile: - json.dump(geojson, outputFile, indent=2) \ No newline at end of file + json.dump(geojson, outputFile, indent=2) + +def match_points2polygons(points:list,polygons:list)->list: + """ + Function that finds the set of points that match a set of polygons + + Inputs: A list of Shapely points and a list of footprint data defined as a + list of lists of coordinates in EPSG 4326, i.e., [[vert1],.... + [vertlast]]. Vertices are defined in [longitude,latitude] fashion. + Outputs: A list of Shapely points and a dictionary that maps each footprint + list of coordinates (converted to string) to the first matched + Shapely point + """ + + # Create an STR tree for the input points: + pttree = STRtree(points) + + # Find the data points that are enclosed in each polygon: + ptkeepind = [] + fp2ptmap = {} + for poly in polygons: + res = pttree.query(Polygon(poly)) + if res.size!=0: + ptkeepind.extend(res) + fp2ptmap[str(poly)] = points[res[0]] + ptkeepind = set(ptkeepind) + + # Create a list of points that include just the points that have a polygon + # match: + ptskeep = [] + for ind in ptkeepind: + ptskeep.append(points[ind]) + + return ptskeep,fp2ptmap \ No newline at end of file From 8de73f9fefa5492ade06f689f654ba120347048e Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 26 Mar 2024 18:34:13 -0700 Subject: [PATCH 39/63] Streamlined NSI attribute extraction and added length unit option --- brails/workflow/NSIParser.py | 230 +++++++++++++++++++---------------- 1 file changed, 124 insertions(+), 106 deletions(-) diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index 04ae1b6..75e9342 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,26 +37,37 @@ # Barbaros Cetiner # # Last updated: -# 03-25-2024 +# 03-26-2024 import requests -import numpy as np import pandas as pd import os from requests.adapters import HTTPAdapter, Retry -from shapely.strtree import STRtree -from shapely import Polygon, Point import sys import json +from shapely.geometry import Point +from brails.utils.geoTools import match_points2polygons +from brails.workflow.FootprintHandler import FootprintHandler class NSIParser: def __init__(self): - self.inventory = [] - self.attributes = ['erabuilt','numstories','occupancy','constype'] + self.attributes = {} + self.brails2r2dmap = {'lon':'Longitude','lat':'Latitude', + 'fparea':'PlanArea', + 'numstories':'NumberOfStories', + 'erabuilt':'YearBuilt', + 'repaircost':'ReplacementCost', + 'constype':'StructureType', + 'occupancy':'OccupancyClass','fp':'Footprint'} + self.footprints = {} + self.nsi2brailsmap = {'x':'lon','y':'lat','sqft':'fparea', + 'num_story':'numstories','med_yr_blt':'erabuilt', + 'val_struct':'repaircost','bldgtype':'constype', + 'occtype':'occupancy'} def __get_bbox(self,footprints:list)->tuple: """ - Function that determines the extent of the area covered by the + Method that determines the extent of the area covered by the footprints as a tight-fit rectangular bounding box Input: List of footprint data defined as a list of lists of @@ -84,7 +95,8 @@ def __get_bbox(self,footprints:list)->tuple: def __get_nbi_data(self,bbox:tuple)->dict: """ - Function 
that gets the NBI data for a bounding box entry + Method that gets the NBI data for a bounding box entry + Input: Tuple containing the minimum and maximum latitude and longitude values Output: Dictionary containing extracted NBI data keyed using the @@ -126,7 +138,7 @@ def __get_nbi_data(self,bbox:tuple)->dict: def GetRawDataROI(self,roi,outfile:str)->None: """ - The method that reads NSI buildings data finds the points within a + Method that reads NSI buildings data finds the points within a bounding polygon or matches a set of footprints, then writes the extracted data into a GeoJSON file. @@ -163,29 +175,23 @@ def GetRawDataROI(self,roi,outfile:str)->None: # Get NBI data for the computed bounding box: datadict = self.__get_nbi_data(bbox) - pointsKeep = set() points = list(datadict.keys()) - if bpoly is None: - # Create an STR tree for the building points obtained from NBI: - - pttree = STRtree(points) - - # Find the data points that are enclosed in each footprint: - ptind = [] - for fp in footprints: - res = pttree.query(Polygon(fp)) - ptind.extend(res) - ptind = set(ptind) - - for ind in ptind: - pointsKeep.add(points[ind]) - else: + if bpoly is None: + # Find the NBI points that are enclosed in each footprint: + pointsKeep, _ = match_points2polygons(points,footprints) + else: + # Find the NBI points contained in bpoly: + pointsKeep = set() for pt in points: if bpoly.contains(pt): pointsKeep.add(pt) + pointsKeep = list(pointsKeep) - pointsKeep = list(pointsKeep) + # Display the number of NSI points that are within roi: + print(f'Found a total of {len(pointsKeep)} building points in' + ' NSI that are within the entered region of interest') + # Write the extracted NSI data in a GeoJSON file: attrkeys = list(datadict[pointsKeep[0]].keys()) geojson = {'type':'FeatureCollection', "crs": {"type": "name", "properties": @@ -204,93 +210,76 @@ def GetRawDataROI(self,roi,outfile:str)->None: geojson['features'].append(feature) with open(outfile, 'w') as outputFile: - json.dump(geojson, outputFile, indent=2) - + json.dump(geojson, outputFile, indent=2) - def GenerateBldgInventory(self,footprints,bpoly=None,outFile=None): + def GetNSIData(self,footprints:list,lengthUnit:str='ft',outfile=None): """ Method that reads NSI buildings points and matches the data to a set - of footprints. + of footprints and writes the data in a BRAILS-compatible format - Input: List of footprint data defined as a list of lists of + Input: roi: List of footprint data defined as a list of lists of coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]]. Vertices are defined in [longitude,latitude] fashion. 
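
A minimal sketch of the point-to-footprint matching used above (input data hypothetical): `match_points2polygons` returns the points that fall within any footprint, plus a dictionary keyed by `str(footprint)` that maps each footprint to its first matched point.

```python
from shapely.geometry import Point
from brails.utils.geoTools import match_points2polygons

pts = [Point(-122.2701, 37.8701), Point(-122.2600, 37.8800)]
fps = [[[-122.2702, 37.8700], [-122.2700, 37.8700],
        [-122.2700, 37.8702], [-122.2702, 37.8702]]]
matched, fp2ptmap = match_points2polygons(pts, fps)
# matched holds only the first point; the second lies outside every footprint
```
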
- Output: Building inventory for the footprints containing the attributes + + outfile: string containing the name of the output file + Currently, only GeoJSON format is supported + + Output: Attribute dictionary for the footprints containing the attributes: 1)latitude, 2)longitude, 3)building plan area, 4)number of floors, 5)year of construction, 6)replacement cost, 7)structure type, 8)occupancy class, 9)footprint polygon """ - - - - def get_inv_from_datadict(datadict,footprints): - # Create an STR tree for the building points obtained from NBI: + def get_attr_from_datadict(datadict,footprints,nsi2brailsmap): + # Parsers for building and occupancy types: + def bldgtype_parser(bldgtype): + bldgtype = bldgtype + '1' + if bldgtype=='M1': + bldgtype = 'RM1' + return bldgtype + def occ_parser(occtype): + if '-' in occtype: + occtype = occtype.split('-')[0] + return occtype + + # Extract the NSI points from the data dictionary input: points = list(datadict.keys()) - pttree = STRtree(points) - # Find the data points that are enclosed in each footprint: - ress = [] - for fp in footprints: - res = pttree.query(Polygon(fp)) - ress.extend(res) - if res.size!=0: - ress.append(res) - else: - ress.append(np.empty(shape=(0, 0))) + # Match NBI data to footprints: + _, fp2ptmap = match_points2polygons(points,footprints) + + # Write the matched footprints to a list: + footprintsMatched = fp2ptmap.keys() - # Match NBI data to each footprint: - footprints_out_json = [] - footprints_out = [] - lat = [] - lon = [] - planarea = [] - nstories = [] - yearbuilt = [] - repcost = [] - strtype = [] - occ = [] - for ind, fp in enumerate(footprints): - if ress[ind].size!=0: - footprints_out.append(fp) - footprints_out_json.append(('{"type":"Feature","geometry":' + - '{"type":"Polygon","coordinates":[' + - f"""{fp}""" + - ']},"properties":{}}')) - ptind = ress[ind][0] - ptres = datadict[points[ptind]] - lat.append(ptres['y']) - lon.append(ptres['x']) - planarea.append(ptres['sqft']) - nstories.append(ptres['num_story']) - yearbuilt.append(ptres['med_yr_blt']) - repcost.append(ptres['val_struct']) - bldgtype = ptres['bldgtype'] + '1' - if bldgtype=='M1': - strtype.append('RM1') - else: - strtype.append(bldgtype) - if '-' in ptres['occtype']: - occ.append(ptres['occtype'].split('-')[0]) - else: - occ.append(ptres['occtype']) - + # Initialize the attributes dictionary: + attributes = {'fp':[], 'fp_json':[]} + nsikeys = list(nsi2brailsmap.keys()) + brailskeys = list(nsi2brailsmap.values()) + for key in brailskeys: + attributes[key] = [] + + # Extract NSI attributes corresponding to each building footprint + # and write them in attributes dictionary: + for fpstr in footprintsMatched: + fp = json.loads(fpstr) + pt = fp2ptmap[fpstr] + ptres = datadict[pt] + attributes['fp'].append(fp) + attributes['fp_json'].append(('{"type":"Feature","geometry":' + + '{"type":"Polygon","coordinates":[' + + f"""{fp}""" + ']},"properties":{}}')) + for key in nsikeys: + if key=='bldgtype': + attributes[nsi2brailsmap[key]].append(bldgtype_parser(ptres[key])) + elif key=='occtype': + attributes[nsi2brailsmap[key]].append(occ_parser(ptres[key])) + else: + attributes[nsi2brailsmap[key]].append(ptres[key]) # Display the number of footprints that can be matched to NSI points: - print(f'Found a total of {len(footprints_out)} building points in' - ' NSI that match the footprint data.') - - # Write the extracted features into a Pandas dataframe: - inventory = pd.DataFrame(pd.Series(lat,name='Latitude')) - inventory['Longitude'] = lon - 
inventory['PlanArea'] = planarea - inventory['NumberOfStories'] = nstories - inventory['YearBuilt'] = yearbuilt - inventory['ReplacementCost'] = repcost - inventory['StructureType'] = strtype - inventory['OccupancyClass'] = occ - inventory['Footprint'] = footprints_out - return (inventory, footprints_out_json) + print(f'Found a total of {len(footprintsMatched)} building points' + ' in NSI that match the footprint data.') + return attributes # Determine the coordinates of the bounding box including the footprints: bbox = self.__get_bbox(footprints) @@ -299,13 +288,42 @@ def get_inv_from_datadict(datadict,footprints): datadict = self.__get_nbi_data(bbox) # Create a footprint-merged building inventory from extracted NBI data: - (self.inventory, footprints_out_json) = get_inv_from_datadict(datadict,footprints) + attributes = get_attr_from_datadict(datadict,footprints,self.nsi2brailsmap) + + # Convert to footprint areas to sqm if needed: + if lengthUnit=='m': + fparea = [area/(3.28084**2) for area in attributes['fparea']] + attributes.update({'fparea':fparea}) + + + self.attributes = attributes.copy() + if outfile: + if 'csv' in outfile.lower(): + # Write the extracted features into a Pandas dataframe: + brailskeys = list(self.brails2r2dmap.keys()) + inventory = pd.DataFrame(pd.Series(attributes[brailskeys[0]], + name=self.brails2r2dmap[brailskeys[0]])) + for key in brailskeys[1:]: + inventory[self.brails2r2dmap[key]] = attributes[key] + + # Write the created inventory in R2D-compatible CSV format: + n = inventory.columns[-1] + inventory.drop(n, axis = 1, inplace = True) + inventory[n] = attributes['fp_json'] + inventory.to_csv(outfile, index=True, index_label='id') + else: + # Write the extracted features into a GeoJSON file: + footprints = attributes['fp'].copy() + del attributes['fp_json'] + del attributes['fp'] + fpHandler = FootprintHandler() + fpHandler._FootprintHandler__write_fp2geojson(footprints,attributes,outfile) + outfile = outfile.split('.')[0] + '.geojson' + + print(f'\nFinal inventory data available in {os.getcwd()}/{outfile}') - # If requested, write the created inventory in R2D-compatible CSV format: - if outFile: - inventory = self.inventory.copy(deep=True) - n = inventory.columns[-1] - inventory.drop(n, axis = 1, inplace = True) - inventory[n] = footprints_out_json - inventory.to_csv(outFile, index=True, index_label='id') - print(f'\nFinal inventory data available in {os.getcwd()}/{outFile}') \ No newline at end of file + # Reformat the class variables to make them compatible with the + # InventoryGenerator: + self.footprints = self.attributes['fp'].copy() + del self.attributes['fp'] + del self.attributes['fp_json'] \ No newline at end of file From 6616b5eec2a0d1ad5ee68e731e461975181e07a9 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 26 Mar 2024 18:35:21 -0700 Subject: [PATCH 40/63] Changed attribute keys for consistency with other workflow components --- brails/workflow/FootprintHandler.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 66ba051..dd142e8 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-22-2024 +# 03-26-2024 import math import json @@ -1055,14 +1055,14 @@ def fp_source_selector(self): ' shall be entered for the vertex pairs of any of the two' + ' diagonals of the rectangular bounding box.') - self.attributes['fpAreas'] = [] + 
self.attributes['fparea'] = [] for fp in footprints: lons = [] lats = [] for pt in fp: lons.append(pt[0]) lats.append(pt[1]) - self.attributes['fpAreas'].append(int(polygon_area(lats,lons, + self.attributes['fparea'].append(int(polygon_area(lats,lons, lengthUnit))) # Calculate centroids of the footprints and remove footprint data that From 39f184e9465c384f5a74e3aeba92bc5fc3626afe Mon Sep 17 00:00:00 2001 From: bacetiner Date: Wed, 27 Mar 2024 12:51:56 -0700 Subject: [PATCH 41/63] Reordered building attributes in alphabetical order --- brails/EnabledAttributes.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index dd5aa93..ee2b071 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -37,11 +37,10 @@ # Barbaros Cetiner # # Last updated: -# 03-21-2024 +# 03-27-2024 def BldgAttributes(): - attr = ['buildingheight','chimney','erabuilt', - 'garage','numstories','occupancy', - 'roofeaveheight','roofshape','roofpitch', - 'constype', 'roofcover','constype'] + attr = ['buildingheight','chimney','constype','erabuilt','garage', + 'numstories','occupancy','roofcover','roofeaveheight','roofshape', + 'roofpitch'] return attr \ No newline at end of file From 8b92982e92f42f110d205a3ad582ca588ae24eee Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 29 Mar 2024 18:38:01 -0700 Subject: [PATCH 42/63] Cleaned up variable types and added type hints for easier reading --- brails/workflow/NSIParser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index 75e9342..adfffaa 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-26-2024 +# 03-28-2024 import requests import pandas as pd @@ -212,7 +212,7 @@ def GetRawDataROI(self,roi,outfile:str)->None: with open(outfile, 'w') as outputFile: json.dump(geojson, outputFile, indent=2) - def GetNSIData(self,footprints:list,lengthUnit:str='ft',outfile=None): + def GetNSIData(self,footprints:list,lengthUnit:str='ft',outfile:str=''): """ Method that reads NSI buildings points and matches the data to a set of footprints and writes the data in a BRAILS-compatible format From 8ad38e2cb42277106fd20f9235ee00083af4fbe8 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 29 Mar 2024 18:38:46 -0700 Subject: [PATCH 43/63] Cleaned up variable types and added type hints for easier reading --- brails/workflow/FootprintHandler.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index dd142e8..b444ca9 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-26-2024 +# 03-29-2024 import math import json @@ -64,7 +64,7 @@ def __init__(self): self.lengthUnit = 'ft' self.queryarea = [] - def __fetch_roi(self,queryarea,outfile=False): + def __fetch_roi(self,queryarea:str,outfile:str=''): # Search for the query area using Nominatim API: print(f"\nSearching for {queryarea}...") queryarea = queryarea.replace(" ", "+").replace(',','+') @@ -145,7 +145,7 @@ def __fetch_roi(self,queryarea,outfile=False): write_polygon2geojson(bpoly,outfile) return bpoly, queryarea_printname, queryarea_osmid - def __bbox2poly(self,queryarea,outfile=False): + def __bbox2poly(self,queryarea:tuple,outfile:str=''): # Parse the entered bounding box 
into a polygon: if len(queryarea)%2==0 and len(queryarea)!=0: if len(queryarea)==4: @@ -169,7 +169,7 @@ def __bbox2poly(self,queryarea,outfile=False): write_polygon2geojson(bpoly,outfile) return bpoly, queryarea_printname - def __write_fp2geojson(self,footprints,attributes,outputFilename): + def __write_fp2geojson(self,footprints:list,attributes:dict,outputFilename:str): attrkeys = list(attributes.keys()) geojson = {'type':'FeatureCollection', "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, @@ -188,8 +188,8 @@ def __write_fp2geojson(self,footprints,attributes,outputFilename): with open(outputFilename, 'w') as outputFile: json.dump(geojson, outputFile, indent=2) - def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None, - lengthUnit='ft',outputFile='footprints.geojson'): + def fetch_footprint_data(self,queryarea,fpSource:str='osm',attrmap:str='', + lengthUnit:str='ft',outputFile:str=''): """ Function that loads footprint data from OpenStreetMap, Microsoft, USA Structures, user-defined data @@ -1084,4 +1084,5 @@ def fp_source_selector(self): del self.attributes[key][i] # Write the footprint data into a GeoJSON file: - self.__write_fp2geojson(self.footprints, self.attributes, outputFile) \ No newline at end of file + if outputFile: + self.__write_fp2geojson(self.footprints, self.attributes, outputFile) \ No newline at end of file From c65d62728ad1e5854fad8d7af34c8802db2b2905 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 29 Mar 2024 18:39:53 -0700 Subject: [PATCH 44/63] Added a dictionary to map between BRAILS and R2D variable names --- brails/EnabledAttributes.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index ee2b071..1b67f45 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -37,10 +37,20 @@ # Barbaros Cetiner # # Last updated: -# 03-27-2024 +# 03-28-2024 def BldgAttributes(): attr = ['buildingheight','chimney','constype','erabuilt','garage', 'numstories','occupancy','roofcover','roofeaveheight','roofshape', - 'roofpitch'] - return attr \ No newline at end of file + 'roofpitch','windarea'] + return attr + +def BRAILStoR2D_BldgAttrMap(): + r2dHeaders = ['BuildingHeight','ChimneyExists','StructureType', + 'YearBuilt','GarageExists','NumberOfStories', + 'OccupancyClass','RoofCoverType','RoofEaveHeight', + 'RoofShape','RoofPitch','WindowAreaRatio'] + brailsAttributes = BldgAttributes() + brails2r2dmap = {} + for ind,attr in enumerate(brailsAttributes): + brails2r2dmap[attr] = r2dHeaders[ind] \ No newline at end of file From 3b375124db418618e6504d502593c6f06a3ed49e Mon Sep 17 00:00:00 2001 From: bacetiner Date: Sat, 30 Mar 2024 20:46:23 -0700 Subject: [PATCH 45/63] Fixed BRAILStoR2D_BldgAttrMap dictionary returns --- brails/EnabledAttributes.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index 1b67f45..6de169f 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -37,20 +37,21 @@ # Barbaros Cetiner # # Last updated: -# 03-28-2024 +# 03-30-2024 -def BldgAttributes(): +def BldgAttributes()->list: attr = ['buildingheight','chimney','constype','erabuilt','garage', - 'numstories','occupancy','roofcover','roofeaveheight','roofshape', - 'roofpitch','windarea'] + 'numstories','occupancy','repaircost','roofcover','roofeaveheight', + 'roofshape','roofpitch','windarea'] return attr -def 
BRAILStoR2D_BldgAttrMap(): +def BRAILStoR2D_BldgAttrMap()->dict: r2dHeaders = ['BuildingHeight','ChimneyExists','StructureType', 'YearBuilt','GarageExists','NumberOfStories', - 'OccupancyClass','RoofCoverType','RoofEaveHeight', - 'RoofShape','RoofPitch','WindowAreaRatio'] + 'OccupancyClass','ReplacementCost','RoofCoverType', + 'RoofEaveHeight','RoofShape','RoofPitch','WindowAreaRatio'] brailsAttributes = BldgAttributes() brails2r2dmap = {} for ind,attr in enumerate(brailsAttributes): - brails2r2dmap[attr] = r2dHeaders[ind] \ No newline at end of file + brails2r2dmap[attr] = r2dHeaders[ind] + return brails2r2dmap From 179374f212f3e95fbe7aacd039515f2f02e45481 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Sat, 30 Mar 2024 20:47:33 -0700 Subject: [PATCH 46/63] Removed OSM underground floors from nstories count for better results --- brails/workflow/FootprintHandler.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index b444ca9..4fa68bf 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-29-2024 +# 03-30-2024 import math import json @@ -54,6 +54,11 @@ import concurrent.futures from requests.adapters import HTTPAdapter, Retry import unicodedata +import warnings + +# Set a custom warning message format: +warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ + f"{category.__name__}: {message}\n" class FootprintHandler: def __init__(self): @@ -287,8 +292,7 @@ def height2float(inpstr,lengthUnit): 'height':'buildingheight', } - levelkeys = {'building:levels','roof:levels', - 'building:levels:underground'} + levelkeys = {'building:levels','roof:levels'} # Excluding 'building:levels:underground' otherattrkeys = set(attrmap.keys()) datakeys = levelkeys.union(otherattrkeys) @@ -1021,7 +1025,8 @@ def fp_source_selector(self): elif self.fpSource=='usastr': footprints, attributes = get_usastruct_footprints(self.queryarea,lengthUnit) else: - print('Unimplemented footprint source. Setting footprint source to OSM') + warnings.warn('Unimplemented footprint source. 
Setting footprint source to OSM', + UserWarning) footprints, attributes = get_osm_footprints(self.queryarea,lengthUnit) return footprints, attributes From b9bed89ff0a3d5cf023c93e599230fc7fa38bb77 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 1 Apr 2024 08:44:34 -0700 Subject: [PATCH 47/63] Updated the list of attributes to reflect all the enabled options --- brails/EnabledAttributes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index 6de169f..73d5b7d 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -37,12 +37,12 @@ # Barbaros Cetiner # # Last updated: -# 03-30-2024 +# 03-31-2024 def BldgAttributes()->list: attr = ['buildingheight','chimney','constype','erabuilt','garage', 'numstories','occupancy','repaircost','roofcover','roofeaveheight', - 'roofshape','roofpitch','windarea'] + 'roofshape','roofpitch','winarea'] return attr def BRAILStoR2D_BldgAttrMap()->dict: From 97c7b01d3051076d5c4f73be41eeb3c162de1684 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 1 Apr 2024 08:45:31 -0700 Subject: [PATCH 48/63] Added custom user warning messages for inconsistent user input --- brails/workflow/FootprintHandler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 4fa68bf..df76876 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-30-2024 +# 04-01-2024 import math import json @@ -59,6 +59,7 @@ # Set a custom warning message format: warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ f"{category.__name__}: {message}\n" +warnings.simplefilter('always') class FootprintHandler: def __init__(self): From 5d731bd91675afee24faa22e21fc531db3705fbd Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 1 Apr 2024 08:46:51 -0700 Subject: [PATCH 49/63] Updated the API to better integrate baseline inventories --- brails/InventoryGenerator.py | 525 ++++++++++++++++++++++------------- 1 file changed, 336 insertions(+), 189 deletions(-) diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py index 8ee956f..9004e50 100644 --- a/brails/InventoryGenerator.py +++ b/brails/InventoryGenerator.py @@ -35,115 +35,311 @@ # # Contributors: # Barbaros Cetiner -# Sascha Hornauer # Frank McKenna -# Satish Rao # # Last updated: -# 03-21-2024 +# 04-01-2024 -import random -import sys -import pandas as pd -import os -import json -from shapely.geometry import Polygon from datetime import datetime from importlib.metadata import version +import json +import numpy as np +import os +import pandas as pd +import random +import sys +import warnings #import brails.models as models -from brails.EnabledAttributes import BldgAttributes +from brails.EnabledAttributes import BldgAttributes, BRAILStoR2D_BldgAttrMap from brails.modules import (ChimneyDetector, FacadeParser, GarageDetector, NFloorDetector, RoofClassifier, OccupancyClassifier, RoofCoverClassifier, YearBuiltClassifier) -from .workflow.ImHandler import ImageHandler -from .workflow.FootprintHandler import FootprintHandler +from brails.workflow.FootprintHandler import FootprintHandler +from brails.workflow.ImHandler import ImageHandler from brails.workflow.NSIParser import NSIParser -class InventoryGenerator: +# Set a custom warning message format: +warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ + 
f"{category.__name__}: {message}\n" +warnings.simplefilter('always') - def __init__(self, location='Berkeley', nbldgs=10, randomSelection=True, - fpSource='osm', GoogleAPIKey=''): - self.apiKey = GoogleAPIKey +class InventoryGenerator: + def __init__(self, location='Berkeley California', fpSource:str='osm', + baselineInv:str='', attrmap:str='', lengthUnit:str='ft', + outputFile:str=''): + + # Define class variables: + self.attributes = [] + self.apiKey = '' + self.baselineInventory = pd.DataFrame() + self.baselineInventoryOutputFile = outputFile self.enabledAttributes = BldgAttributes() - self.inventory = None - self.incompleteInventory = None + self.fpSource = fpSource + self.inventory = pd.DataFrame() + self.inventoryInventoryOutputFile = '' + self.lengthUnit = lengthUnit self.location = location - self.nbldgs = nbldgs - self.randomSelection = randomSelection - self.workDir = 'tmp' self.modelDir = 'tmp/models' - self.fpSource = fpSource - - """ - # Copy the model files to the current directory: - os.makedirs(self.modelDir,exist_ok=True) - model_dir_org = models.__path__[0] - model_files = [file for file in os.listdir(model_dir_org) if file.endswith(('pth','pkl'))] - for model_file in model_files: - shutil.copyfile(os.path.join(model_dir_org,model_file), - os.path.join(self.modelDir,model_file)) - """ + self.nbldgs = 10 + self.randomSeed = 0 + self.workDir = 'tmp' - # Get footprint data for the defined location: + # Get footprint and building attribute data: fpHandler = FootprintHandler() - fpHandler.fetch_footprint_data(self.location,self.fpSource) - - # Randomly select nbldgs from the footprint data if randomSelection is - # set to True: - if nbldgs!='all': - if self.randomSelection==False: - footprints = fpHandler.footprints[:nbldgs] - fpareas = fpHandler.fpAreas[:nbldgs] - print(f'Selected the first {nbldgs} buildings') - elif self.randomSelection==True: - inds = random.sample(list(range(len(fpHandler.footprints))),nbldgs); inds.sort() - footprints = [fpHandler.footprints[ind] for ind in inds] - fpareas = [fpHandler.fpAreas[ind] for ind in inds] - print(f'Randomly selected {nbldgs} buildings') + # If a baseline inventory is not specified: + if not baselineInv: + # If location entry is a string containing geojson or csv file + # extension and an attribute mapping file is specified: + if ('csv' in location.lower() or 'geojson' in location.lower()) and attrmap: + fpHandler.fetch_footprint_data(location, + fpSource=fpSource, + attrmap=attrmap, + lengthUnit=lengthUnit) + # If location entry is a string or tuple. 
String may contain geojson + # or csv file extension: else: - random.seed(self.randomSelection) - inds = random.sample(list(range(len(fpHandler.footprints))),nbldgs); inds.sort() - footprints = [fpHandler.footprints[ind] for ind in inds] - fpareas = [fpHandler.fpAreas[ind] for ind in inds] - print(f'Randomly selected {nbldgs} buildings using the seed {self.randomSelection}') - else: - footprints = fpHandler.footprints[:] - fpareas = fpHandler.fpAreas[:] - print(f'Selected all {len(footprints)} buildings') + fpHandler.fetch_footprint_data(location,fpSource=fpSource,lengthUnit=lengthUnit) + + # A baseline inventory is specified: + else: + # If NSI is defined as the base inventory: + if baselineInv.lower()=='nsi': + fpHandler.fetch_footprint_data(location,fpSource=fpSource,lengthUnit=lengthUnit) + + # If a user-specified inventory is defined and is accompanied by an + # attribute mapping file: + elif attrmap: + if ('csv' in location.lower() or 'geojson' in location.lower()): + self.fpSource = 'user-specified' + fpInp = location + else: + fpInp = fpSource + fpHandler.fetch_footprint_data(baselineInv, + fpSource=fpInp, + attrmap=attrmap, + lengthUnit=lengthUnit) + + + # If a user-specified baseline inventory is defined but is not + # accompanied by an attribute mapping file, ignore the entered: + else: + warnings.warn('Missing attribute mapping file. ' + + 'Ignoring the user-specified baseline inventory') + fpHandler.fetch_footprint_data(location,fpSource=fpSource,lengthUnit=lengthUnit) + + # Write geometry information from footprint data into a DataFrame: + fpDataDf = pd.DataFrame(pd.Series(fpHandler.footprints,name='Footprint')) + fpDataDf.Footprint = fpDataDf.Footprint.apply(str) + lon = []; lat = [] + for pt in fpHandler.centroids: + lon.append(pt.x) + lat.append(pt.y) + fpDataDf['Latitude'] = lat + fpDataDf['Longitude'] = lon + fpDataDf['PlanArea'] = fpHandler.attributes['fparea'] - # Initialize the inventory DataFrame with the footprint data obtained for nbldgs: - self.inventory = pd.DataFrame(pd.Series(footprints,name='Footprint')) - self.inventory['PlanArea'] = fpareas + # Get the dictionary that maps between BRAILS and R2D attribute names: + brails2r2dmap = BRAILStoR2D_BldgAttrMap() - # Initialize a seperate inventory DataFrame including all footprints for - # the defined region for use with a data imputation application: - self.incompleteInventory = pd.DataFrame(pd.Series(fpHandler.footprints[:],name='Footprint')) - self.incompleteInventory['PlanArea'] = fpHandler.fpAreas[:] + # Get a list of the fphandler attributes that remain to be merged: + attr2merge = list(fpHandler.attributes.keys()) + attr2merge.remove('fparea') - # Initialize the image handler class to check if the provided API key - # is valid: - image_handler = ImageHandler(self.apiKey) + if baselineInv.lower()=='nsi': + # Read NSI data data corresponding to the extracted footprint polygons: + nsiParser = NSIParser() + nsiParser.GetNSIData(fpHandler.footprints, + lengthUnit=lengthUnit) + + # Write building attributes extracted from NSI into a DataFrame and merge with + # the footprint data: + nsiDataDf = pd.DataFrame(pd.Series(nsiParser.footprints,name='Footprint')) + nsiDataDf.Footprint = nsiDataDf.Footprint.apply(str) + for attr in nsiParser.attributes.keys(): + if attr not in ['fparea','lat','lon']: + nsiDataDf[brails2r2dmap[attr]] = nsiParser.attributes[attr] + + self.baselineInventory = fpDataDf.merge(nsiDataDf, how='left', on=['Footprint']) + + # Check if the attributes columns in fpHandler exists in 
inventoryBaseline:
+            for attr in attr2merge:
+                col = brails2r2dmap[attr]
+                # If a column does not exist, add it to inventoryBaseline:
+                if col not in self.baselineInventory:
+                    self.baselineInventory[col] = pd.DataFrame(fpHandler.attributes[attr]).replace('NA',np.nan)
+                # If the column exists, merge it so that it overrides the existing values
+                # on that column, unless a row in fpHandler contains a None value:
+                else:
+                    df = pd.DataFrame(fpHandler.attributes[attr], columns=[attr])
+                    df.replace('NA',np.nan)
+                    self.baselineInventory[col] = df[attr].fillna(self.baselineInventory[col])
+            self.baselineInventory.Footprint = fpDataDf.Footprint.apply(json.loads)
+        else:
+            for attr in attr2merge:
+                col = brails2r2dmap[attr]
+                fpDataDf[col] = pd.DataFrame(fpHandler.attributes[attr]).replace('NA',np.nan)
+            fpDataDf.Footprint = fpDataDf.Footprint.apply(json.loads)
+            self.baselineInventory = fpDataDf.copy(deep=True)
+
+        # Write the generated inventory in outFile:
+        if outputFile:
+            self.__write_inventory_output(self.baselineInventory,
+                                          lengthUnit=lengthUnit,
+                                          outputFile=outputFile)
+
+    def __write_inventory_output(self,inventorydf:pd.DataFrame,
+                                 lengthUnit:str='ft',outputFile:str=''):
+        """
+        Method that writes the data in an inventory DataFrame into a CSV
+        or GeoJSON file based on the file name defined in the outputFile.
+        lengthUnit is for informational purposes only and no unit conversion
+        is performed in this function.
+
+        Inputs:
+            inventorydf: DataFrame
+                A DataFrame containing building attributes. If this
+                DataFrame contains columns for satellite_images and
+                street_images, these columns are removed before the
+                output file is created.
+            outputFile: string
+                String for the name of the output file, e.g., out.geojson.
+                This function writes data only in CSV and GeoJSON formats.
+                If an output format different than these two formats is
+                defined in the file extension, the function defaults to
+                GeoJSON format.
+            lengthUnit: string
+                String defining the length units used to report the inventory
+                data. Available options are ft or m.
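
For reference, a small sketch of the R2D-compatible footprint serialization this method performs (coordinates hypothetical):

```python
fp = [[-122.2702, 37.8700], [-122.2700, 37.8700], [-122.2700, 37.8702]]
fp_json = ('{"type":"Feature","geometry":{"type":"Polygon","coordinates":['
           + f'{fp}' + ']},"properties":{}}')
```
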
+ """ + + # Create a new table that does not list the image names + # corresponding to each building but includes building ID: + dfout = inventorydf.copy(deep=True) + + imColumns2Remove = [] + if 'satellite_images' in dfout: imColumns2Remove.append('satellite_images') + if 'street_images' in dfout: imColumns2Remove.append('street_images') + + if imColumns2Remove: + dfout = dfout.drop(columns=imColumns2Remove,errors='ignore') + + # Rewrite the footprints in a format compatible with R2D: + for index, row in inventorydf.iterrows(): + dfout.loc[index, 'Footprint'] = ('{"type":"Feature","geometry":' + + '{"type":"Polygon","coordinates":[' + + f"""{row['Footprint']}""" + + ']},"properties":{}}') + + # Rearrange the column order of dfout such that the Footprint field is + # the last: + cols = [col for col in dfout.columns if col not in ['Footprint','Latitude','Longitude']] + new_cols = ['Latitude','Longitude'] + cols + ['Footprint'] + dfout = dfout[new_cols] + + # If the inventory is desired in CSV format, write dfout to a CSV: + if '.csv' in outputFile.lower(): + dfout.to_csv(outputFile, index=True, index_label='id') - @staticmethod - def enabled_attributes(self): - print('Here is the list of attributes currently enabled in InventoryGenerator:\n') + # Else write the inventory into a GeoJSON file: + else: + # If the extension of outputFile is not CSV or GeoJSON, write the + # inventory output in GeoJSON format: + if '.geojson' not in outputFile.lower(): + warnings.warn('Output format unimplemented! ' + 'Writing the inventory output in GeoJSON format.') + outputFile = outputFile.replace(outputFile.split('.')[-1],'geojson') + + # Define GeoJSON file dictionary including basic metadata: + geojson = {'type':'FeatureCollection', + 'generated':str(datetime.now()), + 'brails_version': version('BRAILS'), + "crs": {"type": "name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84" }}, + 'units': {"length": lengthUnit}, + 'features':[]} + + # Get the list of attributes available in dfout: + attrs = dfout.columns.values.tolist() + attrs.remove('Footprint') + + # Run through each row of dfout and write the extracted data into + # the GeoJSON dictionary: + for index, row in dfout.iterrows(): + feature = {'type':'Feature', + 'properties':{}, + 'geometry':{'type':'Polygon', + 'coordinates':[]}} + fp = dfout.loc[index, 'Footprint'].split('"coordinates":')[-1] + fp = fp.replace('},"properties":{}}','') + feature['geometry']['coordinates'] = json.loads(fp) + feature['properties']['id'] = index + # Read attribute values on the current row, filling null values + # as NA: + for attr in attrs: + res = row[attr] + if pd.isnull(res): + feature['properties'][attr] = 'NA' + else: + feature['properties'][attr] = res + geojson['features'].append(feature) + + # Write the created GeoJSON dictionary into a GeoJSON file: + with open(outputFile, 'w') as output_file: + json.dump(geojson, output_file, indent=2) + + print(f'\nInventory data available in {outputFile} in {os.getcwd()}') + + def enabled_attributes(self)->None: + """ + Method that prints the building attributes that can be obtained by + BRAILS + """ + + print('Here is the list of attributes currently enabled in ' + + 'InventoryGenerator:\n') for attribute in self.enabledAttributes: print(f' {attribute}') print('\nIf you want to get all of these attributes when you run ' "InventoryGenerator.generate, simply set attributes='all'") def generate(self,attributes=['numstories','occupancy','roofshape'], - useNSIBaseline= True, outFile='inventory.csv', - 
lengthUnit='ft'):
+                 GoogleAPIKey:str='',nbldgs=10,outputFile:str='',
+                 randomSelection:int=0):
+
+        """
+        Method that generates the building inventory for the location entered
+        when InventoryGenerator class is initialized.
+
+        Inputs:
+            attributes: list or string
+                Argument containing a description of the attributes requested
+                as a part of inventory generation. If set to 'all', all
+                attributes in self.enabledAttributes are included. If set to
+                'hazuseq', numstories, erabuilt, repaircost, constype, and
+                occupancy are included. If defined as a list, all valid
+                attribute labels in the list are included in the inventory
+                output.
+            GoogleAPIKey: string
+                String containing a valid Google API key that has Street View
+                Static API enabled
+            nbldgs: int or string
+                If set to 'all', runs BRAILS models on all detected buildings.
+                If set to an integer value, runs BRAILS models on only the
+                number of buildings defined by the set integer value
+            randomSelection: int
+                Random seed for arbitrary selection of nbldgs from the detected
+                buildings
+        """

-        def write_to_dataframe(df,predictions,column,imtype='street_images'):
+        def write_to_dataframe(df:pd.DataFrame,predictions,column:str,
+                               imtype:str='street_images')->pd.DataFrame:
             """
             Function that writes predictions of a model by going through the
             DataFrame, df, and finding the image name matches between the list
             of images corresponding to the predictions, i.e., imagelist,
-            then assigning the predictions to df. The function only searches
+            then assigning the predictions to df. The function only searches
             the column named im_type for image names in imagelist.

             Inputs:
@@ -162,7 +358,8 @@ def write_to_dataframe(df,predictions,column,imtype='street_images'):
             imtype: string
                 'satellite_images' or 'street_images' depending on the type of
                 source images
-            column: Name of the column where the predictions will be written
+            column: string
+                Name of the column where the predictions will be written

             Output: An updated version of df that includes an extra column for
                     the predictions
@@ -176,139 +373,94 @@ def write_to_dataframe(df,predictions,column,imtype='street_images'):
             for index, row in predictions.iterrows():
                 df.loc[df.index[df[imtype] == row['image']],column] = row['prediction']
-            return df
-
+            return df

-        def parse_attribute_input(attrIn,attrEnabled):
+        def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
             """
             Function that pre-processes the user attribute entries to
             InventoryGenerator().generate method such that pre-defined
             attribute sets are correctly parsed and incorrect entries are
-            removed:
+            removed.
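
A sketch of a generate() call under the signature above (gen is assumed to be an InventoryGenerator instance and the API key a placeholder); per the parsing logic shown below, 'hazuseq' expands to numstories, erabuilt, repaircost, constype, and occupancy:

```python
gen.generate(attributes='hazuseq',
             GoogleAPIKey='YOUR-GOOGLE-API-KEY',
             nbldgs=100,
             outputFile='inventory.geojson',
             randomSelection=42)
```
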
Inputs: - df: list - A list of string descriptions for the user requested + attrIn: list + A list of string descriptions for the user-requested building attributes attrEnabled: list A list of string descriptions for the building attributes currently enabled in InventoryGenerator().generate method - Output: A list of string descriptions for the user requested - building attributes that were determined to valid for use + Output: A list of string descriptions for the user-requested + building attributes that were determined to be valid for use with InventoryGenerator().generate method """ + # If all attributes are requested: if isinstance(attrIn,str) and attrIn=='all': attrOut = attrEnabled[:] + # If only the attributes required for HAZUS seismic analysis are + # requested: elif isinstance(attrIn,str) and attrIn=='hazuseq': - attrOut = ['numstories'] + attrOut = ['numstories', 'erabuilt', 'repaircost', 'constype', + 'occupancy'] + # If a custom list of attributes is requested: elif isinstance(attrIn,list): attrOut = [attribute.lower() for attribute in attrIn] + # Get the attributes that are not in enabled attributes and + # remove them from the requested list of attributes: ignore_entries = [] for attribute in attrOut: if attribute not in attrEnabled: ignore_entries.append(attribute) attrOut.remove(attribute) + # Display the ignored (removed) attribute entries: if len(ignore_entries)==1: print('An entry in attributes is not enabled.' f'\nIgnoring entry: {ignore_entries[0]}') elif len(ignore_entries)>1: print('Several entries in attributes are not enabled.' '\nIgnoring entries: ' + ', '.join(ignore_entries)) - - if len(attrOut)==0: - sys.exit('Defined list of attributes does not contain a ' + - 'correct attribute entry. Attribute entries enabled' + - ' are: ' + ', '.join(attrEnabled)) - # Remove duplicate attribute entries: - attrOut = sorted(list(set(attrOut))) + # If no valid attribute is detected, stop code execution: + if len(attrOut)==0: + sys.exit('Entered list of attributes does not contain a ' + + 'correct attribute entry. Attribute entries enabled' + + ' are: ' + ', '.join(attrEnabled)) + # Remove duplicate attribute entries: + attrOut = sorted(list(set(attrOut))) + + else: + warnings.warn('Incorrect attributes entry. Supported attributes' + + ' entries are a list containing the string labels ' + + " for requested attributes, 'all' or 'hazuseq'. " + + ' Running BRAILS for roof shape detection only...') + attrOut=['roofshape'] return attrOut - - def write_inventory_output(inventorydf,outFile,lengthUnit): - """ - Function that writes the data in inventorydf DataFrame into a CSV - or GeoJSON file based on the file name defined in the outFile. - - Inputs: - df: DataFrame - A DataFrame containing building attributes. If this - DataFrame contains columns for satellite_images and - street_images, these columns are removed before the - output file is created. - outFile: string - String for the name of the output file, e.g., out.geojson. - This function writes data only in CSV and GeoJSON formats. - If an output format different than these two formats is - defined in the file extension, the function defaults to - GeoJSON format. 
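
The validation above is equivalent in effect to the following stand-alone sketch (enabled list abbreviated for illustration): entries are lower-cased, unsupported ones reported and dropped, and the remainder de-duplicated and sorted.

```python
enabled = ['numstories', 'occupancy', 'roofshape']  # abbreviated for the sketch
requested = ['RoofShape', 'roofshape', 'solarpanel']
attrs = sorted({a.lower() for a in requested} & set(enabled))
# attrs == ['roofshape']; 'solarpanel' would be reported as an ignored entry
```
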
- """ - - # Create a new table with does not list the image names - # corresponding to each building but includes building ID, latitude, - # and longitudecolumns added: - dfout = inventorydf.copy(deep=True) - dfout = dfout.drop(columns=['satellite_images', 'street_images'], - errors='ignore') + + self.apiKey = GoogleAPIKey + self.nbldgs = nbldgs + self.inventoryInventoryOutputFile = outputFile - - for index, row in inventorydf.iterrows(): - dfout.loc[index, 'Footprint'] = ('{"type":"Feature","geometry":' + - '{"type":"Polygon","coordinates":[' + - f"""{row['Footprint']}""" + - ']},"properties":{}}') - centroid = Polygon(row['Footprint']).centroid - dfout.loc[index, 'Latitude'] = centroid.y - dfout.loc[index, 'Longitude'] = centroid.x - - # Rearrange the column order of dfout such that the Footprint field is - # the last: - cols = [col for col in dfout.columns if col not in ['Footprint','Latitude','Longitude']] - new_cols = ['Latitude','Longitude'] + cols + ['Footprint'] - dfout = dfout[new_cols] - - # If the inventory is desired in CSV format, write dfout to a CSV: - if '.csv' in outFile.lower(): - dfout.to_csv(outFile, index=True, index_label='id') + # Initialize the image handler class to check if the provided API key + # is valid: + image_handler = ImageHandler(self.apiKey) - # Else write the inventory into a GeoJSON file: + if nbldgs!='all': + # If randomSelection is set to a non-zero value, randomly select + # nbldgs from the inventory data with the specified seed. Else, + # pull nbldgs at random with an arbitrary seed: + if randomSelection<0: + randomSelection = random.randint(0,1e8) + print(f'Randomly selected {nbldgs} buildings') else: - if '.geojson' not in outFile.lower(): - print('Output format unimplemented!') - outFile = outFile.replace(outFile.split('.')[-1],'geojson') - geojson = {'type':'FeatureCollection', - 'generated':str(datetime.now()), - 'brails_version': version('BRAILS'), - "crs": {"type": "name", - "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84" }}, - 'units': {"length": lengthUnit}, - 'features':[]} - - attrs = dfout.columns.values.tolist() - attrs.remove('Footprint') - for index, row in dfout.iterrows(): - feature = {'type':'Feature', - 'properties':{}, - 'geometry':{'type':'Polygon', - 'coordinates':[]}} - fp = dfout.loc[index, 'Footprint'].split('"coordinates":')[-1] - fp = fp.replace('},"properties":{}}','') - feature['geometry']['coordinates'] = json.loads(fp) - feature['properties']['id'] = index - for attr in attrs: - res = row[attr] - if pd.isnull(res): - feature['properties'][attr] = 'NA' - else: - feature['properties'][attr] = res - geojson['features'].append(feature) - - with open(outFile, 'w') as output_file: - json.dump(geojson, output_file, indent=2) - - print(f'\nFinal inventory data available in {outFile} in {os.getcwd()}') + print(f'Randomly selected {nbldgs} buildings using the seed {randomSelection}') + self.inventory = self.baselineInventory.sample(n=nbldgs, + random_state=randomSelection) + self.randomSelection = randomSelection + else: + print(f'Selected all {len(self.baselineInventory.index)} buildings') + self.inventory = self.baselineInventory.copy(deep=True) # Parse/correct the list of user requested building attributes: self.attributes = parse_attribute_input(attributes, self.enabledAttributes) @@ -316,33 +468,25 @@ def write_inventory_output(inventorydf,outFile,lengthUnit): # Create a list of footprints for easier module calls: footprints = self.inventory['Footprint'].values.tolist() - if useNSIBaseline: - nsi = NSIParser() - 
nsi.GenerateBldgInventory(footprints) - attributes_process = set(self.attributes) - set(nsi.attributes) - self.inventory = nsi.inventory.copy(deep=True) - footprints = self.inventory['Footprint'].values.tolist() - else: - attributes_process = self.attributes.copy() + # Get the list of attributes that will be processed: + attributes_process = [attr for attr in self.attributes if attr not in + ['occupancy','constype','repaircost','roofcover']] # Download the images required for the requested attributes: - image_handler = ImageHandler(self.apiKey) - - if 'roofshape' in attributes_process: #or 'roofcover' in attributes_process: + if 'roofshape' or 'roofcover' in attributes_process: image_handler.GetGoogleSatelliteImage(footprints) imsat = [im for im in image_handler.satellite_images if im is not None] self.inventory['satellite_images'] = image_handler.satellite_images streetAttributes = self.enabledAttributes[:] - streetAttributes.remove('roofshape') - #streetAttributes.remove('roofcover') + streetAttributes.remove('roofshape'); streetAttributes.remove('roofcover') if set.intersection(set(streetAttributes),set(attributes_process))!=set(): image_handler.GetGoogleStreetImage(footprints) imstreet = [im for im in image_handler.street_images if im is not None] self.inventory['street_images'] = image_handler.street_images - + + # Run the obtained images through BRAILS computer vision models: for attribute in attributes_process: - if attribute=='chimney': # Initialize the chimney detector object: chimneyModel = ChimneyDetector() @@ -442,7 +586,7 @@ def write_inventory_output(inventorydf,outFile,lengthUnit): 'roofshape', 'satellite_images') - elif attribute in ['buildingheight','roofeaveheight','roofpitch']: + elif attribute in ['buildingheight','roofeaveheight','roofpitch','winarea']: if 'facadeParserModel' not in locals(): # Initialize the facade parser object: facadeParserModel = FacadeParser() @@ -460,7 +604,7 @@ def write_inventory_output(inventorydf,outFile,lengthUnit): self.inventory['StructureType'] = ['W1' for ind in range(len(self.inventory.index))] # Bring the attribute values to the desired length unit: - if lengthUnit.lower()=='m': + if self.lengthUnit.lower()=='m': self.inventory['PlanArea'] = self.inventory['PlanArea'].apply(lambda x: x*0.0929) if 'buildingheight' in attributes_process: self.inventory['buildingheight'] = self.inventory['buildingheight'].apply(lambda x: x*0.3048) @@ -468,8 +612,11 @@ def write_inventory_output(inventorydf,outFile,lengthUnit): self.inventory['roofeaveheight'] = self.inventory['roofeaveheight'].apply(lambda x: x*0.3048) # Write the genereated inventory in outFile: - write_inventory_output(self.inventory,outFile,lengthUnit) - + if outputFile: + self.__write_inventory_output(self.inventory, + lengthUnit=self.lengthUnit, + outputFile=outputFile) + # Merge the DataFrame of predicted attributes with the DataFrame of # incomplete inventory and print the resulting table to the output file # titled IncompleteInventory.csv: From 94527544d9a34a57a192adf61938200b1aae6b58 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Mon, 1 Apr 2024 09:09:42 -0700 Subject: [PATCH 50/63] Updated the example for InventoryGenerator calls --- README.md | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 0bd319b..8c879cb 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,13 @@ pip install BRAILS This example demonstrates how to use the ``InventoryGenerator`` method embedded in BRAILS to generate regional-level inventories. 
-The primary input to ``InventoryGenerator`` is location. ``InventoryGenerator`` accepts four different location input: 1) region name, 2) list of region names, 3) bounding box of a region, 4) A GeoJSON file containing building footprints.
+The primary input to ``InventoryGenerator`` is location. ``InventoryGenerator`` accepts four different ``location`` input types: 1) region name, 2) list of region names, 3) a tuple containing the coordinates for two opposite vertices of a bounding box for a region (e.g., (vert1lon,vert1lat,vert2lon,vert2lat), and 4) a GeoJSON file containing building footprints or location points.
 
-Please note that you will need a Google API Key to run ``InventoryGenerator``.
+InventoryGenerator automatically detects building locations in a region by downloading footprint data for the ``location`` input. The three footprint data sources (``fpSource``) included in BRAILS are i) OpenStreetMap, ii) Microsoft Global Building Footprints dataset, and iii) FEMA USA Structures. The keywords for these sources are ``osm``, ``ms``, and ``usastr``, respectively.
+
+``InventoryGenerator`` also allows inventory data to be imported from the National Structure Inventory or another user-specified file to create a baseline building inventory.
+
+Please note that to run the ``generate`` method of ``InventoryGenerator``, you will need a Google API Key.
 
 ```python
 #import InventoryGenerator:
 from brails.InventoryGenerator import InventoryGenerator
 
 # Initialize InventoryGenerator:
 invGenerator = InventoryGenerator(location='Berkeley, CA',
-                                  nbldgs=100, randomSelection=True,
-                                  GoogleAPIKey="")
+                                  fpSource='usastr',
+                                  baselineInv='nsi',
+                                  lengthUnit='m',
+                                  outputFile='BaselineInvBerkeleyCA.geojson')
+
+# View a list of building attributes that can be obtained using BRAILS:
+invGenerator.enabled_attributes()
 
 # Run InventoryGenerator to generate an inventory for the entered location:
 # To run InventoryGenerator for all enabled attributes set attributes='all':
-invGenerator.generate(attributes=['numstories','roofshape','buildingheight'])
+invGenerator.generate(attributes=['numstories','roofshape','buildingheight'],
+                      GoogleAPIKey='ENTER-YOUR-API-KEY-HERE',
+                      nbldgs=100,
+                      outputFile='BldgInvBerkeleyCA.geojson')
 
 # View generated inventory:
 invGenerator.inventory

From e67ccd2b31e9494fb65b2f07c17effe20ac36e7d Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Mon, 1 Apr 2024 09:12:53 -0700
Subject: [PATCH 51/63] Rewrote bbox tuple in code font type for clarity

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 8c879cb..93f0b84 100644
--- a/README.md
+++ b/README.md
@@ -22,9 +22,9 @@ pip install BRAILS
 This example demonstrates how to use the ``InventoryGenerator`` method embedded in BRAILS to generate regional-level inventories.
 
-The primary input to ``InventoryGenerator`` is location. 
``InventoryGenerator`` accepts four different ``location`` input types: 1) region name, 2) list of region names, 3) a tuple containing the coordinates for two opposite vertices of a bounding box for a region (e.g., (vert1lon,vert1lat,vert2lon,vert2lat), and 4) a GeoJSON file containing building footprints or location points.
+The primary input to ``InventoryGenerator`` is location. ``InventoryGenerator`` accepts four different ``location`` input types: 1) region name, 2) list of region names, 3) a tuple containing the coordinates for two opposite vertices of a bounding box for a region (e.g., ``(vert1lon,vert1lat,vert2lon,vert2lat)``), and 4) a GeoJSON file containing building footprints or location points.
 
-InventoryGenerator automatically detects building locations in a region by downloading footprint data for the ``location`` input. The three footprint data sources (``fpSource``) included in BRAILS are i) OpenStreetMap, ii) Microsoft Global Building Footprints dataset, and iii) FEMA USA Structures. The keywords for these sources are ``osm``, ``ms``, and ``usastr``, respectively.
+InventoryGenerator automatically detects building locations in a region by downloading footprint data for the ``location`` input. The three footprint data sources, ``fpSource``, included in BRAILS are i) OpenStreetMap, ii) Microsoft Global Building Footprints dataset, and iii) FEMA USA Structures. The keywords for these sources are ``osm``, ``ms``, and ``usastr``, respectively.
 
 ``InventoryGenerator`` also allows inventory data to be imported from the National Structure Inventory or another user-specified file to create a baseline building inventory.

From 03c236ab676d66354696c31b75f2ed0c1abd36ae Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Mon, 1 Apr 2024 11:43:43 -0700
Subject: [PATCH 52/63] Changed warning settings to ignore connection pooling
 warnings

---
 brails/InventoryGenerator.py        | 19 +++++++++++--------
 brails/workflow/FootprintHandler.py |  2 +-
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py
index 9004e50..1e06a55 100644
--- a/brails/InventoryGenerator.py
+++ b/brails/InventoryGenerator.py
@@ -63,7 +63,7 @@
 # Set a custom warning message format:
 warnings.formatwarning = lambda message, category, filename, lineno, line=None: \
     f"{category.__name__}: {message}\n"
-warnings.simplefilter('always')
+warnings.simplefilter('always',UserWarning)
 
 class InventoryGenerator:
     def __init__(self, location='Berkeley California', fpSource:str='osm',
@@ -90,9 +90,9 @@ def __init__(self, location='Berkeley California', fpSource:str='osm',
         fpHandler = FootprintHandler()
         # If a baseline inventory is not specified:
         if not baselineInv:
-            # If location entry is a string containing geojson or csv file
-            # extension and an attribute mapping file is specified:
-            if ('csv' in location.lower() or 'geojson' in location.lower()) and attrmap:
+            # If location entry is a string containing geojson file extension
+            # and an attribute mapping file is specified:
+            if (isinstance(location, str) and 'geojson' in location.lower()) and attrmap:
                 fpHandler.fetch_footprint_data(location,
                                                fpSource=fpSource,
                                                attrmap=attrmap,
@@ -111,7 +111,7 @@ def __init__(self, location='Berkeley California', fpSource:str='osm',
         # If a user-specified inventory is defined and is accompanied by an
         # attribute mapping file:
         elif attrmap:
-            if ('csv' in location.lower() or 'geojson' in location.lower()):
+            if isinstance(location, str) and ('csv' in location.lower() or 'geojson' in location.lower()):
                 self.fpSource = 'user-specified'
                 fpInp = location
             else:
@@ -126,7 +126,8 @@ def __init__(self, location='Berkeley California', fpSource:str='osm',
         # accompanied by an attribute mapping file, ignore the entered:
         else:
             warnings.warn('Missing attribute mapping file. 
' + - 'Ignoring the user-specified baseline inventory') + 'Ignoring the user-specified baseline inventory', + UserWarning) fpHandler.fetch_footprint_data(location,fpSource=fpSource,lengthUnit=lengthUnit) # Write geometry information from footprint data into a DataFrame: @@ -248,7 +249,8 @@ def __write_inventory_output(self,inventorydf:pd.DataFrame, # inventory output in GeoJSON format: if '.geojson' not in outputFile.lower(): warnings.warn('Output format unimplemented! ' - 'Writing the inventory output in GeoJSON format.') + 'Writing the inventory output in GeoJSON format', + UserWarning) outputFile = outputFile.replace(outputFile.split('.')[-1],'geojson') # Define GeoJSON file dictionary including basic metadata: @@ -434,7 +436,8 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: warnings.warn('Incorrect attributes entry. Supported attributes' + ' entries are a list containing the string labels ' + " for requested attributes, 'all' or 'hazuseq'. " + - ' Running BRAILS for roof shape detection only...') + ' Running BRAILS for roof shape detection only...', + UserWarning) attrOut=['roofshape'] return attrOut diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index df76876..9e2a374 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -59,7 +59,7 @@ # Set a custom warning message format: warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ f"{category.__name__}: {message}\n" -warnings.simplefilter('always') +warnings.simplefilter('always',UserWarning) class FootprintHandler: def __init__(self): From 51bc4900ad1546903f975f0520cd297c9dbd5e4a Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 5 Apr 2024 11:23:39 -0700 Subject: [PATCH 53/63] All latest features of BRAILS are now a part of InventoryGenerator --- brails/InventoryGenerator.py | 207 ++++++++++++++++++++--------------- 1 file changed, 121 insertions(+), 86 deletions(-) diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py index 1e06a55..a0ca73a 100644 --- a/brails/InventoryGenerator.py +++ b/brails/InventoryGenerator.py @@ -38,7 +38,7 @@ # Frank McKenna # # Last updated: -# 04-01-2024 +# 04-05-2024 from datetime import datetime from importlib.metadata import version @@ -63,7 +63,8 @@ # Set a custom warning message format: warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ f"{category.__name__}: {message}\n" -warnings.simplefilter('always',UserWarning) +warnings.simplefilter('always',UserWarning) +warnings.filterwarnings("ignore", message="Implicit dimension choice for softmax has been deprecated.*") class InventoryGenerator: def __init__(self, location='Berkeley California', fpSource:str='osm', @@ -335,47 +336,60 @@ def generate(self,attributes=['numstories','occupancy','roofshape'], buildings """ - def write_to_dataframe(df:pd.DataFrame,predictions,column:str, - imtype:str='street_images')->pd.DataFrame: + def merge_results2inventory(inventory:pd.DataFrame, + predictions:pd.DataFrame, + attrname:str, + imtype:str='street_images')->pd.DataFrame: """ - Function that writes predictions of a model by going through the - DataFrame, df, and finding the image name matches between the - list of images corresponding to the predictions, i.e., imagelist, - then assigning the predictions to df. The function only searches - the column named im_type for image names in imagelist. 
+            Function that merges predictions of a model into an inventory by
+            finding the image name matches between the inventory and the
+            predictions. The function only searches the column named imtype
+            for image names.
 
             Inputs:
-                df: DataFrame
+                inventory: DataFrame
                     A DataFrame containing at least a column of names of source
                     images named either 'satellite_images' or 'street_images',
-                    depending on the value of im_type
-                predictions: list or DataFrame
-                    A list consisting of two lists: 1) list of image names.
-                    These names need to correspond to the predictions in
-                    predictionlist 2) list of predictions. These predictions
-                    need to correspond to the image names in imagelist.
-                    Alternately, a Pandas DataFrame with a column titled image
-                    for image names, and a column for the predictions titled
-                    predictions
+                    depending on the value of imtype
+                predictions: DataFrame
+                    A DataFrame containing two columns for 1) image names
+                    and 2) model predictions. Columns of this DataFrame are
+                    titled with the strings contained in imtype and attrname
                 imtype: string
                     'satellite_images' or 'street_images' depending on the
                     type of source images
-                column: string
-                    Name of the column where the predictions will be written
+                attrname: string
+                    Name of the column where the predictions are stored and
+                    will be written
 
-            Output: An updated version of df that includes an extra column for
-                    the predictions
+            Output: inventory expanded such that it includes a new or updated
+                    column for the predictions
             """
-            if isinstance(predictions,list):
-                imagelist = predictions[0][:]
-                predictionlist = predictions[1][:]
-                for ind, im in enumerate(imagelist):
-                    df.loc[df.index[df[imtype] == im],column] = predictionlist[ind]
-            else:
-                for index, row in predictions.iterrows():
-                    df.loc[df.index[df[imtype] == row['image']],column] = row['prediction']
-            
-            return df
+            
+            # If inventory does not contain a column for attrname:
+            if attrname not in inventory:
+                # Merge the prediction results to inventory DataFrame:
+                inventory = inventory.merge(predictions,
+                                            how='left',
+                                            on=[imtype])
+            
+            else:
+                # Merge the prediction results to inventory DataFrame:
+                mergedInv = inventory.merge(predictions,
+                                            how='left',
+                                            on=[imtype])
+                
+                # Augment prediction results by merging values from the
+                # first attribute column only if the corresponding value
+                # in the second (predictions) column is nan:
+                mergedInv[attrname] = mergedInv[attrname+'_y'].fillna(
+                    mergedInv[attrname+'_x'])
+                
+                # Remove the two attribute columns, keeping the combined
+                # column, and assign the resulting DataFrame to inventory:
+                inventory = mergedInv.drop(columns=[attrname+'_x',
+                                                    attrname+'_y'],
+                                           errors='ignore')
+            return inventory
 
         def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
             """
@@ -476,7 +490,7 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
                              ['occupancy','constype','repaircost','roofcover']]
 
         # Download the images required for the requested attributes:
-        if 'roofshape' or 'roofcover' in attributes_process:
+        if 'roofshape' in attributes_process or 'roofcover' in attributes_process:
             image_handler.GetGoogleSatelliteImage(footprints)
             imsat = [im for im in image_handler.satellite_images if im is not None]
             self.inventory['satellite_images'] = image_handler.satellite_images
@@ -488,21 +502,30 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
             imstreet = [im for im in image_handler.street_images if im is not None]
             self.inventory['street_images'] = image_handler.street_images
 
+        # Get the dictionary that maps between BRAILS and R2D attribute names:
+        brails2r2dmap = 
BRAILStoR2D_BldgAttrMap() + # Run the obtained images through BRAILS computer vision models: for attribute in attributes_process: + # Define the column name: + colname = brails2r2dmap[attribute] + if attribute=='chimney': # Initialize the chimney detector object: chimneyModel = ChimneyDetector() # Call the chimney detector to determine the existence of chimneys: chimneyModel.predict(imstreet) - + # Write the results to the inventory DataFrame: - self.inventory = write_to_dataframe(self.inventory, - [chimneyModel.system_dict['infer']['images'], - chimneyModel.system_dict['infer']['predictions']], - 'chimneyExists') - self.inventory['chimneyExists'] = self.inventory['chimneyExists'].astype(dtype="boolean") + predResults = pd.DataFrame(list(zip(chimneyModel.system_dict['infer']['images'], + chimneyModel.system_dict['infer']['predictions'])), + columns=['street_images', colname]) + self.inventory = merge_results2inventory(self.inventory, + predResults, + colname) + self.inventory[colname] = self.inventory[colname].astype( + dtype="boolean") elif attribute=='erabuilt': # Initialize the era of construction classifier: @@ -511,10 +534,15 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: # Call the classifier to determine the era of construction for # each building: yearModel.predict(imstreet) - + # Write the results to the inventory DataFrame: - self.inventory = write_to_dataframe(self.inventory,yearModel.results_df,'YearBuilt') - self.inventory['YearBuilt'] = self.inventory['YearBuilt'].fillna('N/A') + predResults = yearModel.results_df.copy(deep=True) + predResults = predResults.rename(columns={'prediction': colname, + 'image':'street_images'}).drop( + columns=['probability']) + self.inventory = merge_results2inventory(self.inventory, + predResults, + colname) elif attribute=='garage': # Initialize the garage detector object: @@ -524,11 +552,14 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: garageModel.predict(imstreet) # Write the results to the inventory DataFrame: - self.inventory = write_to_dataframe(self.inventory, - [garageModel.system_dict['infer']['images'], - garageModel.system_dict['infer']['predictions']], - 'garageExists') - self.inventory['garageExists'] = self.inventory['garageExists'].astype(dtype="boolean") + predResults = pd.DataFrame(list(zip(garageModel.system_dict['infer']['images'], + garageModel.system_dict['infer']['predictions'])), + columns=['street_images', colname]) + self.inventory = merge_results2inventory(self.inventory, + predResults, + colname) + self.inventory[colname] = self.inventory[colname].astype( + dtype="boolean") elif attribute=='numstories': # Initialize the floor detector object: @@ -539,11 +570,14 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: storyModel.predict(imstreet) # Write the results to the inventory DataFrame: - self.inventory = write_to_dataframe(self.inventory, - [storyModel.system_dict['infer']['images'], - storyModel.system_dict['infer']['predictions']], - 'NumberOfStories') - self.inventory['NumberOfStories'] = self.inventory['NumberOfStories'].astype(dtype='Int64') + predResults = pd.DataFrame(list(zip(storyModel.system_dict['infer']['images'], + storyModel.system_dict['infer']['predictions'])), + columns=['street_images', colname]) + self.inventory = merge_results2inventory(self.inventory, + predResults, + colname) + self.inventory[colname] = self.inventory[colname].astype( + dtype='Int64') elif attribute=='occupancy': # Initialize the occupancy classifier object: @@ -553,12 +587,12 
@@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
                 # class of each building:
                 occupancyModel.predict(imstreet)
 
-                # Write the results to the inventory DataFrame:
-                occupancy = [[im for (im,_) in occupancyModel.preds],
-                             [pred for (_,pred) in occupancyModel.preds]]
-                self.inventory = write_to_dataframe(self.inventory,occupancy,
-                                                    'OccupancyClass')
-                self.inventory['OccupancyClass'] = self.inventory['OccupancyClass'].fillna('N/A')
+                # Write the prediction results to a DataFrame:
+                predResults = pd.DataFrame(occupancyModel.preds, columns=
+                                           ['street_images', colname])
+                self.inventory = merge_results2inventory(self.inventory,
+                                                         predResults,
+                                                         colname)
 
             elif attribute=='roofcover':
                 # Initialize the roof cover classifier object:
@@ -567,12 +601,14 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
                 # Call the roof cover classifier to classify roof cover type of
                 # each building:
                 roofCoverModel.predict(imsat)
-                
-                # Write the results to the inventory DataFrame:
-                self.inventory = write_to_dataframe(self.inventory,
-                                                    roofCoverModel.system_dict['infer']['predictions'],
-                                                    'roofCover',
-                                                    'satellite_images')
+                
+                # Write the prediction results to a DataFrame:
+                predResults = pd.DataFrame(roofCoverModel.preds, columns=
+                                           ['satellite_images', colname])
+                self.inventory = merge_results2inventory(self.inventory,
+                                                         predResults,
+                                                         colname,
+                                                         'satellite_images')
 
             elif attribute=='roofshape':
                 # Initialize the roof type classifier object:
@@ -582,12 +618,13 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
                 # each building:
                 roofModel.predict(imsat)
 
-                # Write the results to the inventory DataFrame:
-                roofShape = [[im for (im,_) in roofModel.preds],
-                             [pred for (_,pred) in roofModel.preds]]
-                self.inventory = write_to_dataframe(self.inventory,roofShape,
-                                                    'roofshape',
-                                                    'satellite_images')
+                # Write the prediction results to a DataFrame:
+                predResults = pd.DataFrame(roofModel.preds, columns=
+                                           ['satellite_images', colname])
+                self.inventory = merge_results2inventory(self.inventory,
+                                                         predResults,
+                                                         colname,
+                                                         'satellite_images')
 
             elif attribute in ['buildingheight','roofeaveheight','roofpitch','winarea']:
                 if 'facadeParserModel' not in locals():
                     # Initialize the facade parser object:
                     facadeParserModel = FacadeParser()
@@ -597,22 +634,20 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list:
                 # Call the facade parser to determine the requested
                 # attribute for each building:
                 facadeParserModel.predict(image_handler)
-                
-                self.inventory = write_to_dataframe(self.inventory,
-                                                    [facadeParserModel.predictions['image'].to_list(),
-                                                     facadeParserModel.predictions[attribute].to_list()],
-                                                    attribute)
-
-            elif attribute=='constype':
-                self.inventory['StructureType'] = ['W1' for ind in range(len(self.inventory.index))]
-        
-        # Bring the attribute values to the desired length unit:
-        if self.lengthUnit.lower()=='m':
-            self.inventory['PlanArea'] = self.inventory['PlanArea'].apply(lambda x: x*0.0929)
-            if 'buildingheight' in attributes_process:
-                self.inventory['buildingheight'] = self.inventory['buildingheight'].apply(lambda x: x*0.3048)
-            if 'roofeaveheight' in attributes_process:
-                self.inventory['roofeaveheight'] = self.inventory['roofeaveheight'].apply(lambda x: x*0.3048)
+                
+                # Get the relevant subset of the prediction results:
+                predResults = facadeParserModel.predictions[['image',attribute]].copy()
+                
+                # Bring the results to the desired length unit (the column is
+                # still titled with the BRAILS attribute name at this point,
+                # since the rename to the R2D column name happens below):
+                if self.lengthUnit.lower()=='m' and (attribute in ['buildingheight','roofeaveheight']):
+                    predResults[attribute] = predResults[attribute]*0.3048
+                predResults = predResults.rename(columns={'image': 'street_images',
attribute: colname}) + + # Write the results to the inventory DataFrame: + self.inventory = merge_results2inventory(self.inventory, + predResults, + colname) # Write the genereated inventory in outFile: if outputFile: From 324d1107465f623c82768da62c63d9163c960404 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 5 Apr 2024 11:44:55 -0700 Subject: [PATCH 54/63] Reformatted print statements for consistent InventoryGenerator outputs --- brails/InventoryGenerator.py | 7 ++++--- brails/workflow/NSIParser.py | 5 ++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/brails/InventoryGenerator.py b/brails/InventoryGenerator.py index a0ca73a..658fdd9 100644 --- a/brails/InventoryGenerator.py +++ b/brails/InventoryGenerator.py @@ -469,14 +469,14 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: # pull nbldgs at random with an arbitrary seed: if randomSelection<0: randomSelection = random.randint(0,1e8) - print(f'Randomly selected {nbldgs} buildings') + print(f'\nRandomly selected {nbldgs} buildings\n') else: - print(f'Randomly selected {nbldgs} buildings using the seed {randomSelection}') + print(f'\nRandomly selected {nbldgs} buildings using the seed {randomSelection}\n') self.inventory = self.baselineInventory.sample(n=nbldgs, random_state=randomSelection) self.randomSelection = randomSelection else: - print(f'Selected all {len(self.baselineInventory.index)} buildings') + print(f'\nSelected all {len(self.baselineInventory.index)} buildings\n') self.inventory = self.baselineInventory.copy(deep=True) # Parse/correct the list of user requested building attributes: @@ -501,6 +501,7 @@ def parse_attribute_input(attrIn:list,attrEnabled:list)->list: image_handler.GetGoogleStreetImage(footprints) imstreet = [im for im in image_handler.street_images if im is not None] self.inventory['street_images'] = image_handler.street_images + print('') # Get the dictionary that maps between BRAILS and R2D attribute names: brails2r2dmap = BRAILStoR2D_BldgAttrMap() diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index adfffaa..684152c 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 03-28-2024 +# 04-05-2024 import requests import pandas as pd @@ -319,8 +319,7 @@ def occ_parser(occtype): fpHandler = FootprintHandler() fpHandler._FootprintHandler__write_fp2geojson(footprints,attributes,outfile) outfile = outfile.split('.')[0] + '.geojson' - - print(f'\nFinal inventory data available in {os.getcwd()}/{outfile}') + print(f'\nFinal inventory data available in {os.getcwd()}/{outfile}') # Reformat the class variables to make them compatible with the # InventoryGenerator: From 247e7fe424de71ced358dabd5c5eb1abc6340dbd Mon Sep 17 00:00:00 2001 From: Barbaros Cetiner Date: Thu, 11 Apr 2024 10:33:57 -0700 Subject: [PATCH 55/63] Added Zenodo and PyPi badges --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 93f0b84..4d278ae 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ - +[![DOI](https://zenodo.org/badge/184673734.svg)](https://zenodo.org/badge/latestdoi/184673734) +[![PyPi version](https://badgen.net/pypi/v/BRAILS/)](https://pypi.org/project/BRAILS/) +[![PyPI download month](https://img.shields.io/pypi/dm/BRAILS.svg)](https://pypi.python.org/pypi/BRAILS/) ## What is BRAILS? 
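For readers following the PATCH 53 refactor above, the snippet below is a minimal, self-contained sketch of the merge-and-backfill pattern that merge_results2inventory implements. The image names, the NumberOfStories column, and the data values are illustrative stand-ins, not BRAILS outputs.

```python
import pandas as pd

# Toy stand-ins for a BRAILS inventory and a model's prediction table:
inventory = pd.DataFrame({'street_images': ['im0.jpg', 'im1.jpg', 'im2.jpg'],
                          'NumberOfStories': [2, None, None]})
predictions = pd.DataFrame({'street_images': ['im1.jpg', 'im2.jpg'],
                            'NumberOfStories': [3, 1]})

# Left-merge the predictions onto the inventory; because both frames share
# the attribute column, pandas suffixes them as _x (existing values) and
# _y (predictions):
merged = inventory.merge(predictions, how='left', on=['street_images'])

# Prefer the predicted value and fall back to the existing value wherever
# the prediction is NaN, mirroring the fillna step in the patch:
merged['NumberOfStories'] = merged['NumberOfStories_y'].fillna(
    merged['NumberOfStories_x'])
merged = merged.drop(columns=['NumberOfStories_x', 'NumberOfStories_y'])
print(merged)
```

Preferring the _y (prediction) column and backfilling from _x is what lets previously populated baseline values survive for any image a model returns no prediction for.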
From 68146585a3b1f04fdb8cdfd19f074865944be60d Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 19 Apr 2024 17:02:22 -0700 Subject: [PATCH 56/63] Cleaned up code formatting --- brails/EnabledAttributes.py | 14 +++++++------- brails/workflow/NSIParser.py | 17 +++++++++-------- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/brails/EnabledAttributes.py b/brails/EnabledAttributes.py index 73d5b7d..926d52d 100755 --- a/brails/EnabledAttributes.py +++ b/brails/EnabledAttributes.py @@ -37,21 +37,21 @@ # Barbaros Cetiner # # Last updated: -# 03-31-2024 +# 04-19-2024 -def BldgAttributes()->list: +def BldgAttributes() -> list: attr = ['buildingheight','chimney','constype','erabuilt','garage', 'numstories','occupancy','repaircost','roofcover','roofeaveheight', 'roofshape','roofpitch','winarea'] return attr -def BRAILStoR2D_BldgAttrMap()->dict: +def BRAILStoR2D_BldgAttrMap() -> dict: r2dHeaders = ['BuildingHeight','ChimneyExists','StructureType', - 'YearBuilt','GarageExists','NumberOfStories', - 'OccupancyClass','ReplacementCost','RoofCoverType', - 'RoofEaveHeight','RoofShape','RoofPitch','WindowAreaRatio'] + 'YearBuilt','GarageExists','NumberOfStories', + 'OccupancyClass','ReplacementCost','RoofCoverType', + 'RoofEaveHeight','RoofShape','RoofPitch','WindowAreaRatio'] brailsAttributes = BldgAttributes() brails2r2dmap = {} for ind,attr in enumerate(brailsAttributes): brails2r2dmap[attr] = r2dHeaders[ind] - return brails2r2dmap + return brails2r2dmap \ No newline at end of file diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index 684152c..293ccc2 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 04-05-2024 +# 04-19-2024 import requests import pandas as pd @@ -65,7 +65,7 @@ def __init__(self): 'val_struct':'repaircost','bldgtype':'constype', 'occtype':'occupancy'} - def __get_bbox(self,footprints:list)->tuple: + def __get_bbox(self, footprints:list) -> tuple: """ Method that determines the extent of the area covered by the footprints as a tight-fit rectangular bounding box @@ -93,7 +93,7 @@ def __get_bbox(self,footprints:list)->tuple: minlon = vert[0] return (minlat,minlon,maxlat,maxlon) - def __get_nbi_data(self,bbox:tuple)->dict: + def __get_nbi_data(self, bbox:tuple) -> dict: """ Method that gets the NBI data for a bounding box entry @@ -136,7 +136,7 @@ def __get_nbi_data(self,bbox:tuple)->dict: datadict[pt] = data['properties'] return datadict - def GetRawDataROI(self,roi,outfile:str)->None: + def GetRawDataROI(self, roi, outfile:str) -> None: """ Method that reads NSI buildings data finds the points within a bounding polygon or matches a set of footprints, then writes the @@ -212,7 +212,7 @@ def GetRawDataROI(self,roi,outfile:str)->None: with open(outfile, 'w') as outputFile: json.dump(geojson, outputFile, indent=2) - def GetNSIData(self,footprints:list,lengthUnit:str='ft',outfile:str=''): + def GetNSIData(self, footprints:list, lengthUnit:str='ft', outfile:str=''): """ Method that reads NSI buildings points and matches the data to a set of footprints and writes the data in a BRAILS-compatible format @@ -230,7 +230,7 @@ def GetNSIData(self,footprints:list,lengthUnit:str='ft',outfile:str=''): type, 8)occupancy class, 9)footprint polygon """ - def get_attr_from_datadict(datadict,footprints,nsi2brailsmap): + def get_attr_from_datadict(datadict, footprints, nsi2brailsmap): # Parsers for building and occupancy types: def bldgtype_parser(bldgtype): bldgtype = bldgtype + '1' @@ 
-266,8 +266,9 @@ def occ_parser(occtype): ptres = datadict[pt] attributes['fp'].append(fp) attributes['fp_json'].append(('{"type":"Feature","geometry":' + - '{"type":"Polygon","coordinates":[' + - f"""{fp}""" + ']},"properties":{}}')) + '{"type":"Polygon","coordinates":[' + + f"""{fp}""" + + ']},"properties":{}}')) for key in nsikeys: if key=='bldgtype': attributes[nsi2brailsmap[key]].append(bldgtype_parser(ptres[key])) From cb9024b193af9b745d69265c8f3322990221809d Mon Sep 17 00:00:00 2001 From: bacetiner Date: Fri, 19 Apr 2024 17:30:14 -0700 Subject: [PATCH 57/63] Modularized the code --- .../workflow/TransportationElementHandler.py | 139 ++++++------------ 1 file changed, 49 insertions(+), 90 deletions(-) diff --git a/brails/workflow/TransportationElementHandler.py b/brails/workflow/TransportationElementHandler.py index d9e14d3..b2882a6 100644 --- a/brails/workflow/TransportationElementHandler.py +++ b/brails/workflow/TransportationElementHandler.py @@ -37,98 +37,23 @@ # Barbaros Cetiner # # Last updated: -# 01-26-2024 +# 04-19-2024 import requests import copy import json -import sys -from shapely.geometry import Polygon, LineString, Point, box +from shapely.geometry import LineString, Point from shapely import wkt -from itertools import groupby from requests.adapters import HTTPAdapter, Retry +from brails.workflow.FootprintHandler import FootprintHandler class TransportationElementHandler: def __init__(self): - self.queryarea = [] + self.queryarea = '' self.output_files = {'roads':'Roads.geojson'} def fetch_transportation_elements(self,queryarea): - def fetch_roi(queryarea): - if isinstance(queryarea,str): - # Search for the query area using Nominatim API: - print(f"\nSearching for {queryarea}...") - queryarea = queryarea.replace(" ", "+").replace(',','+') - - queryarea_formatted = "" - for i, j in groupby(queryarea): - if i=='+': - queryarea_formatted += i - else: - queryarea_formatted += ''.join(list(j)) - - nominatimquery = ('https://nominatim.openstreetmap.org/search?' + - f"q={queryarea_formatted}&format=jsonv2") - r = requests.get(nominatimquery) - datalist = r.json() - - areafound = False - for data in datalist: - queryarea_osmid = data['osm_id'] - queryarea_name = data['display_name'] - if(data['osm_type']=='relation' and - 'university' in queryarea.lower() and - data['type']=='university'): - areafound = True - break - elif (data['osm_type']=='relation' and - data['type']=='administrative'): - areafound = True - break - - if areafound==True: - print(f"Found {queryarea_name}") - else: - sys.exit(f"Could not locate an area named {queryarea}. 
" + - 'Please check your location query to make sure' + - 'it was entered correctly.') - - queryarea_printname = queryarea_name.split(",")[0] - - print(f"\nFetching the transportation network for {queryarea_printname}...") - url = 'http://overpass-api.de/api/interpreter' - - if isinstance(queryarea,str): - query = f""" - [out:json][timeout:5000]; - rel({queryarea_osmid}); - out geom; - """ - r = requests.get(url, params={'data': query}) - - datastruct = r.json()['elements'][0] - - boundarypoly = [] - if datastruct['tags']['type']=='boundary': - for coorddict in datastruct['members']: - if coorddict['role']=='outer': - for coord in coorddict['geometry']: - boundarypoly.append([coord['lon'],coord['lat']]) - - bpoly = Polygon(boundarypoly) - - elif isinstance(queryarea,tuple): - bpoly = box(queryarea[0],queryarea[1],queryarea[2],queryarea[3]) - print("\nFetching the transportation network for the bounding box: " + - f"[{queryarea[0]}, {queryarea[1]}, {queryarea[2]},{queryarea[3]}]...") - else: - sys.exit('Incorrect location entry. The location entry must be defined' + - ' as a string or a list of strings containing the area name(s)' + - ' or a tuple containing the longitude and latitude pairs for' + - ' the bounding box of the area of interest.') - return bpoly - def query_generator(bpoly,eltype): bbox = bpoly.bounds eltype = eltype.lower() @@ -177,11 +102,32 @@ def query_generator(bpoly,eltype): else: raise NotImplementedError('Element type not implemented') return query - + + def get_el_counts(bpoly, eltype: str) -> int: + # Define a retry stratey for common error codes to use when + # downloading data: + s = requests.Session() + retries = Retry(total=10, + backoff_factor=0.1, + status_forcelist=[500, 502, 503, 504]) + s.mount('https://', HTTPAdapter(max_retries=retries)) + + # Create the query required to get the element counts: + query = query_generator(bpoly,eltype) + query.replace('outSR=4326','returnCountOnly=true') + + # Download element count using the defined retry strategy: + r = s.get(query) + elcount = r.json()['count'] + + return elcount + + def conv2geojson(datalist,eltype,bpoly): eltype = eltype.lower() geojson = {'type':'FeatureCollection', - "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, + "crs": {"type": "name", "properties": + {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, 'features':[]} for item in datalist: if eltype in ['bridge','tunnel']: @@ -277,7 +223,8 @@ def lstring2xylist(lstring): def combine_write_roadjsons(roadjsons,bpoly): roadjsons_combined = {'type':'FeatureCollection', - "crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, + "crs": {"type": "name", "properties": + {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, 'features':[]} rtype = {'primary_road':'P','secondary_road':'S','local_road':'L'} for key, roadjson in roadjsons.items(): @@ -300,9 +247,11 @@ def combine_write_roadjsons(roadjsons,bpoly): nedges = len(inds) for i in range(nedges): if i+1!=nedges: - edge = wkt.loads('LINESTRING ' + mlstring_wkt[inds[i]:inds[i+1]-2]) + edge = wkt.loads('LINESTRING ' + + mlstring_wkt[inds[i]:inds[i+1]-2]) else: - edge = wkt.loads('LINESTRING ' + mlstring_wkt[inds[i]:-1]) + edge = wkt.loads('LINESTRING ' + + mlstring_wkt[inds[i]:-1]) coordsout = lstring2xylist(edge) newitem = copy.deepcopy(item) newitem['geometry']['coordinates'] = [coordsout] @@ -323,14 +272,24 @@ def mergeDataList(dataLists): ("This may be solved by running Brails again")) left['attributes'].update(right['attributes']) return merged - - 
+ self.queryarea = queryarea - bpoly = fetch_roi(self.queryarea) - - eltypes = ['bridge','tunnel','railroad','primary_road','secondary_road','local_road'] + + # Initialize FootprintHandler: + fpHandler = FootprintHandler() + + # Run FootprintHandler to get the boundary for the entered location: + if isinstance(queryarea,tuple): + bpoly,_ = fpHandler._FootprintHandler__bbox2poly(queryarea) + else: + bpoly,_,_ = fpHandler._FootprintHandler__fetch_roi(queryarea) - roadjsons = {'primary_road':[],'secondary_road':[],'local_road':[]} + # Define supported element types: + eltypes = ['bridge', 'tunnel', 'railroad', 'primary_road', + 'secondary_road', 'local_road'] + roadjsons = {'primary_road':[], 'secondary_road':[], 'local_road':[]} + + # Write the GeoJSON output for each element: for eltype in eltypes: if '_road' not in eltype: jsonout = write2geojson(bpoly,eltype) From 2baaa31a9d72cb7eab1f85e4334bcff3be37c5ca Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 30 Apr 2024 07:00:03 -0700 Subject: [PATCH 58/63] Updated NSIParser and FootprintHandler for better R2D compatibility --- brails/workflow/FootprintHandler.py | 35 +++++++++++++++------ brails/workflow/NSIParser.py | 48 ++++++++++++++++------------- 2 files changed, 52 insertions(+), 31 deletions(-) diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py index 9e2a374..8ebb7e9 100644 --- a/brails/workflow/FootprintHandler.py +++ b/brails/workflow/FootprintHandler.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 04-01-2024 +# 04-30-2024 import math import json @@ -55,6 +55,7 @@ from requests.adapters import HTTPAdapter, Retry import unicodedata import warnings +from brails.EnabledAttributes import BRAILStoR2D_BldgAttrMap # Set a custom warning message format: warnings.formatwarning = lambda message, category, filename, lineno, line=None: \ @@ -175,20 +176,29 @@ def __bbox2poly(self,queryarea:tuple,outfile:str=''): write_polygon2geojson(bpoly,outfile) return bpoly, queryarea_printname - def __write_fp2geojson(self,footprints:list,attributes:dict,outputFilename:str): + def __write_fp2geojson(self,footprints:list,attributes:dict, + outputFilename:str,convertKeys:bool=False): + attrmap = BRAILStoR2D_BldgAttrMap(); attrmap['lat'] = 'Latitude'; + attrmap['lon'] = 'Longitude'; attrmap['fparea'] = 'PlanArea' attrkeys = list(attributes.keys()) geojson = {'type':'FeatureCollection', "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, 'features':[]} for ind,fp in enumerate(footprints): - feature = {'type':'Feature', + feature = {'id': str(ind), + 'type':'Feature', 'properties':{}, 'geometry':{'type':'Polygon', 'coordinates':[]}} feature['geometry']['coordinates'] = [fp] for key in attrkeys: attr = attributes[key][ind] - feature['properties'][key] = 'NA' if attr is None else attr + if convertKeys: + keyout = attrmap[key] + else: + keyout = key + feature['properties'][keyout] = 'NA' if attr is None else attr + feature['properties']['type'] = 'Building' geojson['features'].append(feature) with open(outputFilename, 'w') as outputFile: @@ -685,13 +695,18 @@ def download_ustruct_bldgattr4region(cellsFinal,bpoly): bpoly, queryarea_printname = self.__bbox2poly(queryarea) elif isinstance(queryarea,str): bpoly, queryarea_printname, _ = self.__fetch_roi(queryarea) - - plotCells = True - + + ####################### For debugging only ####################### + #plotCells = True + plotCells = False if plotCells: meshInitialfout = queryarea_printname.replace(' ','_') + '_Mesh_Initial.png' 
meshFinalfout = queryarea_printname.replace(' ','_') + '_Mesh_Final.png' - + else: + meshInitialfout = False + meshFinalfout = False + ################################################################## + print('\nMeshing the defined area...') cellsPrem = get_polygon_cells(bpoly, plotfout=meshInitialfout) @@ -705,9 +720,11 @@ def download_ustruct_bldgattr4region(cellsFinal,bpoly): else: cellsFinal = cellsPrem.copy() print(f'\nMeshing complete. Covered {queryarea_printname} with a rectangular cell') - + + ####################### For debugging only ####################### if plotCells: plot_polygon_cells(bpoly, cellsFinal, meshFinalfout) + ################################################################# footprints, attributes = download_ustruct_bldgattr4region(cellsFinal,bpoly) print(f"\nFound a total of {len(footprints)} building footprints in {queryarea_printname}") diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py index 293ccc2..2a82d87 100644 --- a/brails/workflow/NSIParser.py +++ b/brails/workflow/NSIParser.py @@ -37,7 +37,7 @@ # Barbaros Cetiner # # Last updated: -# 04-19-2024 +# 04-30-2024 import requests import pandas as pd @@ -191,26 +191,27 @@ def GetRawDataROI(self, roi, outfile:str) -> None: print(f'Found a total of {len(pointsKeep)} building points in' ' NSI that are within the entered region of interest') - # Write the extracted NSI data in a GeoJSON file: - attrkeys = list(datadict[pointsKeep[0]].keys()) - geojson = {'type':'FeatureCollection', - "crs": {"type": "name", "properties": - {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, - 'features':[]} - for pt in pointsKeep: - feature = {'type':'Feature', - 'properties':{}, - 'geometry':{'type':'Point', - 'coordinates':[]}} - feature['geometry']['coordinates'] = [pt.x,pt.y] - attributes = datadict[pt] - for key in attrkeys: - attr = attributes[key] - feature['properties'][key] = 'NA' if attr is None else attr - geojson['features'].append(feature) - - with open(outfile, 'w') as outputFile: - json.dump(geojson, outputFile, indent=2) + if len(pointsKeep)!=0: + # Write the extracted NSI data in a GeoJSON file: + attrkeys = list(datadict[pointsKeep[0]].keys()) + geojson = {'type':'FeatureCollection', + "crs": {"type": "name", "properties": + {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, + 'features':[]} + for pt in pointsKeep: + feature = {'type':'Feature', + 'properties':{}, + 'geometry':{'type':'Point', + 'coordinates':[]}} + feature['geometry']['coordinates'] = [pt.x,pt.y] + attributes = datadict[pt] + for key in attrkeys: + attr = attributes[key] + feature['properties'][key] = 'NA' if attr is None else attr + geojson['features'].append(feature) + + with open(outfile, 'w') as outputFile: + json.dump(geojson, outputFile, indent=2) def GetNSIData(self, footprints:list, lengthUnit:str='ft', outfile:str=''): """ @@ -318,7 +319,10 @@ def occ_parser(occtype): del attributes['fp_json'] del attributes['fp'] fpHandler = FootprintHandler() - fpHandler._FootprintHandler__write_fp2geojson(footprints,attributes,outfile) + fpHandler._FootprintHandler__write_fp2geojson(footprints, + attributes, + outfile, + convertKeys=True) outfile = outfile.split('.')[0] + '.geojson' print(f'\nFinal inventory data available in {os.getcwd()}/{outfile}') From 0626354c39104d19aa8ce8f46f6bb83481dd7704 Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 30 Apr 2024 07:04:01 -0700 Subject: [PATCH 59/63] Bumping version number for minor release --- brails/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brails/_version.py 
b/brails/_version.py
index 726691b..f71b21a 100755
--- a/brails/_version.py
+++ b/brails/_version.py
@@ -1 +1 @@
-__version__ = '3.1.1'
+__version__ = '3.1.2'

From 9565ae9ff7a87b0b6405c5f16f673398cabc9c5b Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Tue, 30 Apr 2024 07:09:55 -0700
Subject: [PATCH 60/63] Updated BRAILS version number in the documentation

---
 docs/conf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index de27b11..04cb120 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -13,7 +13,7 @@
 # The short X.Y version
 #version = '1.0'
 # The full version, including alpha/beta/rc tags
-release = '3.1.1'
+release = '3.1.2'
 
 rst_prolog = """
 .. |app| replace:: BRAILS
@@ -28,7 +28,7 @@
 .. |short tool id| replace:: BRAILS
 .. |tool github link| replace:: `BRAILS Github page`_
.. _brails Github page: https://github.com/NHERI-SimCenter/BRAILS
-.. |tool version| replace:: 3.1.1
+.. |tool version| replace:: 3.1.2
 
 .. |SimCenter| replace:: `SimCenter`_
 .. _SimCenter: https://simcenter.designsafe-ci.org/

From 3eba2e93795bdd1fd4169232842574a409aa48d0 Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Tue, 30 Apr 2024 12:14:27 -0700
Subject: [PATCH 61/63] Added better server error handling for ROI searching

---
 brails/workflow/FootprintHandler.py | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/brails/workflow/FootprintHandler.py b/brails/workflow/FootprintHandler.py
index 8ebb7e9..a5ac82f 100644
--- a/brails/workflow/FootprintHandler.py
+++ b/brails/workflow/FootprintHandler.py
@@ -91,13 +91,19 @@ def __fetch_roi(self,queryarea:str,outfile:str=''):
             r = requests.get(nominatimquery, headers=headers)
             datalist = r.json()
 
-            areafound = False
-            for data in datalist:
-                queryarea_osmid = data['osm_id']
-                queryarea_name = data['display_name']
-                if data['osm_type']=='relation':
-                    areafound = True
-                    break
+            if r.status_code==200:
+                areafound = False
+                for data in datalist:
+                    queryarea_osmid = data['osm_id']
+                    queryarea_name = data['display_name']
+                    if data['osm_type']=='relation':
+                        areafound = True
+                        break
+            else:
+                sys.exit('Please try again. Server performing region search '
+                         f"returned the following error message: {datalist['title']}." +
+                         ' This is a server-side error that does not require a' +
+                         ' change to BRAILS inputs.')
 
             if areafound==True:
                 try:

From 0e31a3467839f45b1175abc589b371333d1d038d Mon Sep 17 00:00:00 2001
From: bacetiner
Date: Tue, 30 Apr 2024 15:23:04 -0700
Subject: [PATCH 62/63] Added better error handling

---
 brails/workflow/NSIParser.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/brails/workflow/NSIParser.py b/brails/workflow/NSIParser.py
index 2a82d87..e51708c 100644
--- a/brails/workflow/NSIParser.py
+++ b/brails/workflow/NSIParser.py
@@ -154,10 +154,13 @@ def GetRawDataROI(self, roi, outfile:str) -> None:
 
         # If ROI is defined as a list of lists of footprint coordinates:
         if type(roi) is list:
-            # Determine the coordinates of the bounding box including the footprints:
-            bbox = self.__get_bbox(roi)
-            footprints = roi.copy()
-            bpoly = None
+            if len(roi)>0:
+                # Determine the coordinates of the bounding box including the footprints:
+                bbox = self.__get_bbox(roi)
+                footprints = roi.copy()
+                bpoly = None
+            else:
+                sys.exit('No building footprint detected. 
Please try again for a larger region of interest') else: try: roitype = roi.geom_type @@ -284,7 +287,11 @@ def occ_parser(occtype): return attributes # Determine the coordinates of the bounding box including the footprints: - bbox = self.__get_bbox(footprints) + if len(footprints)>0: + bbox = self.__get_bbox(footprints) + else: + sys.exit('No building footprint detected. Please provide valid ' + + 'footprint data to run this method.') # Get the NBI data for computed bounding box: datadict = self.__get_nbi_data(bbox) From f63c0948dea965444af2a38f623584629264322a Mon Sep 17 00:00:00 2001 From: bacetiner Date: Tue, 21 May 2024 19:50:51 -0700 Subject: [PATCH 63/63] Added tiling functionality for by-passing API max feature restrictions --- .../workflow/TransportationElementHandler.py | 122 +++++++++++++----- 1 file changed, 89 insertions(+), 33 deletions(-) diff --git a/brails/workflow/TransportationElementHandler.py b/brails/workflow/TransportationElementHandler.py index b2882a6..4454a2c 100644 --- a/brails/workflow/TransportationElementHandler.py +++ b/brails/workflow/TransportationElementHandler.py @@ -42,7 +42,7 @@ import requests import copy import json -from shapely.geometry import LineString, Point +from shapely.geometry import Polygon, LineString, Point, box from shapely import wkt from requests.adapters import HTTPAdapter, Retry from brails.workflow.FootprintHandler import FootprintHandler @@ -52,11 +52,16 @@ def __init__(self): self.queryarea = '' self.output_files = {'roads':'Roads.geojson'} - def fetch_transportation_elements(self,queryarea): + def fetch_transportation_elements(self, queryarea:str): - def query_generator(bpoly,eltype): + def query_generator(bpoly: Polygon, eltype:str) -> str: + # Get the bounds of the entered bounding polygon and lowercase the + # entered element type: bbox = bpoly.bounds eltype = eltype.lower() + + # If element type is bridge, generate an NBI query for the bounds + # of bpoly: if eltype=='bridge': query = ('https://geo.dot.gov/server/rest/services/Hosted/' + 'National_Bridge_Inventory_DS/FeatureServer/0/query?'+ @@ -64,6 +69,9 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # If element type is tunnel, generate an NTI query for the bounds + # of bpoly: elif eltype=='tunnel': query = ('https://geo.dot.gov/server/rest/services/Hosted/' + 'National_Tunnel_Inventory_DS/FeatureServer/0/query?' + @@ -71,6 +79,9 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # If element type is railroad, generate an NARNL query for the + # bounds of bpoly: elif eltype=='railroad': query = ('https://geo.dot.gov/server/rest/services/Hosted/' + 'North_American_Rail_Network_Lines_DS/FeatureServer/0/query?' 
+ @@ -78,6 +89,9 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # If element type is primary_road, generate a TIGER query for the + # bounds of bpoly: elif eltype=='primary_road': query = ('https://tigerweb.geo.census.gov/arcgis/rest/services/' + 'TIGERweb/Transportation/MapServer/2/query?where=&text='+ @@ -85,6 +99,9 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # If element type is secondary_road, generate a TIGER query for the + # bounds of bpoly: elif eltype=='secondary_road': query = ('https://tigerweb.geo.census.gov/arcgis/rest/services/' + 'TIGERweb/Transportation/MapServer/6/query?where=&text='+ @@ -92,6 +109,9 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # If element type is local_road, generate a TIGER query for the + # bounds of bpoly: elif eltype=='local_road': query = ('https://tigerweb.geo.census.gov/arcgis/rest/services/' + 'TIGERweb/Transportation/MapServer/8/query?where=&text='+ @@ -99,11 +119,13 @@ def query_generator(bpoly,eltype): f"&geometry={bbox[0]}%2C{bbox[1]}%2C{bbox[2]}%2C{bbox[3]}" + '&geometryType=esriGeometryEnvelope&inSR=4326' + '&spatialRel=esriSpatialRelIntersects&outSR=4326&f=json') + + # Otherwise, raise a NotImplementedError: else: raise NotImplementedError('Element type not implemented') return query - def get_el_counts(bpoly, eltype: str) -> int: + def create_pooling_session(): # Define a retry stratey for common error codes to use when # downloading data: s = requests.Session() @@ -112,24 +134,54 @@ def get_el_counts(bpoly, eltype: str) -> int: status_forcelist=[500, 502, 503, 504]) s.mount('https://', HTTPAdapter(max_retries=retries)) + return s + + def get_el_counts(bpoly:Polygon, eltype: str) -> int: + # Create a persistent requests session: + s = create_pooling_session() + # Create the query required to get the element counts: query = query_generator(bpoly,eltype) - query.replace('outSR=4326','returnCountOnly=true') + query = query.replace('outSR=4326','returnCountOnly=true') - # Download element count using the defined retry strategy: + # Download the element count for the bounding polygon using the + # defined retry strategy: r = s.get(query) elcount = r.json()['count'] - return elcount + return elcount + + def get_max_el_count(eltype: str) -> int: + # Create a persistent requests session: + s = create_pooling_session() + + # Create the query required to get the element counts: + query = query_generator(box(-1,1,-1,1),eltype) + query = query.split('/query?') + query = query[0] + '?f=pjson' + # Download the maximum element count for the bounding polygon using + # the defined retry strategy: + r = s.get(query) + maxelcount = r.json()['maxRecordCount'] + + return maxelcount - def conv2geojson(datalist,eltype,bpoly): + def list2geojson(datalist:list, eltype:str, bpoly:Polygon) -> dict: + # Lowercase the entered element type string: eltype = eltype.lower() + + # Populate the geojson header: geojson = {'type':'FeatureCollection', "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, 'features':[]} + + # Scroll through each item in the 
datalist: for item in datalist: + # If the element is a bridge or tunnel, parse its geometry as a + # point and check if the extracted point is within the bounding + # polygon: if eltype in ['bridge','tunnel']: geometry = [item['geometry']['x'],item['geometry']['y']] if bpoly.contains(Point(geometry)): @@ -140,6 +192,9 @@ def conv2geojson(datalist,eltype,bpoly): else: continue else: + # If the element is a road segment, parse it as a MultiLineString + # and check if the extracted segment is within the bounding + # polygon: geometry = item['geometry']['paths'] if bpoly.intersects(LineString(geometry[0])): feature = {'type':'Feature', @@ -148,18 +203,34 @@ def conv2geojson(datalist,eltype,bpoly): 'coordinates':[]}} else: continue + + # Copy the obtained geometry in a feature dictionary: feature['geometry']['coordinates'] = geometry.copy() + + # Read item attributes properties = item['attributes'] - for prop in properties: - if prop.count('_')>1: - propname = prop[:prop.rindex('_')] + + # For each attribute: + for prop in properties.keys(): + # Clean up the property name from redundant numeric text: + if '_' in prop: + strsegs = prop.split('_') + removestr = '' + for seg in strsegs: + if any(char.isdigit() for char in seg): + removestr = '_' + seg + propname = prop.replace(removestr,'') else: propname = prop + + # Write the property in a feature dictionary: feature['properties'][propname] = properties[prop] + + # Add the feature in the geojson dictionary: geojson['features'].append(feature) return geojson - def print_el_counts(datalist,eltype): + def print_el_counts(datalist: list, eltype:str): nel = len(datalist) eltype_print = eltype.replace('_',' ') @@ -175,14 +246,11 @@ def print_el_counts(datalist,eltype): print(f'Found {nel} {eltype_print} {elntype}{suffix}') - def write2geojson(bpoly,eltype): - # Define a retry stratey for common error codes to use when - # downloading data: - s = requests.Session() - retries = Retry(total=10, - backoff_factor=0.1, - status_forcelist=[500, 502, 503, 504]) - s.mount('https://', HTTPAdapter(max_retries=retries)) + def write2geojson(bpoly:Polygon, eltype:str) -> dict: + nel = get_el_counts(bpoly,eltype) + + # Create a persistent requests session: + s = create_pooling_session() # Download element data using the defined retry strategy: query = query_generator(bpoly,eltype) @@ -197,7 +265,7 @@ def write2geojson(bpoly,eltype): datalist = r.json()['features'] # If road data convert it into GeoJSON format: - jsonout = conv2geojson(datalist,eltype,bpoly) + jsonout = list2geojson(datalist,eltype,bpoly) # If not road data convert it into GeoJSON format and write it into # a file: @@ -260,18 +328,6 @@ def combine_write_roadjsons(roadjsons,bpoly): print_el_counts(roadjsons_combined['features'],'road') with open('Roads.geojson', 'w') as output_file: json.dump(roadjsons_combined, output_file, indent=2) - - def mergeDataList(dataLists): - merged = dataLists[0] - for i in range(1,len(dataLists)): - data = dataLists[i] - for left, right in zip(merged,data): - if left['attributes']['OBJECTID'] != right['attributes']['OBJECTID']: - print(("Data downloaded from the National Bridge Inventory")+ - ("has miss matching OBJECTID between batches.")+ - ("This may be solved by running Brails again")) - left['attributes'].update(right['attributes']) - return merged self.queryarea = queryarea
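The final patch above is truncated before the tiling logic that its subject line describes. As a hedged illustration only, under the assumption that tiling proceeds by recursive quadrant splitting, the sketch below shows how per-region element counts (get_el_counts) and the server record cap (get_max_el_count) could drive such a subdivision. The split_into_cells helper and the even-split assumption are hypothetical, not the actual BRAILS implementation.

```python
from shapely.geometry import Polygon, box

def split_into_cells(bpoly: Polygon, elcount: int, maxelcount: int) -> list:
    # If the server can return every element in this cell in one response,
    # keep the cell as-is:
    if elcount <= maxelcount:
        return [bpoly]
    # Otherwise split the cell's bounds into four quadrants and recurse:
    (minx, miny, maxx, maxy) = bpoly.bounds
    midx, midy = (minx + maxx)/2, (miny + maxy)/2
    quadrants = [box(minx, miny, midx, midy), box(midx, miny, maxx, midy),
                 box(minx, midy, midx, maxy), box(midx, midy, maxx, maxy)]
    cells = []
    for quad in quadrants:
        # A real implementation would re-query the count for each quadrant,
        # e.g., via get_el_counts(quad, eltype); an even split of elements
        # across quadrants is assumed here for illustration:
        cells += split_into_cells(quad, elcount//4, maxelcount)
    return cells

# Example: a region holding 7000 road segments against a 2000-record API cap
# is divided into four tiles, each safely below the cap:
cells = split_into_cells(box(-122.3, 37.85, -122.25, 37.9), 7000, 2000)
print(len(cells))
```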