Skip to content

Commit

Permalink
Merge pull request #183 from bacetiner/master
Browse files Browse the repository at this point in the history
Added new GeoJSON input and NSI interfacing capabilities planned for new R2D widget UI
  • Loading branch information
bacetiner authored Feb 1, 2024
2 parents 69fb187 + 6813edd commit dafaa05
Show file tree
Hide file tree
Showing 4 changed files with 197 additions and 90 deletions.
28 changes: 19 additions & 9 deletions brails/InventoryGenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
# Satish Rao
#
# Last updated:
# 01-29-2024
# 02-01-2024

import random
import sys
Expand All @@ -58,6 +58,7 @@
YearBuiltClassifier)
from .workflow.ImHandler import ImageHandler
from .workflow.FootprintHandler import FootprintHandler
from brails.workflow.NSIParser import NSIParser

class InventoryGenerator:

Expand Down Expand Up @@ -136,7 +137,8 @@ def enabled_attributes(self):
"InventoryGenerator.generate, simply set attributes='all'")

def generate(self,attributes=['numstories','occupancy','roofshape'],
outFile='inventory.csv', lengthUnit='ft'):
useNSIBaseline= True, outFile='inventory.csv',
lengthUnit='ft'):

def write_to_dataframe(df,predictions,column,imtype='street_images'):
"""
Expand Down Expand Up @@ -266,7 +268,7 @@ def write_inventory_output(inventorydf,outFile):
# Rearrange the column order of dfout such that the Footprint field is
# the last:
cols = [col for col in dfout.columns if col!='Footprint']
new_cols = ['Latitude','Longitude'] + cols[:-2] + ['Footprint']
new_cols = ['Latitude','Longitude'] + cols + ['Footprint']
dfout = dfout[new_cols]

# If the inventory is desired in CSV format, write dfout to a CSV:
Expand Down Expand Up @@ -305,30 +307,38 @@ def write_inventory_output(inventorydf,outFile):
json.dump(geojson, output_file, indent=2)

print(f'\nFinal inventory data available in {outFile} in {os.getcwd()}')

# Parse/correct the list of user requested building attributes:
self.attributes = parse_attribute_input(attributes, self.enabledAttributes)

# Create a list of footprints for easier module calls:
footprints = self.inventory['Footprint'].values.tolist()

if useNSIBaseline:
nsi = NSIParser()
nsi.GenerateBldgInventory(footprints)
attributes_process = set(self.attributes) - set(nsi.attributes)
self.inventory = nsi.inventory.copy(deep=True)
footprints = self.inventory['Footprint'].values.tolist()
else:
attributes_process = self.attributes.copy()

# Download the images required for the requested attributes:
image_handler = ImageHandler(self.apiKey)

if 'roofshape' in self.attributes: #or 'roofcover' in self.attributes:
if 'roofshape' in attributes_process: #or 'roofcover' in attributes_process:
image_handler.GetGoogleSatelliteImage(footprints)
imsat = [im for im in image_handler.satellite_images if im is not None]
self.inventory['satellite_images'] = image_handler.satellite_images

streetAttributes = self.enabledAttributes[:]
streetAttributes.remove('roofshape')
#streetAttributes.remove('roofcover')
if set.intersection(set(streetAttributes),set(self.attributes))!=set():
if set.intersection(set(streetAttributes),set(attributes_process))!=set():
image_handler.GetGoogleStreetImage(footprints)
imstreet = [im for im in image_handler.street_images if im is not None]
self.inventory['street_images'] = image_handler.street_images

for attribute in self.attributes:
for attribute in attributes_process:

if attribute=='chimney':
# Initialize the chimney detector object:
Expand Down Expand Up @@ -449,9 +459,9 @@ def write_inventory_output(inventorydf,outFile):
# Bring the attribute values to the desired length unit:
if lengthUnit.lower()=='m':
self.inventory['PlanArea'] = self.inventory['PlanArea'].apply(lambda x: x*0.0929)
if 'buildingheight' in self.attributes:
if 'buildingheight' in attributes_process:
self.inventory['buildingheight'] = self.inventory['buildingheight'].apply(lambda x: x*0.3048)
if 'roofeaveheight' in self.attributes:
if 'roofeaveheight' in attributes_process:
self.inventory['roofeaveheight'] = self.inventory['roofeaveheight'].apply(lambda x: x*0.3048)

        # Write the generated inventory in outFile:
Expand Down
4 changes: 2 additions & 2 deletions brails/TranspInventoryGenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,8 @@ def combineAndFormat_HWY(self, minimumHAZUS=True, connectivity=False, maxRoadLen
# Dump to json file
with open("hwy_inventory.geojson", "w") as f:
json.dump(combinedGeoJSON, f, indent = 2)
print('Combined transportation inventory saved in hwy_inventory.geojson'
'This file is suitable for R2D use and is available in {os.getcwd()}')
print('Combined transportation inventory saved in hwy_inventory.geojson.'
f' This file is suitable for R2D use and is available in {os.getcwd()}')
return

# Convert common length units
Expand Down
216 changes: 148 additions & 68 deletions brails/workflow/FootprintHandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
# Barbaros Cetiner
#
# Last updated:
# 01-24-2024
# 02-01-2024

import math
import json
Expand All @@ -58,7 +58,6 @@ def __init__(self):
self.availableDataSources = ['osm','ms','usastr']
self.fpSource = 'osm'
self.footprints = []
self.bldgheights = []

def fetch_footprint_data(self,queryarea,fpSource='osm',attrmap=None):
"""
Expand Down Expand Up @@ -494,7 +493,7 @@ def polygon_area(lats, lons):
else: #return in ratio of sphere total area
return area

def load_footprint_data(fpfile,attrmapfile,fpSource):
def load_footprint_data(fpfile,fpSource,attrmapfile):
"""
Function that loads footprint data from a GeoJSON file
Expand Down Expand Up @@ -544,26 +543,70 @@ def fp_download(bbox,fpSource):
elif fpSource=='usastr':
footprints = get_usastruct_footprints(bbox)
return footprints

def parse_fp_geojson(data, attrmap, attrkeys, fpfile):
# Create the attribute fields that will be extracted from the
# GeoJSON file:
attributes = {}
for attr in attrmap.values():
attributes[attr] = []

footprints_out = []
discardedfp_count = 0
correctedfp_count = 0
for loc in data:
# If the footprint is a polygon:
if loc['geometry']['type']=='Polygon':
# Read footprint coordinates:
temp_fp = loc['geometry']['coordinates']

with open(attrmapfile) as f:
lines = [line.rstrip() for line in f]
attrmap = {}
for line in lines:
lout = line.split(':')
if lout[1]!='':
attrmap[lout[1]] = lout[0]

attributes = {}
for attr in attrmap.values():
attributes[attr] = []

with open(fpfile) as f:
data = json.load(f)['features']

attrkeys = list(data[0]['properties'].keys())
print(attrkeys)
ptdata = all(loc['geometry']['type']=='Point' for loc in data)
if ptdata:
# Check down to two levels deep into extracted JSON
# structure to account for inconsistencies in the
# provided footprint data
if len(temp_fp)>1:
fp = temp_fp[:]
elif len(temp_fp[0])>1:
fp = temp_fp[0][:]
elif len(temp_fp[0][0])>1:
fp = temp_fp[0][0][:]

                    # If multiple polygons are detected for a location,
# take the outermost polygon:
if len(fp)==2:
list_len = [len(i) for i in fp]
fp = fp[list_len.index(max(list_len))]
correctedfp_count+=1

# Add the footprint and attributes to the output
# variables
footprints_out.append(fp)
if attrkeys:
for key in attrkeys:
try:
attributes[attrmap[key]].append(loc['properties'][key])
except:
pass
# If the footprint is a multi-polygon, discard the footprint:
elif loc['geometry']['type']=='MultiPolygon':
discardedfp_count+=1

# Print the results of the footprint extraction:
if discardedfp_count==0:
print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
else:
print(f"Corrected {correctedfp_count} building footprint{pluralsuffix(correctedfp_count)} with invalid geometry")
print(f"Discarded {discardedfp_count} building footprint{pluralsuffix(discardedfp_count)} with invalid geometry")
print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")

return (footprints_out, attributes)

def parse_pt_geojson(data, attrmap, attrkeys, fpSource):
# Create the attribute fields that will be extracted from the
# GeoJSON file:
attributes = {}
for attr in attrmap.values():
attributes[attr] = []

# Write the data in datalist into a dictionary for better data access,
# and filtering the duplicate entries:
datadict = {}
Expand All @@ -575,9 +618,21 @@ def fp_download(bbox,fpSource):
datadict[pt] = loc['properties']

points = list(datadict.keys())
# Determine the coordinates of the bounding box including the points:

# Determine the coordinates of the bounding box containing the
# points:
bbox = get_bbox(ptcoords)
footprints = fp_download(bbox,fpSource)

# Get the footprint data corresponding to the point GeoJSON
# input:
if 'geojson' in fpSource.lower():
with open(fpSource) as f:
data = json.load(f)['features']
(footprints,_) = parse_fp_geojson(data, {}, {}, fpSource)
else:
footprints = fp_download(bbox,fpSource)

# Create an STR tree for efficient parsing of point coordinates:
pttree = STRtree(points)

# Find the data points that are enclosed in each footprint:
Expand All @@ -601,44 +656,64 @@ def fp_download(bbox,fpSource):
attributes[attrmap[key]].append(ptres[key])
except:
pass


return (footprints_out, attributes)

if attrmapfile:
# Create a dictionary for mapping the attributes in the GeoJSON
# file to BRAILS inventory naming conventions:
with open(attrmapfile) as f:
lines = [line.rstrip() for line in f]
attrmap = {}
for line in lines:
lout = line.split(':')
if lout[1]!='':
attrmap[lout[1]] = lout[0]

# Read the GeoJSON file and check if all the data in the file is
# point data:
with open(fpfile) as f:
data = json.load(f)['features']
ptdata = all(loc['geometry']['type']=='Point' for loc in data)

# Identify the attribute keys in the GeoJSON file:
attrkeys0 = list(data[0]['properties'].keys())
if attrkeys0:
print('Building attributes detected in the input GeoJSON: ' +
', '.join(attrkeys0))

# Check if all of the attribute keys in the GeoJSON have
# correspondence in the map. Ignore the keys that do not have
# correspondence:
attrkeys = set()
for key in attrkeys0:
try:
attrmap[key]
attrkeys.add(key)
except:
pass
ignored_Attr = set(attrkeys0) - attrkeys
if ignored_Attr:
print('Attribute mapping does not cover all attributes detected in'
'the input GeoJSON. Ignoring detected attributes: ' +
', '.join(ignored_Attr))
else:
attrmap = {}
attrkeys = {}

if ptdata:
(footprints_out, attributes) = parse_pt_geojson(data,
attrmap,
attrkeys,
fpSource)
else:
footprints_out = []
discardedfp_count = 0
correctedfp_count = 0
for loc in data:
if loc['geometry']['type']=='Polygon':
temp_fp = loc['geometry']['coordinates']
if len(temp_fp)>1:
fp = temp_fp[:]
elif len(temp_fp[0])>1:
fp = temp_fp[0][:]
elif len(temp_fp[0][0])>1:
fp = temp_fp[0][0][:]

if len(fp)==2:
list_len = [len(i) for i in fp]
fp = fp[list_len.index(max(list_len))]
correctedfp_count+=1

footprints_out.append(fp)
for key in attrkeys:
try:
attributes[attrmap[key]].append(loc['properties'][key])
except:
pass

elif loc['geometry']['type']=='MultiPolygon':
discardedfp_count+=1

if discardedfp_count==0:
print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
else:
print(f"Corrected {correctedfp_count} building footprint{pluralsuffix(correctedfp_count)} with invalid geometry")
print(f"Discarded {discardedfp_count} building footprint{pluralsuffix(discardedfp_count)} with invalid geometry")
print(f"Extracted a total of {len(footprints_out)} building footprints from {fpfile}")
(footprints_out, attributes) = parse_fp_geojson(data,
attrmap,
attrkeys,
fpfile)
fpSource = fpfile

return (footprints_out, attributes)
return (footprints_out, attributes, fpSource)

def fp_source_selector(self):
if self.fpSource=='osm':
Expand All @@ -655,20 +730,22 @@ def fp_source_selector(self):
self.fpSource = fpSource
if isinstance(self.queryarea,str):
if 'geojson' in queryarea.lower():
(self.footprints,self.attributes) = load_footprint_data(
(self.footprints,self.attributes,self.fpSource) = load_footprint_data(
self.queryarea,
attrmap,
self.fpSource)
self.fpSource,
attrmap)
bldgheights = []
else:
(self.footprints,self.bldgheights) = fp_source_selector(self)
(self.footprints,bldgheights) = fp_source_selector(self)
elif isinstance(queryarea,tuple):
(self.footprints,self.bldgheights) = fp_source_selector(self)
(self.footprints,bldgheights) = fp_source_selector(self)
elif isinstance(queryarea,list):
self.footprints = []
bldgheights = []
for query in self.queryarea:
(fps, bldghts) = fp_source_selector(query)
self.footprints.extend(fps)
self.bldgheights.extend(bldghts)
bldgheights.extend(bldghts)
else:
sys.exit('Incorrect location entry. The location entry must be defined as' +
' 1) a string or a list of strings containing the name(s) of the query areas,' +
Expand All @@ -677,8 +754,11 @@ def fp_source_selector(self):
' bounding box of interest in (lon1, lat1, lon2, lat2) format.' +
' For defining a bounding box, longitude and latitude values' +
' shall be entered for the vertex pairs of any of the two' +
' diagonals of the rectangular bounding box.')

' diagonals of the rectangular bounding box.')

if bldgheights!=[]:
self.attributes['buildingheight'] = bldgheights.copy()

self.fpAreas = []
for fp in self.footprints:
lons = []
Expand Down
Loading

0 comments on commit dafaa05

Please sign in to comment.