Merge pull request #175 from bacetiner/master
NSIParser now includes comments for better readability
bacetiner authored Jan 20, 2024
2 parents 0d9f6ba + 114295a commit 2be7850
Showing 1 changed file with 146 additions and 88 deletions.
234 changes: 146 additions & 88 deletions brails/workflow/NSIParser.py
@@ -37,7 +37,7 @@
# Barbaros Cetiner
#
# Last updated:
# 01-18-2024
# 01-19-2024

import requests
import numpy as np
@@ -52,96 +52,154 @@ def __init__(self):
self.inventory = []

def GenerateBldgInventory(self,footprints):
minlat = footprints[0][0][1]
minlon = footprints[0][0][0]
maxlat = footprints[0][0][1]
maxlon = footprints[0][0][0]
for fp in footprints:
for vert in fp:
if vert[1]>maxlat:
maxlat = vert[1]
if vert[0]>maxlon:
maxlon = vert[0]
if vert[1]<minlat:
minlat = vert[1]
if vert[0]<minlon:
minlon = vert[0]

bboxstr = f'{minlon:.5f},{minlat:.5f},{minlon:.5f},{maxlat:.5f},{maxlon:.5f},{maxlat:.5f},{maxlon:.5f},{minlat:.5f},{minlon:.5f},{minlat:.5f}'

url = "https://nsi.sec.usace.army.mil/nsiapi/structures?bbox="
"""
Function that reads NSI building points and matches the data to a set
of footprints.
Input: List of footprint data defined as a list of lists of
coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]].
Vertices are defined in [longitude,latitude] fashion.
Output: Building inventory for the footprints containing the attributes
1)latitude, 2)longitude, 3)building plan area, 4)number of
floors, 5)year of construction, 6)replacement cost, 7)structure
type, 8)occupancy class, 9)footprint polygon
"""

# Getting the NSI building points for the
# Define a retry strategy for common error codes to use when
# downloading tiles:
s = requests.Session()
retries = Retry(total=5,
backoff_factor=0.1,
status_forcelist=[500, 502, 503, 504])
s.mount('https://', HTTPAdapter(max_retries=retries))
def get_bbox(footprints):
"""
Function that determines the extent of the area covered by the
footprints as a tight-fit rectangular bounding box
Input: List of footprint data defined as a list of lists of
coordinates in EPSG 4326, i.e., [[vert1],....[vertlast]].
Vertices are defined in [longitude,latitude] fashion
Output: Tuple containing the minimum and maximum latitude and
longitude values
"""
# Initialize the bounding box corners using the first vertex of the first footprint:
minlat = footprints[0][0][1]
minlon = footprints[0][0][0]
maxlat = footprints[0][0][1]
maxlon = footprints[0][0][0]
for fp in footprints:
for vert in fp:
if vert[1]>maxlat:
maxlat = vert[1]
if vert[0]>maxlon:
maxlon = vert[0]
if vert[1]<minlat:
minlat = vert[1]
if vert[0]<minlon:
minlon = vert[0]
return (minlat,minlon,maxlat,maxlon)
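# Illustrative behavior of get_bbox, with hypothetical numbers: for footprints
# whose vertices span longitudes -122.26 to -122.25 and latitudes 37.87 to
# 37.88, the returned tuple would be (37.87, -122.26, 37.88, -122.25), i.e.
# (minlat, minlon, maxlat, maxlon).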

# Download tile using the defined retry strategy:
print('\nGetting National Structure Inventory (NSI) building points for the building footprints...')
response = s.get(url+bboxstr)
datalist = response.json()['features']
def get_nbi_data(bbox):
"""
Function that gets the NSI data for a bounding box entry
Input: Tuple containing the minimum and maximum latitude and
longitude values
Output: Dictionary containing extracted NSI data keyed using the
NSI point coordinates
"""

# Unpack the bounding box coordinates:
(minlat,minlon,maxlat,maxlon) = bbox

# Construct the query URL for the bounding box input
baseurl = "https://nsi.sec.usace.army.mil/nsiapi/structures?bbox="
bboxstr = (f'{minlon:.5f},{minlat:.5f},{minlon:.5f},{maxlat:.5f},'
           f'{maxlon:.5f},{maxlat:.5f},{maxlon:.5f},{minlat:.5f},'
           f'{minlon:.5f},{minlat:.5f}')
url = baseurl + bboxstr

# Define a retry strategy for common error codes to use in downloading
# NSI data:
s = requests.Session()
retries = Retry(total=5,
backoff_factor=0.1,
status_forcelist=[500, 502, 503, 504])
s.mount('https://', HTTPAdapter(max_retries=retries))

# Download NSI data using the defined retry strategy and read the downloaded
# GeoJSON data into a list:
print('\nGetting National Structure Inventory (NSI) building points for the building footprints...')
response = s.get(url)
datalist = response.json()['features']

# Write the data in datalist into a dictionary for better data access,
# and to filter out duplicate entries:
datadict = {}
for data in datalist:
pt = Point(data['geometry']['coordinates'])
datadict[pt] = data['properties']
return datadict
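# Shape of the returned dictionary, sketched with hypothetical coordinates and
# elided values: keys are shapely Point objects at the NSI building locations,
# values are the corresponding attribute dictionaries, e.g.
#   {Point(-122.2594, 37.8719): {'sqft': ..., 'num_story': ..., ...}, ...}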

datadict = {}
count = 0
for data in datalist:
pt = Point(data['geometry']['coordinates'])
if pt in datadict.keys():
count+=1
datadict[pt] = data['properties']
points = list(datadict.keys())
pttree = STRtree(points)

ress = []
for fp in footprints:
res = pttree.query(Polygon(fp))
if res.size!=0:
ress.append(res)
else:
ress.append(np.empty(shape=(0, 0)))

footprints_out = []
lat = []
lon = []
planarea = []
nstories = []
yearbuilt = []
repcost = []
strtype = []
occ = []
for ind, fp in enumerate(footprints):
if ress[ind].size!=0:
footprints_out.append(('{"type":"Feature","geometry":' +
'{"type":"Polygon","coordinates":[' +
f"""{fp}""" +
']},"properties":{}}'))
ptind = ress[ind][0]
ptres = datadict[points[ptind]]
lat.append(ptres['y'])
lon.append(ptres['x'])
planarea.append(ptres['sqft'])
nstories.append(ptres['num_story'])
yearbuilt.append(ptres['med_yr_blt'])
repcost.append(ptres['val_struct'])
strtype.append(ptres['bldgtype'])
occ.append(ptres['occtype'])

print(f'Found a total of {len(footprints_out)} building points in NSI that match the footprint data.')

inventory = pd.DataFrame(pd.Series(lat,name='Latitude'))
inventory['Longitude'] = lon
inventory['PlanArea'] = planarea
inventory['NumberOfStories'] = nstories
inventory['YearBuilt'] = yearbuilt
inventory['ReplacementCost'] = repcost
inventory['StructureType'] = strtype
inventory['OccupancyClass'] = occ
inventory['Footprint'] = footprints_out
def get_inv_from_datadict(datadict):
# Create an STR tree for the building points obtained from NSI:
points = list(datadict.keys())
pttree = STRtree(points)

# Find the data points that are enclosed in each footprint:
ress = []
for fp in footprints:
res = pttree.query(Polygon(fp))
if res.size!=0:
ress.append(res)
else:
ress.append(np.empty(shape=(0, 0)))

# Match NSI data to each footprint:
footprints_out = []
lat = []
lon = []
planarea = []
nstories = []
yearbuilt = []
repcost = []
strtype = []
occ = []
for ind, fp in enumerate(footprints):
if ress[ind].size!=0:
footprints_out.append(('{"type":"Feature","geometry":' +
'{"type":"Polygon","coordinates":[' +
f"""{fp}""" +
']},"properties":{}}'))
ptind = ress[ind][0]
ptres = datadict[points[ptind]]
lat.append(ptres['y'])
lon.append(ptres['x'])
planarea.append(ptres['sqft'])
nstories.append(ptres['num_story'])
yearbuilt.append(ptres['med_yr_blt'])
repcost.append(ptres['val_struct'])
strtype.append(ptres['bldgtype'])
occ.append(ptres['occtype'])

# Display the number of footprints that can be matched to NSI points:
print(f'Found a total of {len(footprints_out)} building points in'
' NSI that match the footprint data.')

# Write the extracted features into a Pandas dataframe:
inventory = pd.DataFrame(pd.Series(lat,name='Latitude'))
inventory['Longitude'] = lon
inventory['PlanArea'] = planarea
inventory['NumberOfStories'] = nstories
inventory['YearBuilt'] = yearbuilt
inventory['ReplacementCost'] = repcost
inventory['StructureType'] = strtype
inventory['OccupancyClass'] = occ
inventory['Footprint'] = footprints_out
return inventory
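# The returned dataframe therefore contains one row per matched footprint with
# the columns Latitude, Longitude, PlanArea, NumberOfStories, YearBuilt,
# ReplacementCost, StructureType, OccupancyClass, and Footprint.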

# Determine the coordinates of the bounding box including the footprints:
bbox = get_bbox(footprints)

# Get the NSI data for the computed bounding box:
datadict = get_nbi_data(bbox)

self.inventory= inventory
# Create a footprint-merged building inventory from the extracted NSI data:
self.inventory = get_inv_from_datadict(datadict)

inventory.to_csv('inventory.csv', index=True, index_label='id')
# Write the created inventory in R2D-compatible CSV format:
self.inventory.to_csv('inventory.csv', index=True, index_label='id')
print(f'\nFinal inventory data available in {os.getcwd()}/inventory.csv')
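For context, a minimal usage sketch of the updated method. The class name NSIParser and the import path brails.workflow.NSIParser are inferred from the file path and commit title rather than shown in this diff, and the footprint coordinates are hypothetical:

from brails.workflow.NSIParser import NSIParser

# Hypothetical footprint: one building outline as [longitude, latitude] vertices
footprints = [[[-122.2595, 37.8720], [-122.2593, 37.8720],
               [-122.2593, 37.8718], [-122.2595, 37.8718]]]

parser = NSIParser()
parser.GenerateBldgInventory(footprints)  # builds the inventory and writes inventory.csv
print(parser.inventory.head())            # pandas dataframe of the matched attributes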
