Commit

initial scripts for HESS manuscript submission

jupflug committed Apr 5, 2022
0 parents commit f725a4f
Showing 11 changed files with 2,792 additions and 0 deletions.
2,340 changes: 2,340 additions & 0 deletions discretSensitivity_combined_script.ipynb

Large diffs are not rendered by default.

116 changes: 116 additions & 0 deletions reanalyPlotTools/.ipynb_checkpoints/reanalyPlotTools-checkpoint.py
File contents are identical to reanalyPlotTools/reanalyPlotTools.py below.
107 changes: 107 additions & 0 deletions reanalyPlotTools/.ipynb_checkpoints/snotelPlotTools-checkpoint.py
@@ -0,0 +1,107 @@
import geopandas as gpd
from geopandas import GeoSeries
from shapely.geometry import Polygon, Point
import netCDF4 as nc
import glob
import ulmo
import pandas as pd
import numpy as np

def reanalysisPixels(dset,geo_df_list):
    # Map each station's (lat, lon) to the nearest reanalysis grid indices,
    # stored as integer [lon_idx, lat_idx] pairs so they can index arrays directly
    reanalysis_coords = np.empty((0,2),dtype=int)
    for coordinates in geo_df_list:
        diffy = np.abs(dset['Latitude'][:]-coordinates[0])
        diffx = np.abs(dset['Longitude'][:]-coordinates[1])
        reanalysis_coords = np.vstack((reanalysis_coords,[np.where(diffx == diffx.min())[0][0],
                                                          np.where(diffy == diffy.min())[0][0]]))
    return reanalysis_coords

def fetch(wsdlurl,sitecode,variablecode,start_date,end_date):
    values_df = None
    try:
        # Pull data into a dictionary, indexed by station and date
        site_values = ulmo.cuahsi.wof.get_values(wsdlurl,sitecode,variablecode,
                                                 start = start_date,end = end_date)
        values_df = pd.DataFrame.from_dict(site_values['values'])
        values_df['datetime'] = pd.to_datetime(values_df['datetime'], utc=True)
        values_df = values_df.set_index('datetime')
        values_df['value'] = pd.to_numeric(values_df['value']).replace(-9999,np.nan)
        # Keep only quality-controlled observations
        values_df = values_df[values_df['quality_control_level_code'] == '1']
    except Exception:
        # The service raises an exception when no good data exists for this site
        print("unable to fetch %s" % variablecode)
    return values_df

class snotelPlotTools:

    @staticmethod
    def getBoundingBox(dat_path,extension):
        # Open the first reanalysis file and build the domain bounding polygon
        fnames = sorted(glob.glob(dat_path+extension))
        dset = nc.Dataset(fnames[0])
        xmin = dset['Longitude'][:].min()
        xmax = dset['Longitude'][:].max()
        ymin = dset['Latitude'][:].min()
        ymax = dset['Latitude'][:].max()

        coords = ((xmin, ymin), (xmin, ymax), (xmax, ymax), (xmax, ymin), (xmin, ymin))
        pgon = Polygon(coords)

        df = GeoSeries([pgon])
        return df,dset

    @staticmethod
    def pullSNOTEL(wsdlurl,df,dset,variablecode,start_date,end_date):
        value_dict = {}
        title_dict = {}

        # Build a GeoDataFrame of all SNOTEL sites, with point geometries in WGS84
        sites = ulmo.cuahsi.wof.get_sites(wsdlurl)
        sites_df = pd.DataFrame.from_dict(sites,orient='index')
        sites_df = sites_df.dropna()
        sites_df['geometry'] = [Point(float(loc['longitude']),
                                      float(loc['latitude'])) for loc in sites_df['location']]
        sites_df = sites_df.drop(columns='location')
        sites_df = sites_df.astype({"elevation_m":float})
        sites_gdf = gpd.GeoDataFrame(sites_df, geometry='geometry')
        sites_gdf = sites_gdf.set_crs('EPSG:4326')

        # Keep only sites that fall inside the reanalysis bounding polygon
        poly = df.geometry.unary_union
        sites_gdf_filt = sites_gdf[sites_gdf.geometry.intersects(poly)]
        sites_gdf_filt = sites_gdf_filt.assign(siteStr=sites_gdf_filt.index.str[:])

        geo_df_list = [[point.xy[1][0], point.xy[0][0]] for point in sites_gdf_filt.geometry]

        reanalysis_coords = reanalysisPixels(dset,geo_df_list)

        for i,sitecode in enumerate(sites_gdf_filt.index):
            print('%i of %i sites' % (i+1, len(sites_gdf_filt.index)))
            out = fetch(wsdlurl,sitecode,variablecode,start_date,end_date)

            if out is not None:
                value_dict[sitecode] = out['value']
                title_dict[sitecode] = sites_gdf_filt.loc[sitecode,'name']

        return sites_gdf_filt,geo_df_list,reanalysis_coords,value_dict,title_dict

    @staticmethod
    def overlappingReanalysis(dat_path,extension,value_dict,posteriorIdx,reanalysis_coords):
        fnames = sorted(glob.glob(dat_path+extension))
        keys = value_dict.keys()

        for z,file in enumerate(fnames):
            data = nc.Dataset(file)
            # One row per site: the posterior SWE series at the nearest grid cell
            # (assumes value_dict preserves the order used to build reanalysis_coords)
            for i,key in enumerate(keys):
                print(file,key)
                data_filt = data['SWE_Post'][:,posteriorIdx,reanalysis_coords[i,0],
                                             reanalysis_coords[i,1]]
                if i == 0:
                    dataHolder = data_filt
                else:
                    dataHolder = np.ma.vstack((dataHolder,data_filt))
            data.close()
            # Concatenate successive files (water years) along the time axis
            if z == 0:
                overlapping_reanalysis = dataHolder
            else:
                overlapping_reanalysis = np.ma.hstack((overlapping_reanalysis,dataHolder))
        return overlapping_reanalysis
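
A minimal usage sketch of this module follows; the endpoint URL, variable code, paths, dates, and posterior index are illustrative assumptions, not values taken from this commit:

# Hypothetical example: SNOTEL SWE for sites inside the reanalysis domain
from reanalyPlotTools import snotelPlotTools

wsdlurl = 'https://hydroportal.cuahsi.org/Snotel/cuahsi_1_1.asmx?WSDL'  # assumed CUAHSI endpoint
variablecode = 'SNOTEL:WTEQ_D'                                          # assumed daily SWE code
dat_path, extension = '/path/to/reanalysis/', '*.nc'                    # hypothetical inputs

df, dset = snotelPlotTools.getBoundingBox(dat_path, extension)
sites, site_coords, pixels, values, titles = snotelPlotTools.pullSNOTEL(
    wsdlurl, df, dset, variablecode, '2015-10-01', '2016-09-30')
series = snotelPlotTools.overlappingReanalysis(dat_path, extension, values, 0, pixels)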




6 changes: 6 additions & 0 deletions reanalyPlotTools/__init__.py
@@ -0,0 +1,6 @@
from .reanalyPlotTools import reanalyPlotTools
from .snotelPlotTools import snotelPlotTools

__all__ = [
    'reanalyPlotTools','snotelPlotTools'
]
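
With this __init__.py, both tool classes are importable directly from the package; a minimal sketch:

from reanalyPlotTools import reanalyPlotTools, snotelPlotTools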
Binary file added reanalyPlotTools/__pycache__/__init__.cpython-37.pyc
Binary file not shown.
Binary file added reanalyPlotTools/__pycache__/__init__.cpython-39.pyc
Binary file not shown.
116 changes: 116 additions & 0 deletions reanalyPlotTools/reanalyPlotTools.py
@@ -0,0 +1,116 @@
import glob
import netCDF4 as nc
import numpy as np
from datetime import timedelta, date

def sumSWE(SWE,lenx,leny,resolution):
    # Domain-total SWE volume per time step: spatial sum times pixel area
    # (lenx and leny are retained in the signature for compatibility)
    SWE = np.sum(SWE,axis=(1,2))*(resolution**2)
    return SWE

def sumSCA(SCA,lenx,leny,resolution):
    # Domain-total snow-covered area per time step
    SCA = np.sum(SCA,axis=(1,2))*(resolution**2)
    return SCA

def habitat_Apr(SWE,lenx,leny,resolution,density):
    # Pixels above the April 15 habitat SWE threshold (0.33 m),
    # scaled by the domain dimensions and resolution
    hab = np.sum(np.where(SWE > 0.33,1,0),axis=(0,1))*lenx*leny*resolution
    return hab

def habitat_May(SWE,lenx,leny,resolution,density):
    # Pixels above the May 15 habitat SWE threshold (0.20 m)
    hab = np.sum(np.where(SWE > 0.20,1,0),axis=(0,1))*lenx*leny*resolution
    return hab

def date_range(start, end):
    # Inclusive list of daily dates from start through end
    delta = end - start
    days = [start + timedelta(days=i) for i in range(delta.days + 1)]
    return days

class reanalyPlotTools:

    @staticmethod
    def peakSWE_distribution(dat_path,extension,posteriorIdx,resolution):
        # Annual peak of domain-total posterior SWE, one value per water-year file
        peakSWE = []
        fnames = sorted(glob.glob(dat_path+extension))
        for file in fnames:
            dset = nc.Dataset(file)
            SWE = sumSWE(dset['SWE_Post'][:,posteriorIdx,:,:],
                         len(dset['Longitude'][:]),
                         len(dset['Latitude'][:]),
                         resolution)
            peakSWE.append(np.max(SWE))
            dset.close()
        return peakSWE

    @staticmethod
    def P_vs_SWEP(dat_path,extensionSWE,extensionForcing,posteriorIdx,peakSWE,resolution):
        PSUM,SWE_P = [],[]
        fnamesSWE = sorted(glob.glob(dat_path+extensionSWE))
        fnamesForce = sorted(glob.glob(dat_path+extensionForcing))
        for fileSWE,sweVal,fileForce in zip(fnamesSWE,peakSWE,fnamesForce):
            dset = nc.Dataset(fileSWE)
            SWEsave = dset['SWE_Post'][:,posteriorIdx,:,:]
            SWE = sumSWE(dset['SWE_Post'][:,posteriorIdx,:,:],
                         len(dset['Longitude'][:]),
                         len(dset['Latitude'][:]),
                         resolution)
            # Accumulation season spans the first day SWE reaches ~5% of the
            # annual peak (start_idx) through the day of peak SWE (end_idx)
            end_idx = np.where(SWE == sweVal)[0][0]
            SWE = SWE[0:end_idx]
            start_idx = np.subtract(SWE,sweVal*0.05)
            start_idx = np.where(start_idx < 0,9999,start_idx)
            start_idx = np.where(start_idx == min(start_idx))[0][0]
            # Domain-mean SWE time series over the accumulation season
            SWE = np.mean(SWEsave[start_idx:end_idx,:,:],axis=(1,2))
            dset.close()
            dset = nc.Dataset(fileForce)
            # Cumulative precipitation up to the day of peak SWE
            P = np.cumsum(dset['PPT_Post'][0:end_idx,:,:],axis=0)
            PSUM.append(np.mean(P[-1,:,:]-P[start_idx,:,:],axis=(0,1)))
            P = np.mean(P,axis=(1,2))/1000
            # Mask days where cumulative precipitation is still below mean SWE
            P[start_idx:] = np.where(P[start_idx:] < SWE,np.nan,P[start_idx:])
            # nanmean excludes the masked days from the mean SWE/P ratio
            SWE_P.append(np.nanmean(SWE/P[start_idx:]))
            dset.close()
        return PSUM,SWE_P

    @staticmethod
    def extractData(dat_path,extensionSWE,posteriorIdx,resolution,dayApr15,dayMay15,density,PSUM,SWE_P,pctile,years):
        # Per water year: stacked SWE/SCA series (first 360 days), matching dates,
        # April 15 / May 15 habitat metrics, and categorical year classifications
        SWEsave,SCAsave,ttSave = np.empty((0,360)),np.empty((0,360)),np.empty((0,360))
        hot_cold,wet_dry,habApr15,habMay15 = [],[],[],[]
        fnamesSWE = sorted(glob.glob(dat_path+extensionSWE))
        for file,psum,swe_p,yr in zip(fnamesSWE,PSUM,SWE_P,years):
            dset = nc.Dataset(file)
            SWE = sumSWE(dset['SWE_Post'][:,posteriorIdx,:,:],
                         len(dset['Longitude'][:]),
                         len(dset['Latitude'][:]),
                         resolution)
            SWEsave = np.vstack((SWEsave,SWE[0:360]))
            SCA = sumSCA(dset['SCA_Post'][:,posteriorIdx,:,:],
                         len(dset['Longitude'][:]),
                         len(dset['Latitude'][:]),
                         resolution)
            SCAsave = np.vstack((SCAsave,SCA[0:360]))
            # Water-year date axis (Oct 1 through Sep 30), truncated to 360 days
            dtt = date_range(date(yr-1,10,1),date(yr,9,30))
            ttSave = np.vstack((ttSave,dtt[0:360]))
            habApr15.append(habitat_Apr(dset['SWE_Post'][dayApr15,posteriorIdx,:,:],
                                        len(dset['Longitude'][:]),
                                        len(dset['Latitude'][:]),
                                        resolution,density))
            habMay15.append(habitat_May(dset['SWE_Post'][dayMay15,posteriorIdx,:,:],
                                        len(dset['Longitude'][:]),
                                        len(dset['Latitude'][:]),
                                        resolution,density))
            dset.close()
            # Classify the year by SWE/P ratio: 1 = above the (50+pctile)th
            # percentile, 2 = below the (50-pctile)th, 0 = otherwise
            if swe_p > np.percentile(SWE_P,50+pctile):
                hot_cold.append(1)
            elif swe_p < np.percentile(SWE_P,50-pctile):
                hot_cold.append(2)
            else:
                hot_cold.append(0)
            # Same classification for accumulation-season precipitation
            if psum > np.percentile(PSUM,50+pctile):
                wet_dry.append(1)
            elif psum < np.percentile(PSUM,50-pctile):
                wet_dry.append(2)
            else:
                wet_dry.append(0)
        return SWEsave,SCAsave,ttSave,habApr15,habMay15,hot_cold,wet_dry
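
A minimal end-to-end sketch of how these methods chain together; every input value below (paths, file patterns, indices, resolution, density, percentile band, years) is a hypothetical placeholder:

# Hypothetical example: full reanalyPlotTools workflow
from reanalyPlotTools import reanalyPlotTools

dat_path = '/path/to/reanalysis/'            # hypothetical data directory
extSWE, extForce = 'SWE_*.nc', 'Force_*.nc'  # hypothetical file patterns
posteriorIdx, resolution = 0, 500.0          # assumed ensemble index and 500 m pixels
years = list(range(2001, 2011))              # one entry per water-year file

peaks = reanalyPlotTools.peakSWE_distribution(dat_path, extSWE, posteriorIdx, resolution)
PSUM, SWE_P = reanalyPlotTools.P_vs_SWEP(dat_path, extSWE, extForce,
                                         posteriorIdx, peaks, resolution)
# dayApr15/dayMay15 given as day-of-water-year indices; density and pctile assumed
SWEs, SCAs, dates, habApr, habMay, hot_cold, wet_dry = reanalyPlotTools.extractData(
    dat_path, extSWE, posteriorIdx, resolution, 196, 226, 250.0, PSUM, SWE_P, 25, years)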







