Develop equ advanced #168

Closed
wants to merge 6 commits
14 changes: 11 additions & 3 deletions src/hsp2/hsp2/HYDR.py
@@ -146,8 +146,10 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state):
# must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts
state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix']
state_paths = state['state_paths']
ep_list = ["DEP","IVOL","O1","O2","O3","OVOL1","OVOL2","OVOL3","PRSUPY","RO","ROVOL","SAREA","TAU","USTAR","VOL","VOLEV"]
model_exec_list = model_domain_dependencies(state, state_info['domain'], ep_list)
ep_list = hydr_state_vars()
# note: calling model_domain_dependencies with the 4th arg = True returns only "runnable" types, which
# can save time in long simulations, since iterating over non-runnables like Constants is wasted work.
model_exec_list = model_domain_dependencies(state, state_info['domain'], ep_list, True)
model_exec_list = asarray(model_exec_list, dtype="i8") # format for use in numba
op_tokens = state['op_tokens']
#######################################################################################
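The comment above about splitting dicts out of state is the key constraint behind passing state_ix, dict_ix and ts_ix separately: numba's typed containers must have homogeneous key and value types. A minimal sketch of that constraint, assuming numba is installed (the names below are illustrative, not taken from HYDR.py):

    from numba import njit, types
    from numba.typed import Dict

    # a homogeneous int64 -> float64 typed Dict compiles inside @njit code;
    # a nested dict mixing floats, strings and sub-dicts would not
    state_ix = Dict.empty(key_type=types.int64, value_type=types.float64)
    state_ix[1] = 0.0

    @njit
    def set_state(state_ix, ix, val):
        state_ix[ix] = val
        return state_ix[ix]

    print(set_state(state_ix, 1, 2.5))  # 2.5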
@@ -698,4 +700,10 @@ def expand_HYDR_masslinks(flags, uci, dat, recs):
rec['SVOL'] = dat.SVOL
recs.append(rec)
return recs


def hydr_state_vars():
return ["DEP","IVOL","O1","O2","O3","OVOL1","OVOL2","OVOL3","PRSUPY","RO","ROVOL","SAREA","TAU","USTAR","VOL","VOLEV"]

def hydr_load_om(state, io_manager, siminfo):
    # register a default ModelVariable entry for each HYDR state variable on every model segment
    for seg_name in state['hsp_segments'].keys():
        for i in hydr_state_vars():
            state['model_data'][seg_name][i] = {'object_class':'ModelVariable', 'name':i, 'value':0.0}
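Each entry registered by hydr_load_om is a plain property dict that the om loader (model_loader_recursive in om.py, below) later consumes along with the rest of state['model_data']. A small illustration, assuming a hypothetical segment key 'R001':

    # 'R001' is an assumed segment key, not taken from the code
    assert set(hydr_state_vars()) <= set(state['model_data']['R001'].keys())
    print(state['model_data']['R001']['VOL'])
    # {'object_class': 'ModelVariable', 'name': 'VOL', 'value': 0.0}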
4 changes: 2 additions & 2 deletions src/hsp2/hsp2/SPECL.py
@@ -9,7 +9,7 @@
from numba import njit
from pandas import DataFrame, date_range

def specl_load_actions(state, io_manager, siminfo):
def specl_load_om(state, io_manager, siminfo):
if 'ACTIONS' in state['specactions']:
dc = state['specactions']['ACTIONS']
for ix in dc.index:
@@ -32,7 +32,7 @@ def specl_load_actions(state, io_manager, siminfo):
return

def specl_load_state(state, io_manager, siminfo):
specl_load_actions(state, io_manager, siminfo)
specl_load_om(state, io_manager, siminfo)
# others defined below, like:
# specl_load_uvnames(state, io_manager, siminfo)
# ...
2 changes: 1 addition & 1 deletion src/hsp2/hsp2/main.py
@@ -69,7 +69,7 @@ def main(io_manager:Union[str, IOManager], saveall:bool=False, jupyterlab:bool=T
#######################################################################################
# Set up Things in state that will be used in all modular activities like SPECL
state = init_state_dicts()
state_siminfo_hsp2(uci_obj, siminfo)
state_siminfo_hsp2(uci_obj, siminfo, io_manager, state)
# Add support for dynamic functions to operate on STATE
# - Load any dynamic components if present, and store variables on objects
state_load_dynamics_hsp2(state, io_manager, siminfo)
76 changes: 45 additions & 31 deletions src/hsp2/hsp2/om.py
@@ -66,7 +66,7 @@ def model_element_paths(mel, state):
# Import Code Classes
from hsp2.hsp2.om_model_object import ModelObject, ModelVariable, ModelRegister, pre_step_register
from hsp2.hsp2.om_sim_timer import SimTimer, step_sim_timer
#from hsp2.hsp2.om_equation import *
from hsp2.hsp2.om_equation import Equation, step_equation
from hsp2.hsp2.om_model_linkage import ModelLinkage, step_model_link
from hsp2.hsp2.om_special_action import SpecialAction, step_special_action
#from hsp2.hsp2.om_data_matrix import *
@@ -148,16 +148,22 @@ def state_load_dynamics_om(state, io_manager, siminfo):
def state_om_model_root_object(state, siminfo):
# Create the base that everything is added to. This object does nothing except host the rest.
if 'model_root_object' not in state.keys():
model_root_object = ModelObject("", False, {}, state) # we give this no name so that it does not interfere with child paths like timer, year, etc (i.e. /STATE/year, ...)
model_root_object = ModelObject(state['model_root_name'], False, {}, state) # the root object hosts everything else; its name now comes from state['model_root_name'] rather than being empty
state['model_root_object'] = model_root_object
# set up the timer as the first element
model_root_object = state['model_root_object']
if '/STATE/timer' not in state['state_paths'].keys():
timer = SimTimer('timer', model_root_object, siminfo)
# add base object for the HSP2 domains and other things already added to state so they can be influenced
for (seg_name,seg_path) in state['hsp_segments'].items():
if (seg_path not in state['model_object_cache'].keys()):
# BUG: need to figure out if this is OK, and how we would later add attributes to these segment objects
# when loading from json?
# Can we simply check the model_object_cache during the load step?
# Create an object shell for this
ModelObject(seg_name, model_root_object)
#river_seg = ModelObject(seg_name, model_root_object, {}, state)
#state['model_object_cache'][river_seg.state_path] = river_seg
pass


def state_om_model_run_prep(state, io_manager, siminfo):
@@ -166,7 +172,7 @@ def state_om_model_run_prep(state, io_manager, siminfo):
# now instantiate and link objects
# state['model_data'] has already been prepopulated from json, .py files, hdf5, etc.
model_root_object = state['model_root_object']
model_loader_recursive(state['model_data'], model_root_object)
model_loader_recursive(state['model_data'], model_root_object, state)
# print("Loaded objects & paths: insures all paths are valid, connects models as inputs")
# both state['model_object_cache'] and the model_object_cache property of the ModelObject class def
# will hold a global repo for this data this may be redundant? They DO point to the same datset?
@@ -204,26 +210,29 @@

#print("op_tokens is type", type(op_tokens))
#print("state_ix is type", type(state['state_ix']))
#print("state_paths final", state['state_paths'])
#print("op_tokens final", op_tokens)

# Stash a list of runnables
state['runnables'] = ModelObject.runnable_op_list(state['op_tokens'], list(state['state_paths'].values()))
#print("Operational model status:", state['state_step_om'])
if len(model_exec_list) > 0:
pass
#print("op_tokens has", len(op_tokens),"elements, with ", len(model_exec_list),"executable elements")
#pass
print("op_tokens has", len(op_tokens),"elements, with ", len(model_exec_list),"executable elements")
print("Exec list:", model_exec_list)
return

# model class reader
# get model class to guess object type in this lib
# the parent object must be known
def model_class_loader(model_name, model_props, container = False):
def model_class_loader(model_name, model_props, container = False, state = None):
# todo: check first to see if the model_name is an attribute on the container
# Use: if hasattr(container, model_name):
# if so, we set the value on the container, if not, we create a new subcomp on the container
if model_props == None:
return False
if type(model_props) is str:
if is_float_digit(model_props):
model_object = ModelVariable(model_name, container, float(model_props) )
model_object = ModelVariable(model_name, container, {'value':float(model_props)}, state )
return model_object
else:
return False
@@ -240,45 +249,45 @@ def model_class_loader(model_name, model_props, container = False):
# for attributes to pass in.
# ".get()" will return NoValue if it does not exist or the value.
if object_class == 'Equation':
model_object = Equation(model_props.get('name'), container, model_props )
model_object = Equation(model_props.get('name'), container, model_props, state)
#remove_used_keys(model_props,
elif object_class == 'SimpleChannel':
model_object = SimpleChannel(model_props.get('name'), container, model_props )
model_object = SimpleChannel(model_props.get('name'), container, model_props, state)
elif object_class == 'Impoundment':
model_object = Impoundment(model_props.get('name'), container, model_props )
model_object = Impoundment(model_props.get('name'), container, model_props, state )
elif object_class == 'Constant':
model_object = ModelVariable(model_props.get('name'), container, model_props.get('value') )
model_object = ModelVariable(model_props.get('name'), container, {'value':model_props.get('value')} )
elif ( object_class.lower() == 'datamatrix'):
# add a matrix with the data, then add a matrix accessor for each required variable
has_props = DataMatrix.check_properties(model_props)
if has_props == False:
print("Matrix object must have", DataMatrix.required_properties())
return False
# create it
model_object = DataMatrix(model_props.get('name'), container, model_props)
model_object = DataMatrix(model_props.get('name'), container, model_props, state)
elif object_class == 'ModelBroadcast':
# add a matrix with the data, then add a matrix accessor for each required variable
has_props = ModelBroadcast.check_properties(model_props)
if has_props == False:
print("ModelBroadcast object must have", ModelBroadcast.required_properties())
return False
# create it
model_object = ModelBroadcast(model_props.get('name'), container, model_props)
model_object = ModelBroadcast(model_props.get('name'), container, model_props, state)
elif object_class == 'MicroWatershedModel':
# add a matrix with the data, then add a matrix accessor for each required variable
has_props = MicroWatershedModel.check_properties(model_props)
if has_props == False:
print("MicroWatershedModel object must have", MicroWatershedModel.required_properties())
return False
# create it
model_object = DataMatrix(model_props.get('name'), container, model_props)
model_object = DataMatrix(model_props.get('name'), container, model_props, state)
elif object_class == 'ModelLinkage':
model_object = ModelLinkage(model_props.get('name'), container, model_props)
model_object = ModelLinkage(model_props.get('name'), container, model_props, state)
elif object_class == 'SpecialAction':
model_object = SpecialAction(model_props.get('name'), container, model_props)
model_object = SpecialAction(model_props.get('name'), container, model_props, state)
else:
#print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject")
model_object = ModelObject(model_props.get('name'), container, model_props)
model_object = ModelObject(model_props.get('name'), container, model_props, state)
# one way to ensure no class attributes get parsed as sub-comps is:
# model_object.remove_used_keys()
if len(model_object.model_props_parsed) == 0:
@@ -318,7 +327,7 @@ def model_class_translate(model_props, object_class):
print("Disabling class", model_props['object_class'], 'rendering as ModelObject')
model_props['object_class'] = 'ModelObject'

def model_loader_recursive(model_data, container):
def model_loader_recursive(model_data, container, state):
k_list = model_data.keys()
object_names = dict.fromkeys(k_list , 1)
if type(object_names) is not dict:
@@ -340,22 +349,23 @@ def model_loader_recursive(model_data, container):
if not ('object_class' in model_props):
# this is either a class attribute or un-handleable meta-data
# if the class attribute exists, we should pass it to the container to load
#print("Skipping un-typed", object_name)
print("Skipping un-typed", object_name)
continue
#print("Translating", object_name)
# this is a kludge, but can be important
object_class = model_props['object_class']
model_class_translate(model_props, object_class)
# now we either have a constant (key and value), or a
# fully defined object. Either one should work OK.
#print("Trying to load", object_name)
model_object = model_class_loader(object_name, model_props, container)
#print("Loading", object_name)
model_object = model_class_loader(object_name, model_props, container, state)
if model_object == False:
print("Could not load", object_name)
continue # not handled, but for now we will continue, though later we should bail?
# now for container type objects, go through its properties and handle
#print("loaded object", model_object, "with container", container)
if type(model_props) is dict:
model_loader_recursive(model_props, model_object)
model_loader_recursive(model_props, model_object, state)

def model_path_loader(model_object_cache):
k_list = model_object_cache.keys()
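To make the recursion concrete, here is a hypothetical fragment of state['model_data'] that model_loader_recursive could walk; model_class_loader (above) dispatches each typed sub-dict on its 'object_class'. The segment name and the 'equation' property key are illustrative assumptions, not taken from om_equation.py:

    model_data = {
        'R001': {
            'object_class': 'ModelObject', 'name': 'R001',
            'mult': {'object_class': 'Constant', 'name': 'mult', 'value': 1.5},
            'Qadj': {'object_class': 'Equation', 'name': 'Qadj', 'equation': 'IVOL * mult'},
        }
    }
    # un-typed keys such as 'name' are skipped; each typed sub-dict becomes a
    # child object of its container (here, the model root object)
    model_loader_recursive(model_data, model_root_object, state)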
@@ -381,7 +391,7 @@ def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list,
"""
if model_touch_list is None:
model_touch_list = []
#print("Handling", model_object.name, " ", model_object.state_path)
#print("Tokenizing", model_object.name, " ", model_object.state_path)
if model_object.ix in model_exec_list:
return
if model_object.ix in model_touch_list:
@@ -474,7 +484,7 @@ def model_order_recursive(model_object, model_object_cache, model_exec_list, mod
# now after loading input dependencies, add this to list
model_exec_list.append(model_object.ix)

def model_domain_dependencies(state, domain, ep_list):
def model_domain_dependencies(state, domain, ep_list, only_runnable = False):
"""
Given an hdf5-style path to a domain and a list of variable endpoints in that domain,
find all model elements that influence the endpoints' state
@@ -485,11 +495,14 @@ def model_domain_dependencies(state, domain, ep_list):
mel = []
mtl = []
# if the given element is NOT in model_object_cache, then nothing is acting on it, so we return an empty list
if (domain + '/' + ep) in state['model_object_cache'].keys():
endpoint = state['model_object_cache'][domain + '/' + ep]
model_order_recursive(endpoint, state['model_object_cache'], mel, mtl)
mello = mello + mel
if (domain + '/' + ep) in state['state_paths']:
if (domain + '/' + ep) in state['model_object_cache'].keys():
endpoint = state['model_object_cache'][domain + '/' + ep]
model_order_recursive(endpoint, state['model_object_cache'], mel, mtl)
mello = mello + mel

if (only_runnable == True):
mello = ModelObject.runnable_op_list(state['op_tokens'], mello)
return mello

def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts):
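A hypothetical call mirroring the HYDR.py change above (the domain path '/STATE/RCHRES_R001' and the reuse of hydr_state_vars() are assumptions for illustration): passing True as the 4th argument filters the result through ModelObject.runnable_op_list, so Constants and other non-runnable inputs drop out of the returned list.

    ep_list = hydr_state_vars()  # ["DEP", "IVOL", "O1", ...]
    deps_all = model_domain_dependencies(state, '/STATE/RCHRES_R001', ep_list)
    deps_run = model_domain_dependencies(state, '/STATE/RCHRES_R001', ep_list, True)
    # deps_run is a subset of deps_all; HYDR.py converts it with asarray(..., dtype="i8")
    # before handing it to the numba-compiled step loop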
@@ -533,8 +546,9 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0):
# todo: decide if all step_[class]() functions should set their value in state_ix instead of returning it?
if debug > 0:
print("DEBUG: Operator ID", ops[1], "is op type", ops[0])
print("DEBUG: ops: ", ops)
if ops[0] == 1:
pass #step_equation(ops, state_ix)
step_equation(ops, state_ix)
elif ops[0] == 2:
# todo: this should be moved into a single function,
# with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix)