remove some unneeded files prior to merging into main, comment out some print statements related to new om implementation
PaulDudaRESPEC committed May 28, 2024
1 parent 9f542a8 commit 7a88a65
Showing 30 changed files with 8 additions and 37,508 deletions.
13 changes: 7 additions & 6 deletions src/hsp2/hsp2/om.py
@@ -94,7 +94,7 @@ def state_load_om_json(state, io_manager, siminfo):
     (fbase, fext) = os.path.splitext(hdf5_path)
     # see if there is custom json
     fjson = fbase + ".json"
-    print("Looking for custom om json ", fjson)
+    # print("Looking for custom om json ", fjson)
     if (os.path.isfile(fjson)):
         print("Found local json file", fjson)
         jfile = open(fjson)
@@ -117,7 +117,7 @@ def state_load_om_python(state, io_manager, siminfo):
     hdf5_path = io_manager._input.file_path
     (fbase, fext) = os.path.splitext(hdf5_path)
     # see if there is a code module with custom python
-    print("Looking for custom om loader in python code ", (fbase + ".py"))
+    # print("Looking for custom om loader in python code ", (fbase + ".py"))
     hsp2_local_py = state['hsp2_local_py']
     # Load a function from code if it exists
     if 'om_init_model' in dir(hsp2_local_py):
@@ -167,7 +167,7 @@ def state_om_model_run_prep(state, io_manager, siminfo):
     # state['model_data'] has already been prepopulated from json, .py files, hdf5, etc.
     model_root_object = state['model_root_object']
     model_loader_recursive(state['model_data'], model_root_object)
-    print("Loaded objects & paths: ensures all paths are valid, connects models as inputs")
+    # print("Loaded objects & paths: ensures all paths are valid, connects models as inputs")
     # both state['model_object_cache'] and the model_object_cache property of the ModelObject class def
     # will hold a global repo for this data; this may be redundant? They DO point to the same dataset?
     # since this is a function that accepts state as an argument and these were both set in state_load_dynamics_om
@@ -177,7 +177,7 @@ def state_om_model_run_prep(state, io_manager, siminfo):
     # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added
     model_exec_list = state['model_exec_list']
     # put all objects in token form for fast runtime execution and sort according to dependency order
-    print("Tokenizing models")
+    # print("Tokenizing models")
     if 'ops_data_type' in siminfo.keys():
         model_root_object.ops_data_type = siminfo['ops_data_type'] # allow override of data structure settings
     model_root_object.state['op_tokens'] = ModelObject.make_op_tokens(max(model_root_object.state['state_ix'].keys()) + 1)
@@ -206,9 +206,10 @@ def state_om_model_run_prep(state, io_manager, siminfo):
#print("state_ix is type", type(state['state_ix']))
#print("op_tokens final", op_tokens)

print("Operational model status:", state['state_step_om'])
#print("Operational model status:", state['state_step_om'])
if len(model_exec_list) > 0:
print("op_tokens has", len(op_tokens),"elements, with ", len(model_exec_list),"executable elements")
pass
#print("op_tokens has", len(op_tokens),"elements, with ", len(model_exec_list),"executable elements")
return

# model class reader
2 changes: 1 addition & 1 deletion src/hsp2/hsp2/state.py
@@ -122,7 +122,7 @@ def state_siminfo_hsp2(uci_obj, siminfo):

 def state_init_hsp2(state, opseq, activities):
     # This sets up the state entries for all state compatible HSP2 model variables
-    print("STATE initializing contexts.")
+    # print("STATE initializing contexts.")
     for _, operation, segment, delt in opseq.itertuples():
         if operation != 'GENER' and operation != 'COPY':
             for activity, function in activities[operation].items():
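Aside, not part of this commit: an alternative to commenting out diagnostic print statements is routing them through Python's standard logging module, so verbosity is controlled by configuration rather than by editing source. A minimal sketch under that assumption; the stub below reuses the state_init_hsp2 signature from the diff purely for illustration:

    import logging

    # Module-level logger; emits nothing until logging is configured.
    log = logging.getLogger(__name__)

    def state_init_hsp2(state, opseq, activities):
        # Debug-level message in place of the commented-out print; hidden by
        # default, enabled with logging.basicConfig(level=logging.DEBUG).
        log.debug("STATE initializing contexts.")

With this pattern the messages stay in the code permanently and a single logging configuration call at startup decides whether they appear.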
Binary file not shown.
10 changes: 0 additions & 10 deletions tests/instruction.txt

This file was deleted.

