diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index 0ea0bb68..d972ccb6 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -20,6 +20,14 @@ from HSP2.utilities import initm, make_numba_dict from HSP2.state import * from HSP2.SPECL import specl +from HSP2.om import * +from HSP2.om_model_object import * +from HSP2.om_sim_timer import * +from HSP2.om_special_action import * +#from HSP2.om_equation import * +from HSP2.om_model_linkage import * +#from HSP2.om_data_matrix import * +#from HSP2.om_model_broadcast import * ERRMSGS =('HYDR: SOLVE equations are indeterminate', #ERRMSG0 @@ -123,10 +131,10 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): Olabels.append(f'O{i+1}') OVOLlabels.append(f'OVOL{i+1}') - # state_info is some generic things about the simulation + # state_info is some generic things about the simulation - must be numba safe, so we don't just pass the whole state which is not state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] - state_info['domain'], state_info['state_step_hydr'] = state['domain'], state['state_step_hydr'] + state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] hsp2_local_py = state['hsp2_local_py'] # It appears necessary to load this here, instead of from main.py, otherwise, # _hydr_() does not recognize the function state_step_hydr()? @@ -140,15 +148,16 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): # initialize the hydr paths in case they don't already reside here hydr_init_ix(state_ix, state_paths, state['domain']) + ########################################################################### - # specactions - special actions code TBD + # OM - load the tokens to pass in. 
########################################################################### - specactions = make_numba_dict(state['specactions']) # Note: all values coverted to float automatically - + op_tokens = state['op_tokens'] + ########################################################################### # Do the simulation with _hydr_() ########################################################################### - errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, specactions, state_step_hydr) # run reaches simulation code + errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens) # run reaches simulation code ########################################################################### if 'O' in ts: del ts['O'] @@ -163,7 +172,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): @njit(cache=True) -def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, specactions, state_step_hydr): +def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens): errors = zeros(int(ui['errlen'])).astype(int64) steps = int(ui['steps']) # number of simulation steps @@ -309,7 +318,6 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf # HYDR (except where noted) for step in range(steps): # call specl - specl(ui, ts, step, state_info, state_paths, state_ix, specactions) convf = CONVF[step] outdgt[:] = OUTDGT[step, :] colind[:] = COLIND[step, :] @@ -323,17 +331,34 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf state_ix[out_ix[oi]] = outdgt[oi] state_ix[vol_ix], state_ix[ivol_ix] = vol, IVOL0[step] state_ix[volev_ix] = volev + # ***************************************** # Execute dynamic code if enabled + 
# - these if statements may be irrelevant if default functions simply return + # when no objects are defined. + if (state_info['state_step_om'] == 'enabled'): + pre_step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) if (state_info['state_step_hydr'] == 'enabled'): state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) + # Execute dynamic code if enabled + if (state_info['state_step_om'] == 'enabled'): + #print("trying to execute state_step_om()") + # op_tokens[0] contains the model exec list. Later we may amend this + # perhaps even storing a domain specific exec list under domain/exec_list? + step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) + # Execute dynamic code if enabled + if ( (state_info['state_step_hydr'] == 'enabled') + or (state_info['state_step_om'] == 'enabled') + ): # Do write-backs for editable STATE variables # OUTDGT is writeable for oi in range(nexits): outdgt[oi] = state_ix[out_ix[oi]] - # IVOL is writeable. - # Note: we must convert IVOL to the units expected in _hydr_ + # IVOL is writeable. + # Note: we must convert IVOL to the units expected in _hydr_ # maybe routines should do this, and this is not needed (but pass VFACT in state) IVOL[step] = state_ix[ivol_ix] * VFACT + # End dynamic code step() + # ***************************************** # vols, sas variables and their initializations not needed. 
'''
Process special actions in this domain.

Notes:
  - code for parsing UCI SPEC-ACTIONS is in HSP2tools/readUCI.py
  - object classes that transform parsed data into OP codes for OM and STATE
    support are in this directory tree as om_special_[action type].py
    (e.g. om_special_action.py holds object support and runtime functions
    for classic ACTIONS)
'''

def specl_load_actions(state, io_manager, siminfo):
    """Translate parsed SPEC-ACTIONS rows into 'SpecialAction' entries in state['model_data'].

    Each row of the ACTIONS DataFrame becomes one dict entry named
    'SPECACTION<ix>' carrying the row's columns plus an 'object_class' tag
    of 'SpecialAction' that the OM model loader dispatches on.

    io_manager and siminfo are accepted for signature parity with the other
    loaders but are not used here.
    """
    dc = state['specactions']['ACTIONS']
    for ix in dc.index:
        # one-row slice keeps DataFrame structure so .keys() yields columns
        speca = dc[ix:(ix + 1)]
        row = speca.to_dict()  # hoisted: convert once per row, not once per column
        opname = 'SPEC' + 'ACTION' + str(ix)
        state['model_data'][opname] = {}
        state['model_data'][opname]['name'] = opname
        for ik in speca.keys():
            state['model_data'][opname][ik] = row[ik][ix]
        state['model_data'][opname]['object_class'] = 'SpecialAction'
    return

def state_load_dynamics_specl(state, io_manager, siminfo):
    """Entry point for loading all special-action dynamics into state.

    Currently only classic ACTIONS are handled; other tables would get their
    own specl_load_* helpers called from here, e.g.:
      specl_load_uvnames(state, io_manager, siminfo)
    """
    specl_load_actions(state, io_manager, siminfo)
    return
def get_exec_order(model_exec_list, var_ix):
    """
    Return the 1-based execution order of state index var_ix within
    model_exec_list (a numpy array of state indices), or False if absent.
    """
    model_exec_list = dict(enumerate(model_exec_list.flatten(), 1))
    for exec_order, ix in model_exec_list.items():
        if var_ix == ix:
            return exec_order
    return False

def init_op_tokens(op_tokens, tops, eq_ix):
    """
    Initialize the op_tokens Dict entry for eq_ix.
    This contains the runtime op code for a dynamic operation; any string
    token is meant to be converted to a state-array constant first.
    """
    for j in range(len(tops)):
        if isinstance(tops[j], str):
            # NOTE(review): state_ix and append_state are not defined in this
            # scope -- this branch would raise NameError if ever hit; confirm
            # whether they were intended as module globals.
            s_ix = append_state(state_ix, float(tops[j]))
            tops[j] = s_ix
    op_tokens[eq_ix] = np.asarray(tops, dtype="i8")

def is_float_digit(n: str) -> bool:
    """Return True if n parses as a float, False otherwise."""
    try:
        float(n)
        return True
    except ValueError:
        return False
def init_om_dicts():
    """
    Create the base dictionaries used to store model object info:
    op_tokens (numba typed Dict, int64 -> i8[:]) and model_object_cache
    (a plain dict; it is never accessed inside numba-compiled code).
    """
    op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:])
    model_object_cache = {}
    return op_tokens, model_object_cache

def state_load_om_json(state, io_manager, siminfo):
    """Merge object definitions from '[h5 base].json' (if present) into
    state['model_data'], for later consumption by model_loader_recursive().
    """
    # (fix) local imports: this module never imported os/json at top level,
    # which risked NameError depending on star-import side effects
    import os
    import json
    model_data = state['model_data']
    # the JSON file lives alongside the hdf5
    hdf5_path = io_manager._input.file_path
    (fbase, fext) = os.path.splitext(hdf5_path)
    fjson = fbase + ".json"
    print("Looking for custom om json ", fjson)
    if os.path.isfile(fjson):
        print("Found local json file", fjson)
        # (fix) context manager: the original never closed the file handle
        with open(fjson) as jfile:
            json_data = json.load(jfile)
        # dict.update() combines the loaded definitions with the base
        model_data.update(json_data)
        state['model_data'] = model_data
    return

def state_load_om_python(state, io_manager, siminfo):
    """Run the user hook om_init_model() from '[h5 base].py' if the already
    loaded local module (state['hsp2_local_py']) defines it.

    That file may also redefine per-step hooks such as state_step_hydr().
    """
    import os  # (fix) see state_load_om_json
    hdf5_path = io_manager._input.file_path
    (fbase, fext) = os.path.splitext(hdf5_path)
    print("Looking for custom om loader in python code ", (fbase + ".py"))
    hsp2_local_py = state['hsp2_local_py']
    if 'om_init_model' in dir(hsp2_local_py):
        # NOTE(review): the key 'op_tokenModelObject.model_object_caches' looks
        # like a paste error (probably meant state['op_tokens']); preserved
        # verbatim pending confirmation against the hook's signature.
        hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokenModelObject.model_object_caches'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache'])

def state_load_dynamics_om(state, io_manager, siminfo):
    """Check every supported path (python hook, json file) for dynamic
    operational-model object definitions and stash the shared containers
    on both state and the ModelObject class."""
    op_tokens, model_object_cache = init_om_dicts()
    state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix']
    # class-level globals on ModelObject persist through all subsequent
    # object instantiation and use
    ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (
        op_tokens, state_paths, state_ix, dict_ix, model_object_cache
    )
    state['op_tokens'], state['model_object_cache'] = op_tokens, model_object_cache
    # load dynamic coding libraries if defined by user
    state_load_om_python(state, io_manager, siminfo)
    state_load_om_json(state, io_manager, siminfo)
    return
def state_om_model_run_prep(state, io_manager, siminfo):
    """Finalize all dynamically loaded OM components and tokenize them
    into op_tokens for fast runtime execution."""
    # Root container: hosts every other object and does no work itself.
    model_root_object = ModelObject("")
    # The simulation timer is always the first component added.
    timer = SimTimer('timer', model_root_object, siminfo)

    # state['model_data'] was prepopulated earlier (json, .py files, hdf5...)
    model_loader_recursive(state['model_data'], model_root_object)
    print("Loaded objects & paths: insures all paths are valid, connects models as inputs")
    # state['model_object_cache'] and ModelObject.model_object_cache were both
    # set in state_load_dynamics_om and point at the same dict.
    model_object_cache = state['model_object_cache']
    op_tokens = state['op_tokens']
    model_path_loader(model_object_cache)
    # ordered list of component operations (length 1 = just the SimTimer)
    model_exec_list = []
    print("Tokenizing models")
    model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list)
    print("model_exec_list:", model_exec_list)
    # slot 0 stashes the exec list itself for the runtime engine
    op_tokens[0] = np.asarray(model_exec_list, dtype="i8")
    state['model_object_cache'] = model_object_cache
    state['op_tokens'] = op_tokens
    # NOTE(review): op_tokens always holds at least entry 0 at this point,
    # so the 'disabled' value below is immediately overwritten -- confirm
    # whether om stepping should really be unconditionally enabled.
    state['state_step_om'] = 'disabled'
    if len(op_tokens) > 0:
        state['state_step_om'] = 'enabled'
    return
def model_class_loader(model_name, model_props, container = False):
    """Instantiate the model object class matching one parsed definition.

    model_props may be a bare numeric string (becomes a ModelConstant) or a
    dict whose 'object_class' key selects the class. Returns the new object,
    or False when the props describe a plain attribute handled by the
    container. Raises on an Equation with no usable equation string.
    """
    # todo: check first whether model_name is an attribute on the container
    # (hasattr(container, model_name)) and set it there instead
    if model_props is None:
        return False
    if type(model_props) is str:
        if is_float_digit(model_props):
            model_object = ModelConstant(model_name, container, float(model_props))
            return model_object
        else:
            return False
    elif type(model_props) is dict:
        # dict.get() returns None for a missing key
        object_class = model_props.get('object_class')
        if object_class is None:
            # likely an attribute used by the containing class; the container
            # handles it, nothing to build here
            return False
        model_object = False
        if object_class == 'Equation':
            eqn = model_props.get('equation')
            if type(eqn) is str:
                eqn_str = eqn
            else:
                if eqn is None:
                    # equation may be stored as a normal propcode
                    eqn_str = model_props.get('value')
                else:
                    eqn_str = eqn.get('value')
            if eqn_str is None:
                # (fix) removed unreachable 'return False' that followed this raise
                raise Exception("Equation object", model_name, "does not have a valid equation string. Halting. ")
            model_object = Equation(model_props.get('name'), container, eqn_str)
        elif object_class == 'SimpleChannel':
            model_object = SimpleChannel(model_props.get('name'), container, model_props)
        elif object_class == 'Constant':
            model_object = ModelConstant(model_props.get('name'), container, model_props.get('value'))
        elif (object_class.lower() == 'datamatrix'):
            # a matrix with data, plus a matrix accessor per required variable
            has_props = DataMatrix.check_properties(model_props)
            if has_props == False:
                print("Matrix object must have", DataMatrix.required_properties())
                return False
            model_object = DataMatrix(model_props.get('name'), container, model_props)
        elif object_class == 'ModelBroadcast':
            has_props = ModelBroadcast.check_properties(model_props)
            if has_props == False:
                print("ModelBroadcast object must have", ModelBroadcast.required_properties())
                return False
            model_object = ModelBroadcast(model_props.get('name'), container, model_props)
        elif object_class == 'MicroWatershedModel':
            has_props = MicroWatershedModel.check_properties(model_props)
            if has_props == False:
                print("MicroWatershedModel object must have", MicroWatershedModel.required_properties())
                return False
            # NOTE(review): instantiates DataMatrix, not MicroWatershedModel --
            # looks like a copy-paste remnant; preserved pending confirmation
            model_object = DataMatrix(model_props.get('name'), container, model_props)
        elif object_class == 'ModelLinkage':
            model_object = ModelLinkage(model_props.get('name'), container, model_props)
        elif object_class == 'SpecialAction':
            model_object = SpecialAction(model_props.get('name'), container, model_props)
        else:
            print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject")
            model_object = ModelObject(model_props.get('name'), container)
        if len(model_object.model_props_parsed) == 0:
            # attach the raw props to the object for posterity
            model_object.model_props_parsed = model_props
        return model_object

def model_class_translate(model_props, object_class):
    """Mutate model_props in place to map legacy/non-standard object_class
    names and layouts onto their modern equivalents.

    This might better live as methods on the class handlers.
    """
    if object_class == 'hydroImpoundment':
        # rename a legacy 'matrix' table to 'storage_stage_area' if needed
        storage_stage_area = model_props.get('storage_stage_area')
        matrix = model_props.get('matrix')
        if (storage_stage_area is None) and (matrix is not None):
            model_props['storage_stage_area'] = matrix
            del model_props['matrix']
    if object_class == 'broadCastObject':
        model_props['object_class'] = 'ModelBroadcast'
        model_props['broadcast_channel'] = model_props['broadcast_class']
    if object_class == 'USGSChannelGeomObject_sub':
        model_props['object_class'] = 'SimpleChannel'
        print("Handling USGSChannelGeomObject_sub as SimpleChannel")
    if object_class == 'hydroImpoundment':
        model_props['object_class'] = 'SimpleImpoundment'
        print("Handling hydroImpoundment as SimpleImpoundment")
    if object_class == 'hydroImpSmall':
        model_props['object_class'] = 'SimpleImpoundment'
        print("Handling hydroImpSmall as SimpleImpoundment")
def model_loader_recursive(model_data, container):
    """Walk a nested dict of object definitions, instantiating each typed
    entry under container and recursing into it for child objects.

    Reserved keys ('name', 'object_class', 'id', 'value', 'default') and
    un-typed dicts are treated as attributes/metadata and skipped.
    """
    k_list = model_data.keys()
    object_names = dict.fromkeys(k_list , 1)
    if type(object_names) is not dict:
        return False
    for object_name in object_names:
        if object_name in {'name', 'object_class', 'id', 'value', 'default'}:
            # class-level properties, never child objects
            continue
        model_props = model_data[object_name]
        if type(model_props) is not dict:
            # a bare constant; trust that real constants are formally
            # declared with object_class Constant, so skip
            continue
        if type(model_props) is dict:
            if not ('object_class' in model_props):
                # class attribute or un-handleable metadata
                continue
            # legacy-name translation before loading (kludge, but important)
            object_class = model_props['object_class']
            model_class_translate(model_props, object_class)
        model_object = model_class_loader(object_name, model_props, container)
        if model_object == False:
            print("Could not load", object_name)
            continue  # not handled; later we may want to bail instead
        # container-type objects: recurse into their properties
        if type(model_props) is dict:
            model_loader_recursive(model_props, model_object)

def model_path_loader(model_object_cache):
    """Call find_paths() on every cached model object so all state paths
    are registered/resolved before tokenization."""
    k_list = model_object_cache.keys()
    model_names = dict.fromkeys(k_list , 1)
    for model_name in model_names:
        model_object = model_object_cache[model_name]
        model_object.find_paths()


def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = None):
    """
    Given a root model_object, trace its inputs depth-first and append each
    object's ix to model_exec_list in dependency order.

    Note: all ordering is as-needed organic, except Broadcasts:
      - read-from-children completes after all other inputs
      - read-from-parent completes before all other inputs
    model_touch_list guards against recursion cycles within one call tree.
    """
    if model_touch_list is None:
        # (fix) fresh list per top-level call; the original used a mutable
        # default argument, so touch state leaked across separate invocations
        model_touch_list = []
    if model_object.ix in model_exec_list:
        return
    if model_object.ix in model_touch_list:
        # already visited on this call tree; prevents infinite recursion
        return
    model_touch_list.append(model_object.ix)
    k_list = model_object.inputs.keys()
    input_names = dict.fromkeys(k_list , 1)
    if type(input_names) is not dict:
        return
    for input_name in input_names:
        input_path = model_object.inputs[input_name]
        if input_path in model_object_cache.keys():
            input_object = model_object_cache[input_path]
            model_tokenizer_recursive(input_object, model_object_cache, model_exec_list, model_touch_list)
        else:
            if input_path in model_object.state_paths.keys():
                # valid state reference without an object: manually added
                # internals, which is fine
                continue
            print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()")
            return
    # all inputs tokenized; now this object is safe to tokenize
    model_object.add_op_tokens()
    model_exec_list.append(model_object.ix)
def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts):
    """Placeholder for saving OM object timeseries.

    Options under consideration:
      - save_timeseries(io_manager, ts, savedict, siminfo, saveall, operation,
        segment, activity, compress=True) from utilities.py
      - calling io.py write_ts() directly (see utilities.py ~line 317)
    """
    x = 0 # dummy
    return

@njit
def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps):
    """Drive the pre-step and step phases for every timestep.

    Returns a checksum placeholder (currently always 0.0).
    """
    checksum = 0.0
    for step in range(steps):
        pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step)
        step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step)
    return checksum
@njit
def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step):
    """Pre-step phase: prepare per-step data (e.g. broadcast registers)
    before the main step pass. All op types are currently no-ops."""
    for i in model_exec_list:
        optype = op_tokens[i][0]
        if optype == 1:
            pass
        elif optype == 2:
            pass
        elif optype == 3:
            pass
        elif optype == 4:
            pass
        elif optype == 5:
            pass
        elif optype == 12:
            # register type data (like broadcast accumulators)
            # disabled till broadcasts are defined:
            # pre_step_register(op_tokens[i], state_ix, dict_ix)
            pass
    return

@njit
def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step):
    """Main step phase: execute every component in dependency order."""
    val = 0
    for i in model_exec_list:
        step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0)
    return

@njit
def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step):
    """Post-step phase: currently a no-op hook."""
    return

@njit
def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0):
    """Dispatch a single component's op code (ops[0]) for this step.

    op_tokens is passed in whole for ops (like matrices) that look up other
    locations; all other op types rely only on ops.
    todo: decide if all step_[class]() functions should set state_ix
    themselves instead of returning a value.
    """
    val = 0
    if debug == 1:
        print("DEBUG: Operator ID", ops[1], "is op type", ops[0])
    if ops[0] == 1:
        pass #state_ix[ops[1]] = step_equation(ops, state_ix)
    elif ops[0] == 2:
        # todo: move into a single step_matrix(op_tokens, ops, state_ix, dict_ix)
        if (ops[1] == ops[2]):
            if debug == 1:
                print("DEBUG: Calling exec_tbl_values", ops)
            # refresh a matrix holding variables -- only when matrix data and
            # matrix config live on the same object; otherwise the data is an
            # input and was already evaluated
            pass# state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix)
        if (ops[3] > 0):
            # evaluate a single value from the matrix when so configured
            if debug == 1:
                print("DEBUG: Calling exec_tbl_eval", ops)
            pass# state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix)
    elif ops[0] == 3:
        step_model_link(ops, state_ix, ts_ix, step)
    elif ops[0] == 4:
        val = 0
    elif ops[0] == 5:
        step_sim_timer(ops, state_ix, dict_ix, ts_ix, step)
    elif ops[0] == 9:
        val = 0
    elif ops[0] == 13:
        pass #step_simple_channel(ops, state_ix, dict_ix, step)
    elif ops[0] == 100:
        # Op 100 is Basic ACTION in Special Actions
        state_ix[ops[1]] = step_special_action(ops, state_ix, dict_ix, step)
    return
+ if debug == 1: + print("DEBUG: Calling exec_tbl_eval", ops) + pass# state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + elif ops[0] == 3: + step_model_link(ops, state_ix, ts_ix, step) + elif ops[0] == 4: + val = 0 + elif ops[0] == 5: + step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) + elif ops[0] == 9: + val = 0 + elif ops[0] == 13: + pass #step_simple_channel(ops, state_ix, dict_ix, step) + # Op 100 is Basic ACTION in Special Actions + elif ops[0] == 100: + state_ix[ops[1]] = step_special_action(ops, state_ix, dict_ix, step) + return + + +@njit +def test_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + val = 0 + for i in model_exec_list: + print(i) + print(op_tokens[i][0]) + print(op_tokens[i]) + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return diff --git a/HSP2/om_model_linkage.py b/HSP2/om_model_linkage.py new file mode 100644 index 00000000..b005743a --- /dev/null +++ b/HSP2/om_model_linkage.py @@ -0,0 +1,133 @@ +""" +The class ModelLinkage is used to translate copy data from one state location to another. +It is also used to make an implicit parent child link to insure that an object is loaded +during a model simulation. 
+""" +from HSP2.state import * +from HSP2.om import * +from HSP2.om_model_object import ModelObject +from numba import njit +class ModelLinkage(ModelObject): + def __init__(self, name, container = False, model_props = []): + super(ModelLinkage, self).__init__(name, container) + # ModelLinkage copies a values from right to left + # right_path: is the data source for the link + # left_path: is the destination of the link + # left_path: is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path + # - left_path parameter is only needed for pushes (type 4 and 5) + # - the push is functionally equivalent to a pull whose path resolves to the specified left_path + # - but the push allows the potential for multiple objects to set a single state + # This can be dangerous or difficult to debug, but essential to replicate old HSPF behaviour + # especially in the case of If/Then type structures. + # it is also useful for the broadcast objects, see om_model_broadcast for those + # link_type: 1 - local parent-child, 2 - local property link (state data), 3 - remote linkage (ts data only), 4 - push to accumulator (like a hub), 5 - overwrite remote value + self.optype = 3 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - ModelLinkage, 4 - + if container == False: + # this is required + print("Error: a link must have a container object to serve as the destination") + return False + right_path = self.handle_prop(model_props, 'right_path') + link_type = self.handle_prop(model_props, 'link_type', False, 0) + left_path = self.handle_prop(model_props, 'left_path') + + if self.left_path == False: + # self.state_path gets set when creating at the parent level + self.left_path = self.state_path + # this breaks for some reason, doesn't like the input name being different than the variable path ending? 
+        self.add_input(self.right_path, self.right_path)
+
+    def handle_prop(self, model_props, prop_name, strict = False, default_value = None ):
+        # parent method handles most cases, but subclass handles special situations.
+        prop_val = super().handle_prop(model_props, prop_name, strict, default_value)
+        if (prop_name == 'right_path') and ((prop_val == None) or (prop_val == '')):
+            raise Exception("right_path cannot be empty. Object creation halted. Path to object with error is " + self.state_path)
+        return prop_val
+
+    @staticmethod
+    def required_properties():
+        # returns a list or minimum properties to create.
+        # see ModelConstant below for how to call this in a sub-class
+        # note:
+        # req_props = super(DataMatrix, DataMatrix).required_properties()
+        req_props = ['name', 'right_path']
+        return req_props
+
+    def find_paths(self):
+        # this should be needed if this is a PUSH link_type = 4 or 5
+        super().find_paths()
+        self.paths_found = False # override parent setting until we verify everything
+        # do we need to do this, or just trust it exists?
+ #self.insure_path(self, self.right_path) + # the left path, if this is type 4 or 5, is a push, so we must require it + if ( (self.link_type == 4) or (self.link_type == 5) ): + self.insure_path(self.left_path) + self.paths_found = True + return + + def tokenize(self): + super().tokenize() + # - if this is a data property link then we add op codes to do a copy of data from one state address to another + # - if this is simply a parent-child connection, we do not render op-codes, but we do use this for assigning + # - execution hierarchy + if self.link_type in (2, 3): + src_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if not (src_ix == False): + self.ops = self.ops + [src_ix, self.link_type] + else: + print("Error: link ", self.name, "does not have a valid source path") + #print("tokenize() result", self.ops) + if (self.link_type == 4) or (self.link_type == 5): + # we push to the remote path in this one + left_ix = get_state_ix(self.state_ix, self.state_paths, self.left_path) + right_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if (left_ix != False) and (right_ix != False): + self.ops = self.ops + [left_ix, self.link_type, right_ix] + else: + print("Error: link ", self.name, "does not have valid paths", "(left = ", self.left_path, left_ix, "right = ", self.right_path, right_ix, ")") + #print("tokenize() result", self.ops) + +# Function for use during model simulations of tokenized objects +@njit +def step_model_link(op_token, state_ix, ts_ix, step): + if step == 2: + print("step_model_link() called at step 2 with op_token=", op_token) + if op_token[3] == 1: + return True + elif op_token[3] == 2: + state_ix[op_token[1]] = state_ix[op_token[2]] + elif op_token[3] == 3: + # read from ts variable TBD + # state_ix[op_token[1]] = ts_ix[op_token[2]][step] + return True + elif op_token[3] == 4: + # add value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[2]] + 
state_ix[op_token[4]] + return True + elif op_token[3] == 5: + # overwrite remote variable state with value in another paths state + if step == 2: + print("Setting state_ix[", op_token[2], "] =", state_ix[op_token[4]]) + state_ix[op_token[2]] = state_ix[op_token[4]] + return True + + +def test_model_link(op_token, state_ix, ts_ix, step): + if op_token[3] == 1: + return True + elif op_token[3] == 2: + state_ix[op_token[1]] = state_ix[op_token[2]] + elif op_token[3] == 3: + # read from ts variable TBD + # state_ix[op_token[1]] = ts_ix[op_token[2]][step] + return True + elif op_token[3] == 4: + print("Remote Broadcast accumulator type link.") + print("Setting op ID", str(op_token[2]), "to value from ID", str(op_token[4]), "with value of ") + # add value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[2]] + state_ix[op_token[4]] + print(str(state_ix[op_token[2]]) + " = ", str(state_ix[op_token[2]]) + "+" + str(state_ix[op_token[4]])) + return True + elif op_token[3] == 5: + # push value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[4]] + return True \ No newline at end of file diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py new file mode 100644 index 00000000..999ee419 --- /dev/null +++ b/HSP2/om_model_object.py @@ -0,0 +1,323 @@ +""" +The class ModelObject is the base class upon which all other dynamic model objects are built on. +It handles all Dict management functions, but provides for no runtime execution of it's own. +All runtime exec is done by child classes. 
+""" +from HSP2.state import * +from HSP2.om import * +from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric +from pandas import Timestamp, Timedelta, read_hdf, read_csv + +class ModelObject: + state_ix = {} # Shared Dict with the numerical state of each object + state_paths = {} # Shared Dict with the hdf5 path of each object + dict_ix = {} # Shared Dict with the hdf5 path of each object + ts_ix = {} # Shared Dict with the hdf5 path of each object + op_tokens = {} # Shared Dict with the tokenized representation of each object + model_object_cache = {} # Shared with actual objects, keyed by their path + model_exec_list = {} # Shared with actual objects, keyed by their path + + def __init__(self, name, container = False, model_props = []): + self.name = name + self.container = container # will be a link to another object + self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" + self.attribute_path = "" # + self.model_props_parsed = {} # a place to stash parse record for debugging + if (hasattr(self,'state_path') == False): + # if the state_path has already been set, we accept it. + # this allows sub-classes to override the standard path guessing approach. 
+ self.state_path = "" # Ex: "/STATE/RCHRES_001" # the pointer to this object state + self.inputs = {} # associative array with key=local_variable_name, value=hdf5_path Ex: [ 'Qin' : '/STATE/RCHRES_001/IVOL' ] + self.inputs_ix = {} # associative array with key=local_variable_name, value=state_ix integer key + self.ix = False + self.paths_found = False # this should be False at start + self.default_value = 0.0 + self.ops = [] + self.optype = 0 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment + self.register_path() + self.parse_model_props(model_props) + + @staticmethod + def required_properties(): + # returns a list or minimum properties to create. + # see ModelConstant below for how to call this in a sub-class + # note: + # req_props = super(DataMatrix, DataMatrix).required_properties() + req_props = ['name'] + return req_props + + @classmethod + def check_properties(cls, model_props): + # this is for pre-screening properties for validity in model creation routines + # returns True or False and can be as simple as checking the list of required_properties + # or a more detailed examination of suitability of what those properties contain + req_props = cls.required_properties() + matching_props = set(model_props).intersection(set(req_props)) + if len(matching_props) < len(req_props): + return False + return True + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # this checks to see if the prop is in dict with value form, or just a value + # strict = True causes an exception if property is missing from model_props dict + prop_val = model_props.get(prop_name) + if type(prop_val) == list: + prop_val = prop_val.get('value') + elif type(prop_val) == dict: + prop_val = 
prop_val.get('value') + if strict and (prop_val == None): + raise Exception("Cannot find property " + prop_name + " in properties passed to "+ self.name + " and strict = True. Object creation halted. Path to object with error is " + self.state_path) + return prop_val + + def parse_model_props(self, model_props, strict = False ): + # sub-classes will allow an create argument "model_props" and handle them here. + # see also: handle_prop(), which will be called y parse_model_props + # for all attributes supported by the class + self.model_props_parsed = model_props + return True + + def set_state(self, set_value): + var_ix = set_state(self.state_ix, self.state_paths, self.state_path, set_value) + return var_ix + + def load_state_dicts(self, op_tokens, state_paths, state_ix, dict_ix): + self.op_tokens = op_tokens + self.state_paths = state_paths + self.state_ix = state_ix + self.dict_ix = dict_ix + + def save_object_hdf(self, hdfname, overwrite = False ): + # save the object in the full hdf5 path + # if overwrite = True replace this and all children, otherwise, just save this. 
+ # note: "with" statement helps prevent unclosed resources, see: https://www.geeksforgeeks.org/with-statement-in-python/ + with HDFStore(hdfname, mode = 'a') as store: + dummy_var = True + + def make_paths(self, base_path = False): + if base_path == False: # we are NOT forcing paths + if not (self.container == False): + self.state_path = self.container.state_path + "/" + self.name + self.attribute_path = self.container.attribute_path + "/" + self.name + elif self.name == "": + self.state_path = "/STATE" + self.attribute_path = "/OBJECTS" + else: + self.state_path = "/STATE/" + self.name + self.attribute_path = "/OBJECTS/" + self.name + else: + # base_path is a Dict with state_path and attribute_path set + self.state_path = base_path['STATE'] + self.name + self.attribute_path = base_path['OBJECTS'] + self.name + return self.state_path + + def get_state(self, var_name = False): + if var_name == False: + return self.state_ix[self.ix] + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + if (var_ix == False): + return False + return self.state_ix[var_ix] + + def get_exec_order(self, var_name = False): + if var_name == False: + var_ix = self.ix + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + exec_order = get_exec_order(self.model_exec_list,var_ix) + return exec_order + + def get_object(self, var_name = False): + if var_name == False: + return self.model_object_cache[self.state_path] + else: + var_path = self.find_var_path(var_name) + return self.model_object_cache[var_path] + + + def find_var_path(self, var_name, local_only = False): + # check local inputs for name + if var_name in self.inputs.keys(): + return self.inputs[var_name] + if local_only: + return False # we are limiting the scope, so just return + # check parent for name + if not (self.container == False): + return self.container.find_var_path(var_name) + # check for root 
state vars STATE + var_name + if ("/STATE/" + var_name) in self.state_paths.keys(): + #return self.state_paths[("/STATE/" + var_name)] + return ("/STATE/" + var_name) + # check for root state vars + if var_name in self.state_paths.keys(): + #return self.state_paths[var_name] + return var_name + return False + + def constant_or_path(self, keyname, keyval, trust = False): + if is_float_digit(keyval): + # we are given a constant value, not a variable reference + k = ModelConstant(keyname, self, float(keyval)) + kix = k.ix + else: + kix = self.add_input(keyname, keyval, 2, trust) + return kix + + def register_path(self): + # initialize the path variable if not already set + if self.state_path == '': + self.make_paths() + self.ix = set_state(self.state_ix, self.state_paths, self.state_path, self.default_value) + # store object in model_object_cache + if not (self.state_path in self.model_object_cache.keys()): + self.model_object_cache[self.state_path] = self + # this should check to see if this object has a parent, and if so, register the name on the parent + # default is as a child object. + if not (self.container == False): + # since this is a request to actually create a new path, we instruct trust = True as last argument + return self.container.add_input(self.name, self.state_path, 1, True) + return self.ix + + def add_input(self, var_name, var_path, input_type = 1, trust = False): + # this will add to the inputs, but also insure that this + # requested path gets added to the state/exec stack via an input object if it does + # not already exist. 
+ # - var_name = the local name for this linked entity/attribute + # - var_path = the full path of the entity/attribute we are linking to + # - input types: 1: parent-child link, 2: state property link, 3: timeseries object property link + # - trust = False means fail if the path does not already exist, True means assume it will be OK which is bad policy, except for the case where the path points to an existing location + # do we have a path here already or can we find on the parent? + # how do we check if this is a path already, in which case we trust it? + # todo: we should be able to alias a var_name to a var_path, for example + # calling add_input('movar', 'month', 1, True) + # this *should* search for month and find the STATE/month variable + # BUT this only works if both var_name and var_path are month + # so add_input('month', 'month', 1, True) works. + found_path = self.find_var_path(var_path) + #print("Searched", var_name, "with path", var_path,"found", found_path) + var_ix = get_state_ix(self.state_ix, self.state_paths, found_path) + if var_ix == False: + if (trust == False): + raise Exception("Cannot find variable path: " + var_path + " when adding input to object " + self.name + " as input named " + var_name + " ... process terminated. Path to object with error is " + self.state_path) + var_ix = self.insure_path(var_path) + else: + # if we are to trust the path, this might be a child property just added, + # and therefore, we don't look further than this + # otherwise, we use found_path, whichever it is, as + # we know that this path is better, as we may have been given a simple variable name + # and so found_path will look more like /STATE/RCHRES_001/... + if trust == False: + var_path = found_path + self.inputs[var_name] = var_path + self.inputs_ix[var_name] = var_ix + return self.inputs_ix[var_name] + + def add_object_input(self, var_name, var_object, link_type = 1): + # See above for details. 
+ # this adds an object as a link to another object + self.inputs[var_name] = var_object.state_path + self.inputs_ix[var_name] = var_object.ix + return self.inputs_ix[var_name] + + def create_parent_var(self, parent_var_name, source_object): + # see decision points: https://github.com/HARPgroup/HSPsquared/issues/78 + # This is used when an object sets an additional property on its parent + # Like in simple_channel sets [channel prop name]_Qout on its parent + # Generally, this should have 2 components. + # 1 - a state variable on the child (this could be an implicit sub-comp, or a constant sub-comp, the child handles the setup of this) see constant_or_path() + # 2 - an input link + self.container.add_object_input(parent_var_name, source_object, 1) + + def insure_path(self, var_path): + # if this path can be found in the hdf5 make sure that it is registered in state + # and that it has needed object class to render it at runtime (some are automatic) + # RIGHT NOW THIS DOES NOTHING TO CHECK IF THE VAR EXISTS THIS MUST BE FIXED + var_ix = set_state(self.state_ix, self.state_paths, var_path, 0.0) + return var_ix + + def get_dict_state(self, ix = -1): + if ix >= 0: + return self.dict_ix[ix] + return self.dict_ix[self.ix] + + def find_paths(self): + # Note: every single piece of data used by objects, even constants, are resolved to a PATH in the hdf5 + # find_paths() is called to insure that all of these can be found, and then, are added to inputs/inputs_ix + # - We wait to find the index values for those variables after all things have been loaded + # - base ModelObject does not have any "implicit" inputs, since all of its inputs are + # explicitly added children objects, thus we default to True + self.paths_found = True + # - But children such as Equation and DataMatrix, etc + # so they mark paths_found = False and then + # should go through their own locally defined data + # and call add_input() for any data variables encountered + # - add_input() will handle 
searching for the paths and ix values + # and should also handle deciding if this is a constant, like a numeric value + # or a variable data and should handle them accordingly + return True + + def tokenize(self): + # renders tokens for high speed execution + if (self.paths_found == False): + raise Exception("path_found False for object" + self.name + "(" + self.state_path + "). " + "Tokens cannot be generated until method '.find_paths()' is run for all model objects ... process terminated. (see function `model_path_loader(model_object_cache)`)") + self.ops = [self.optype, self.ix] + + def add_op_tokens(self): + # this puts the tokens into the global simulation queue + # can be customized by subclasses to add multiple lines if needed. + if self.ops == []: + self.tokenize() + #print(self.name, "tokens", self.ops) + self.op_tokens[self.ix] = np.asarray(self.ops, dtype="i8") + + def step(self, step): + # this tests the model for a single timestep. + # this is not the method that is used for high-speed runs, but can theoretically be used for + # easier to understand demonstrations + step_one(self.op_tokens, self.op_tokens[self.ix], self.state_ix, self.dict_ix, self.ts_ix, step) + #step_model({self.op_tokens[self.ix]}, self.state_ix, self.dict_ix, self.ts_ix, step) + + def dddstep_model(op_tokens, state_ix, dict_ix, ts_ix, step): + for i in op_tokens.keys(): + if op_tokens[i][0] == 1: + state_ix[i] = step_equation(op_tokens[i], state_ix) + elif op_tokens[i][0] == 2: + state_ix[i] = exec_tbl_eval(op_tokens[i], state_ix, dict_ix) + elif op_tokens[i][0] == 3: + step_model_link(op_tokens[i], state_ix, ts_ix, step) + elif op_tokens[i][0] == 4: + return False + elif op_tokens[i][0] == 5: + step_sim_timer(op_tokens[i], state_ix, dict_ix, ts_ix, step) + return + +""" +The class ModelConstant is for storing constants. It must be loaded here because ModelObject calls it. +Is this useful or just clutter? Useful I think since there are numerical constants... 
+""" +class ModelConstant(ModelObject): + def __init__(self, name, container = False, value = 0.0, state_path = False): + if (state_path != False): + # this allows us to mandate the location. useful for placeholders, broadcasts, etc. + self.state_path = state_path + super(ModelConstant, self).__init__(name, container) + self.default_value = float(value) + self.optype = 7 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric) + #print("ModelConstant named",self.name, "with path", self.state_path,"and ix", self.ix, "value", value) + var_ix = self.set_state(float(value)) + self.paths_found = True + # self.state_ix[self.ix] = self.default_value + + def required_properties(): + req_props = super(ModelConstant, ModelConstant).required_properties() + req_props.extend(['value']) + return req_props + +# njit functions for runtime + +@njit +def exec_model_object( op, state_ix, dict_ix): + ix = op[1] + return 0.0 \ No newline at end of file diff --git a/HSP2/om_sim_timer.py b/HSP2/om_sim_timer.py new file mode 100644 index 00000000..33056fa6 --- /dev/null +++ b/HSP2/om_sim_timer.py @@ -0,0 +1,84 @@ +""" +The class SimTimer is used to translate copy data from one state location to another. +It is also used to make an implicit parent child link to insure that an object is loaded +during a model simulation. +""" +from HSP2.state import * +from HSP2.om import * +from HSP2.om_model_object import ModelObject +from pandas import DataFrame, DatetimeIndex +from numba import njit + +class SimTimer(ModelObject): + def __init__(self, name, container, siminfo): + super(SimTimer, self).__init__(name, container) + self.state_path = '/STATE/timer' + self.time_array = self.dti_to_time_array(siminfo) # creates numpy formatted array of year, mo, day, ... 
for each timestep + self.date_path_ix = [] # where are the are components stored in the state_ix Dict + self.optype = 5 # 0 - ModelObject, 1 - Equation, 2 - datamatrix, 3 - ModelLinkage, 4 - BroadcastChannel, 5 - SimTimer + self.register_components() # now that we have a time array, we set the basic state and all time comps into state + + def register_components(self): + # initialize the path variable if not already set + self.ix = set_state(self.state_ix, self.state_paths, self.state_path, float(self.time_array[0][0])) + # now register all other paths. + # register "year", "month" "day", ... + year_ix = set_state(self.state_ix, self.state_paths, "/STATE/year", float(self.time_array[0][1])) + month_ix = set_state(self.state_ix, self.state_paths, "/STATE/month", float(self.time_array[0][2])) + day_ix = set_state(self.state_ix, self.state_paths, "/STATE/day", float(self.time_array[0][3])) + hr_ix = set_state(self.state_ix, self.state_paths, "/STATE/hour", float(self.time_array[0][4])) + min_ix = set_state(self.state_ix, self.state_paths, "/STATE/minute", float(self.time_array[0][5])) + sec_ix = set_state(self.state_ix, self.state_paths, "/STATE/second", float(self.time_array[0][6])) + wd_ix = set_state(self.state_ix, self.state_paths, "/STATE/weekday", float(self.time_array[0][7])) + dt_ix = set_state(self.state_ix, self.state_paths, "/STATE/dt", float(self.time_array[0][8])) + jd_ix = set_state(self.state_ix, self.state_paths, "/STATE/jday", float(self.time_array[0][9])) + md_ix = set_state(self.state_ix, self.state_paths, "/STATE/modays", float(self.time_array[0][10])) + dts_ix = set_state(self.state_ix, self.state_paths, "/STATE/dts", float(self.time_array[0][8] * 60.0)) + self.date_path_ix = [year_ix, month_ix, day_ix, hr_ix, min_ix, sec_ix, wd_ix, dt_ix, jd_ix, md_ix, dts_ix] + self.dict_ix[self.ix] = self.time_array + + return self.ix + + def tokenize(self): + # call parent method which sets standard ops + # returns an array of data pointers + 
super().tokenize() # resets ops to common base + self.ops = self.ops + self.date_path_ix # adds timer specific items + + def add_op_tokens(self): + # this puts the tokens into the global simulation queue + # can be customized by subclasses to add multiple lines if needed. + #self.op_tokens[self.ix] = self.ops + super().add_op_tokens() + self.dict_ix[self.ix] = self.time_array + + def dti_to_time_array(self, siminfo): + dateindex = siminfo['tindex'] + dt = siminfo['delt'] + # sim timer is special, one entry for each time component for each timestep + # convert DateIndex to numbers [int(i) for i in dateindex.year] + tdi = { 0: dateindex.astype(np.int64), 1:[float(i) for i in dateindex.year], 2:[float(i) for i in dateindex.month], 3:[float(i) for i in dateindex.day], 4:[float(i) for i in dateindex.hour], 5:[float(i) for i in dateindex.minute], 6:[float(i) for i in dateindex.second], 7:[float(i) for i in dateindex.weekday], 8:[float(dt) for i in dateindex], 9:[float(i) for i in dateindex.day_of_year], 10:[float(i) for i in dateindex.daysinmonth], 11:[float(dt * 60.0) for i in dateindex] } + #tdi = { 0:dateindex.year, 1:dateindex.month, 2:dateindex.day, 3:dateindex.hour, 4:dateindex.minute, 5:dateindex.second } + tid = DataFrame(tdi) + h = 1 # added to increase row count for debug testing. 
+ time_array = tid.to_numpy() + return time_array + +# Function for use during model simulations of tokenized objects +@njit +def step_sim_timer(op_token, state_ix, dict_ix, ts_ix, step): + # note: the op_token and state index are off by 1 since the dict_ix does not store type + #print("Exec step_sim_timer at step:", step, "jday", dict_ix[op_token[1]][step][9] ) + state_ix[op_token[1]] = dict_ix[op_token[1]][step][0] # unix timestamp here + state_ix[op_token[2]] = dict_ix[op_token[1]][step][1] # year + state_ix[op_token[3]] = dict_ix[op_token[1]][step][2] # month + state_ix[op_token[4]] = dict_ix[op_token[1]][step][3] # day + state_ix[op_token[5]] = dict_ix[op_token[1]][step][4] # hour + state_ix[op_token[6]] = dict_ix[op_token[1]][step][5] # minute + state_ix[op_token[7]] = dict_ix[op_token[1]][step][6] # second + state_ix[op_token[8]] = dict_ix[op_token[1]][step][7] # weekday + state_ix[op_token[9]] = dict_ix[op_token[1]][step][8] # dt + state_ix[op_token[10]] = dict_ix[op_token[1]][step][9] # julian day + state_ix[op_token[11]] = dict_ix[op_token[1]][step][10] # modays + state_ix[op_token[12]] = dict_ix[op_token[1]][step][11] # dts + return diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py new file mode 100644 index 00000000..4348946a --- /dev/null +++ b/HSP2/om_special_action.py @@ -0,0 +1,134 @@ +""" +The class SpecialAction is used to support original HSPF ACTIONS. 
+""" +from HSP2.state import * +from HSP2.om import * +from HSP2.om_model_object import ModelObject +from numba import njit +class SpecialAction(ModelObject): + def __init__(self, name, container = False, model_props = []): + super(SpecialAction, self).__init__(name, container, model_props) + + self.optype = 100 # Special Actions start indexing at 100 + + def parse_model_props(self, model_props, strict=False): + print("SpecialAction.parse_model_props() called") + # comes in as row from special ACTIONS table + # ex: { + # 'OPTYP': 'RCHRES', 'RANGE1': '1', 'RANGE2': '', 'DC': 'DY', 'DS': '', + # 'YR': '1986', 'MO': '3', 'DA': '1', 'HR': '12', 'MN': '', + # 'D': '2', 'T': 3, 'VARI': 'IVOL', 'S1': '', 'S2': '', + # 'AC': '+=', 'VALUE': 30.0, 'TC': '', 'TS': '', 'NUM': '', 'CURLVL': 0, + # defined by: + # - operand1, i.e. variable to access + update, path = /STATE/[OPTYP]_[op_abbrev][RANGE1]/[VARI] + # - action(operation) to perform = AC + # - operand2, a numeric value for simple ACTION = [VALUE] + # note: [op_abbrev] is *maybe* the first letter of the OPTYP? 
Not a very good idea to have a coded convention like that + self.op_type = self.handle_prop(model_props, 'OPTYP') + self.range1 = self.handle_prop(model_props, 'RANGE1') + self.range2 = self.handle_prop(model_props, 'RANGE2') + self.ac = self.handle_prop(model_props, 'AC') # must handle this before we handle the operand to check for DIV by Zero + self.vari = self.handle_prop(model_props, 'VARI') + self.op2_val = self.handle_prop(model_props, 'VALUE') + self.op2_ix = self.constant_or_path('op_val', self.op2_val) # constant values must be added to STATE and thus are referenced by their state_ix number + # now add the state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially + self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) + # @tbd: support time enable/disable + # - check if time ops have been set and add as inputs like "year", or "month", etc could give explicit path /STATE/year ... + # - add the time values to match as constants i.e. self.constant_or_path() + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # Insure all values are legal ex: no DIV by Zero + prop_val = super().handle_prop(model_props, prop_name, strict, default_value ) + if (prop_name == 'VALUE') and (self.ac == '/='): + if (prop_val == 0) or (prop_val == None): + raise Exception("Error: in properties passed to "+ self.name + " AC must be non-zero or non-Null . Object creation halted. 
Path to object with error is " + self.state_path)
+        if (prop_name == 'AC'):
+            self.handle_ac(prop_val)
+        return prop_val
+
+    def handle_ac(self, ac):
+        # cop_code 0: =/eq, 1: </lt, 2: >/gt, 3: <=/le, 4: >=/ge, 5: <>/ne
+        cop_codes = {
+            '=': 1,
+            '+=': 2,
+            '-=': 3,
+            '*=': 4,
+            '/=': 5,
+            'MIN': 6
+        }
+        # From HSPF UCI docs:
+        # 1 = T= A
+        # 2 += T= T+ A
+        # 3 -= T= T- A
+        # 4 *= T= T*A
+        # 5 /= T= T/A
+        # 6 MIN T= Min(T,A)
+        # 7 MAX T= Max(T,A)
+        # 8 ABS T= Abs(A)
+        # 9 INT T= Int(A)
+        # 10 ^= T= T^A
+        # 11 LN T= Ln(A)
+        # 12 LOG T= Log10(A)
+        # 13 MOD T= Mod(T,A)
+        if is_float_digit(self.ac):
+            if not (int(self.ac) in cop_codes.values()):
+                raise Exception("Error: in "+ self.name + " AC (" + str(self.ac) + ") not supported. Object creation halted. Path to object with error is " + self.state_path)
+            ac = int(self.ac)
+        else:
+            # this will fail catastrophically if the requested function is not supported
+            # which is a good thing
+            ac = cop_codes[self.ac]
+        self.opid = ac
+
+    def tokenize(self):
+        # call parent method to set basic ops common to all
+        super().tokenize() # sets self.ops = op_type, op_ix
+        self.ops = self.ops + [self.inputs_ix['op1'], self.opid, self.op2_ix]
+        # @tbd: check if time ops have been set and tokenize accordingly
+        print("Specl", self.name, "tokens", self.ops)
+
+    def add_op_tokens(self):
+        # this puts the tokens into the global simulation queue
+        # can be customized by subclasses to add multiple lines if needed.
+        super().add_op_tokens()
+
+    @staticmethod
+    def hdf5_load_all(hdf_source):
+        specla=hdf_source['/SPEC_ACTIONS/ACTIONS/table']
+        for idx, x in np.ndenumerate(specla):
+            print(x[1].decode("utf-8"),x[2].decode("utf-8"), x[13].decode("utf-8"), x[16].decode("utf-8"), x[17])
+
+
+# njit functions for runtime
+
+@njit
+def step_special_action(op, state_ix, dict_ix, step):
+    ix = op[1] # ID of this op
+    # these indices must be adjusted to reflect the number of common op tokens
+    # SpecialAction has:
+    # - type of condition (+=, -=, ...)
+ # - operand 1 (left side) + # - operand 2 (right side) + # @tbd: check if time ops have been set and enable/disable accordingly + # - 2 ops will be added for each time matching switch, the state_ix of the time element (year, month, ...) and the state_ix of the constant to match + # - matching should be as simple as if (state_ix[tix1] <> state_ix[vtix1]): return state_ix[ix1] (don't modify the value) + # + ix1 = op[2] # ID of source of data and destination of data + sop = op[3] + ix2 = op[4] + if sop == 1: + result = state_ix[ix2] + if sop == 2: + result = state_ix[ix1] + state_ix[ix2] + if sop == 3: + result = state_ix[ix1] - state_ix[ix2] + if sop == 4: + result = state_ix[ix1] * state_ix[ix2] + if sop == 5: + result = state_ix[ix1] / state_ix[ix2] + # set value in target + # tbd: handle this with a model linkage? cons: this makes a loop since the ix1 is source and destination + state_ix[ix1] = result + return result + diff --git a/HSP2/state.py b/HSP2/state.py index 67ac24a0..fa19c15e 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -23,6 +23,8 @@ def init_state_dicts(): # initialize state for hydr # now put all of these Dicts into the state Dict state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] = state_paths, state_ix, dict_ix, ts_ix + # add a generic place to stash model_data for dynamic components + state['model_data'] = {} return state @@ -117,6 +119,7 @@ def append_state(state_ix, var_value): return val_ix def state_context_hsp2(state, operation, segment, activity): + # this establishes domain info so that a module can know its paths state['operation'] = operation state['segment'] = segment # state['activity'] = activity @@ -130,6 +133,10 @@ def state_siminfo_hsp2(uci_obj, siminfo): siminfo['tindex'] = date_range(siminfo['start'], siminfo['stop'], freq=Minute(delt))[1:] siminfo['steps'] = len(siminfo['tindex']) +def state_load_hdf5_components(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, 
model_object_cache): + # Implement population of model_object_cache etc from components in a hdf5 such as Special ACTIONS + return + def state_load_dynamics_hsp2(state, io_manager, siminfo): # Load any dynamic components if present, and store variables on objects hsp2_local_py = load_dynamics(io_manager, siminfo) @@ -200,4 +207,5 @@ def load_dynamics(io_manager, siminfo): else: # print("state_step_hydr function not defined. Using default") return False - return hsp2_local_py \ No newline at end of file + return hsp2_local_py + diff --git a/tests/testcbp/HSP2results/example_manual_object.py b/tests/testcbp/HSP2results/example_manual_object.py new file mode 100644 index 00000000..021c10a8 --- /dev/null +++ b/tests/testcbp/HSP2results/example_manual_object.py @@ -0,0 +1,62 @@ +# this is a code remnant that lays out a manually created set of objects +# in order to use this appropriate libs must be loaded but this does not yet do so + + +# now add a simple table +data_table = np.asarray([ [ 0.0, 5.0, 10.0], [10.0, 15.0, 20.0], [20.0, 25.0, 30.0], [30.0, 35.0, 40.0] ], dtype= "float32") +dm = DataMatrix('dm', river, data_table) +dm.add_op_tokens() +# 2d lookup +dma = DataMatrixLookup('dma', river, dm.state_path, 2, 17.5, 1, 6.8, 1, 0.0) +dma.add_op_tokens() +# 1.5d lookup +#dma = DataMatrixLookup('dma', river, dm.state_path, 3, 17.5, 1, 1, 1, 0.0) +#dma.add_op_tokens() + +facility = ModelObject('facility', river) + +Qintake = Equation('Qintake', facility, "Qin * 1.0") +Qintake.add_op_tokens() +# a flowby +flowby = Equation('flowby', facility, "Qintake * 0.9") +flowby.add_op_tokens() +# add a withdrawal equation +# we use "3.0 + 0.0" because the equation parser fails on a single factor (number of variable) +# so we have to tweak that. 
However, we need to handle constants separately, and also if we see a +# single variable equation (such as Qup = Qhydr) we need to rewrite that to a input anyhow for speed +wd_mgd = Equation('wd_mgd', facility, "3.0 + 0.0") +wd_mgd.add_op_tokens() +# Runit - unit area runoff +Runit = Equation('Runit', facility, "Qin / 592.717") +Runit.add_op_tokens() +# add local subwatersheds to test scalability +""" +for k in range(10): + subshed_name = 'sw' + str(k) + upstream_name = 'sw' + str(k-1) + Qout_eqn = str(25*random.random()) + " * Runit " + if k > 0: + Qout_eqn = Qout_eqn + " + " + upstream_name + "_Qout" + Qout_ss = Equation(subshed_name + "_Qout", facility, eqn) + Qout_ss.add_op_tokens() +# now add the output of the final tributary to the inflow to this one +Qtotal = Equation("Qtotal", facility, "Qin + " + Qout_ss.name) +Qtotal.tokenize() +""" +# add random ops to test scalability +# add a series of rando equations +""" +c=["flowby", "wd_mgd", "Qintake"] +for k in range(10000): + eqn = str(25*random.random()) + " * " + c[round((2*random.random()))] + newq = Equation('eq' + str(k), facility, eqn) + newq.add_op_tokens() +""" +# now connect the wd_mgd back to the river with a direct link. +# This is not how we'll do it for most simulations as there may be multiple inputs but will do for now +hydr = ModelObject('HYDR', river) +hydr.add_op_tokens() +O1 = ModelLinkage('O1', hydr, wd_mgd.state_path, 2) +O1.add_op_tokens() + +return