From 857da88d79c02f0fbf407ad335f21dc64575e5b7 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 29 Nov 2023 05:41:01 -0500 Subject: [PATCH 01/35] added state path initialization for hydr --- HSP2/main.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/HSP2/main.py b/HSP2/main.py index a62497b9..adfcb873 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -66,6 +66,14 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None # Add support for dynamic functins to operate on STATE # - Load any dynamic components if present, and store variables on objects state_load_dynamics_hsp2(state, io_manager, siminfo) + # Iterate through all segments and add crucial paths to state + # before loading dynamic components that may reference them + for _, operation, segment, delt in opseq.itertuples(): + for activity, function in activities[operation].items(): + if activity == 'HYDR': + state_context_hsp2(state, operation, segment, activity) + print("Init HYDR state context for domain", state['domain']) + hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state ####################################################################################### From 8fd5775fd17416fac8ac625469f3355dca08b9b5 Mon Sep 17 00:00:00 2001 From: "Robert W. 
Burgholzer" Date: Wed, 29 Nov 2023 18:28:04 +0000 Subject: [PATCH 02/35] specl uci example and fix accidentally nullified specactions Dict --- .gitignore | 1 + HSP2/main.py | 2 - tests/testcbp/HSP2results/PL3_5250_specl.uci | 234 +++++++++++++++++++ 3 files changed, 235 insertions(+), 2 deletions(-) create mode 100644 tests/testcbp/HSP2results/PL3_5250_specl.uci diff --git a/.gitignore b/.gitignore index 29e7e907..783892de 100644 --- a/.gitignore +++ b/.gitignore @@ -64,4 +64,5 @@ tests/GLWACSO/HSP2results/hspp007.uci tests/test_report_conversion.html tests/land_spec/hwmA51800.h5 tests/testcbp/HSP2results/PL3_5250_0001.h5 +tests/testcbp/HSP2results/PL3_5250_specl.h5 tests/testcbp/HSP2results/*.csv diff --git a/HSP2/main.py b/HSP2/main.py index adfcb873..abd954ae 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -50,7 +50,6 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None ftables = uci_obj.ftables specactions = uci_obj.specactions monthdata = uci_obj.monthdata - specactions = {} # placeholder till added to uci parser start, stop = siminfo['start'], siminfo['stop'] @@ -77,7 +76,6 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state ####################################################################################### - # main processing loop msg(1, f'Simulation Start: {start}, Stop: {stop}') tscat = {} diff --git a/tests/testcbp/HSP2results/PL3_5250_specl.uci b/tests/testcbp/HSP2results/PL3_5250_specl.uci new file mode 100644 index 00000000..83696691 --- /dev/null +++ b/tests/testcbp/HSP2results/PL3_5250_specl.uci @@ -0,0 +1,234 @@ +RUN + +GLOBAL + PL3_5250_0 riv | P5 | hsp2_2022 | Occoquan + START 2001/01/01 END 2001/12/31 + RUN INTERP OUTPUT LEVEL 1 1 + RESUME 0 RUN 1 UNIT SYSTEM 1 +END GLOBAL + +FILES + ***<----FILE 
NAME-------------------------------------------------> +WDM1 21 met_A51059.wdm +WDM2 22 prad_A51059.wdm +WDM3 23 ps_sep_div_ams_hsp2_2022_PL3_5250_0001.wdm +WDM4 24 PL3_5250_0001.wdm +MESSU 25 PL3_5250_0001.ech + 26 PL3_5250_0001.out + 31 PL3_5250_0001.tau +END FILES + +OPN SEQUENCE + INGRP INDELT 01:00 + RCHRES 1 + PLTGEN 1 + END INGRP +END OPN SEQUENCE + +RCHRES + ACTIVITY + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG *** + 1 1 1 0 0 0 0 0 0 0 0 + END ACTIVITY + + PRINT-INFO + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG PIVL***PY + 1 5 5 0 0 0 0 0 0 0 0 0 12 + END PRINT-INFO + + GEN-INFO + RCHRES<-------Name------->Nexit Unit Systems Printer *** + # - # User t-series Engl Metr LKFG *** + 1 PL3_5250_0001 3 1 1 1 26 0 1 + END GEN-INFO + + HYDR-PARM1 + RCHRES Flags for HYDR section *** + # - # VC A1 A2 A3 ODFVFG for each ODGTFG for each *** FUNCT for each + FG FG FG FG possible exit possible exit *** possible exit + 1 2 3 4 5 1 2 3 4 5 *** 1 2 3 4 5 + VC A1 A2 A3 V1 V2 V3 V4 V5 G1 G2 G3 G4 G5 *** F1 F2 F3 F4 F5 + 1 0 1 1 1 0 0 4 0 0 1 2 0 0 0 0 0 0 0 0 + END HYDR-PARM1 + + HYDR-PARM2 + RCHRES *** + # - # FTABNO LEN DELTH STCOR KS DB50 *** + 1 1. 10. 2. 0.5 + END HYDR-PARM2 + + HYDR-INIT + RCHRES Initial conditions for HYDR section *** + # - # VOL Initial value of COLIND *** Initial value of OUTDGT + (ac-ft) for each possible exit *** for each possible exit + VOL CEX1 CEX2 CEX3 CEX4 CEX5 *** DEX1 DEX2 DEX3 DEX4 DEX5 + 1 12175.000 + END HYDR-INIT + + ADCALC-DATA + RCHRES Data for section ADCALC *** + # - # CRRAT VOL *** + 1 1.5 12175. 
+ END ADCALC-DATA + +END RCHRES + +FTABLES + FTABLE 1 + ROWS COLS *** + 20 4 + DEPTH AREA VOLUME DISCH *** + (FT) (ACRES) (AC-FT) (CFS) *** + 0 0 0 0 + 20 124 1007 0 + 30 240 2781 0 + 40 444 6106 0 + 50 804 12175 0 + 52 909 13886 39 + 54 1024 15819 78 + 56 1155 17999 117 + 57 1226 19227 136 + 58 1296 20456 137 + 60 1413 23180 138 + 62 1524 26140 140 + 63 1586 27745 1922 + 64 1647 29351 5179 + 65 1701 31247 9398 + 66 1755 33143 14393 + 67 1803 34984 20645 + 69 1879 38705 36532 + 70 1908 40585 44603 + 76 2100 54000 103071 + END FTABLE 1 +END FTABLES + +EXT SOURCES +<-Volume-> SsysSgap<--Mult-->Tran <-Target vols> <-Grp> <-Member->*** + # # tem strg<-factor->strg # # # #*** +*** METEOROLOGY +WDM1 1000 EVAP ENGLZERO 1.000 SAME RCHRES 1 EXTNL POTEV +WDM1 1001 DEWP ENGLZERO SAME RCHRES 1 EXTNL DEWTMP +WDM1 1002 WNDH ENGLZERO SAME RCHRES 1 EXTNL WIND +WDM1 1003 RADH ENGLZERO SAME RCHRES 1 EXTNL SOLRAD +WDM1 1004 ATMP ENGLZERO SAME RCHRES 1 EXTNL GATMP +WDM1 1005 CLDC ENGLZERO SAME RCHRES 1 EXTNL CLOUD + +*** PRECIPITATION AND ATMOSPHERIC DEPOSITION LOADS +WDM2 2000 HPRC ENGLZERO SAME RCHRES 1 EXTNL PREC +WDM2 2001 NO23 ENGLZERO DIV RCHRES 1 EXTNL NUADFX 1 1 +WDM2 2002 NH4A ENGLZERO DIV RCHRES 1 EXTNL NUADFX 2 1 +WDM2 2003 NO3D ENGLZERO DIV RCHRES 1 EXTNL NUADFX 1 1 +WDM2 2004 NH4D ENGLZERO DIV RCHRES 1 EXTNL NUADFX 2 1 +WDM2 2005 ORGN ENGLZERO DIV RCHRES 1 EXTNL PLADFX 1 1 +WDM2 2006 PO4A ENGLZERO DIV RCHRES 1 EXTNL NUADFX 3 1 +WDM2 2007 ORGP ENGLZERO DIV RCHRES 1 EXTNL PLADFX 2 1 + +*** POINT SOURCE +WDM3 3000 FLOW ENGLZERO DIV RCHRES 1 INFLOW IVOL +WDM3 3001 HEAT ENGLZERO DIV RCHRES 1 INFLOW IHEAT +WDM3 3002 NH3X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 2 +WDM3 3003 NO3X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 1 +WDM3 3004 ORNX ENGLZERO DIV RCHRES 1 INFLOW PKIF 3 +WDM3 3005 PO4X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 4 +WDM3 3006 ORPX ENGLZERO DIV RCHRES 1 INFLOW PKIF 4 +WDM3 3021 BODX ENGLZERO DIV RCHRES 1 INFLOW OXIF 2 +WDM3 3022 TSSX ENGLZERO 0.0005 DIV RCHRES 1 INFLOW ISED 3 +WDM3 
3023 DOXX ENGLZERO DIV RCHRES 1 INFLOW OXIF 1 +WDM3 3024 TOCX ENGLZERO DIV RCHRES 1 INFLOW PKIF 5 + +*** DIVERSIONS +WDM3 3007 DIVR ENGLZERO SAME RCHRES 1 EXTNL OUTDGT 1 +WDM3 3008 DIVA ENGLZERO SAME RCHRES 1 EXTNL OUTDGT 2 + +*** SEPTIC +WDM3 3010 SNO3 ENGLZERO 1.0000 DIV RCHRES 1 INFLOW NUIF1 1 + +*** AEOLIAN SEDIMENT +WDM3 3061 SFAS ENGLZERO 7.027e-06DIV RCHRES 1 INFLOW ISED 2 +WDM3 3062 SFAC ENGLZERO 7.027e-06DIV RCHRES 1 INFLOW ISED 3 + +*** UPSTREAM and EOS INPUT *** +WDM4 11 WATR ENGLZERO SAME RCHRES 1 INFLOW IVOL +WDM4 12 HEAT ENGLZERO SAME RCHRES 1 INFLOW IHEAT +WDM4 13 DOXY ENGLZERO SAME RCHRES 1 INFLOW OXIF 1 +WDM4 21 SAND ENGLZERO SAME RCHRES 1 INFLOW ISED 1 +WDM4 22 SILT ENGLZERO SAME RCHRES 1 INFLOW ISED 2 +WDM4 23 CLAY ENGLZERO SAME RCHRES 1 INFLOW ISED 3 +WDM4 31 NO3D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 1 +WDM4 32 NH3D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 2 +WDM4 33 NH3A ENGLZERO SAME RCHRES 1 INFLOW NUIF2 1 1 +WDM4 34 NH3I ENGLZERO SAME RCHRES 1 INFLOW NUIF2 2 1 +WDM4 35 NH3C ENGLZERO SAME RCHRES 1 INFLOW NUIF2 3 1 +WDM4 36 RORN ENGLZERO SAME RCHRES 1 INFLOW PKIF 3 +WDM4 41 PO4D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 4 +WDM4 42 PO4A ENGLZERO SAME RCHRES 1 INFLOW NUIF2 1 2 +WDM4 43 PO4I ENGLZERO SAME RCHRES 1 INFLOW NUIF2 2 2 +WDM4 44 PO4C ENGLZERO SAME RCHRES 1 INFLOW NUIF2 3 2 +WDM4 45 RORP ENGLZERO SAME RCHRES 1 INFLOW PKIF 4 +WDM4 51 BODA ENGLZERO SAME RCHRES 1 INFLOW OXIF 2 +WDM4 52 TORC ENGLZERO SAME RCHRES 1 INFLOW PKIF 5 +WDM4 53 PHYT ENGLZERO SAME RCHRES 1 INFLOW PKIF 1 +END EXT SOURCES + +EXT TARGETS +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Volume-> Tsys Tgap Amd *** + # # #<-factor->strg # # tem strg strg*** +RCHRES 1 OFLOW OVOL 3 SAME WDM4 111 WATR ENGL REPL +RCHRES 1 OFLOW OHEAT 3 SAME WDM4 112 HEAT ENGL REPL +RCHRES 1 OFLOW OXCF2 3 1 SAME WDM4 113 DOXY ENGL REPL +RCHRES 1 OFLOW OSED 3 1 SAME WDM4 121 SAND ENGL REPL +RCHRES 1 OFLOW OSED 3 2 SAME WDM4 122 SILT ENGL REPL +RCHRES 1 OFLOW OSED 3 3 SAME WDM4 123 CLAY ENGL REPL +RCHRES 1 
OFLOW NUCF9 3 1 SAME WDM4 131 NO3D ENGL REPL +RCHRES 1 OFLOW NUCF9 3 2 SAME WDM4 132 NH3D ENGL REPL +RCHRES 1 OFLOW OSNH4 3 1 SAME WDM4 133 NH3A ENGL REPL +RCHRES 1 OFLOW OSNH4 3 2 SAME WDM4 134 NH3I ENGL REPL +RCHRES 1 OFLOW OSNH4 3 3 SAME WDM4 135 NH3C ENGL REPL +RCHRES 1 OFLOW PKCF2 3 3 SAME WDM4 136 RORN ENGL REPL +RCHRES 1 OFLOW NUCF9 3 4 SAME WDM4 141 PO4D ENGL REPL +RCHRES 1 OFLOW OSPO4 3 1 SAME WDM4 142 PO4A ENGL REPL +RCHRES 1 OFLOW OSPO4 3 2 SAME WDM4 143 PO4I ENGL REPL +RCHRES 1 OFLOW OSPO4 3 3 SAME WDM4 144 PO4C ENGL REPL +RCHRES 1 OFLOW PKCF2 3 4 SAME WDM4 145 RORP ENGL REPL +RCHRES 1 OFLOW OXCF2 3 2 SAME WDM4 151 BODA ENGL REPL +RCHRES 1 OFLOW PKCF2 3 5 SAME WDM4 152 TORC ENGL REPL +RCHRES 1 OFLOW PKCF2 3 1 SAME WDM4 153 PHYT ENGL REPL +END EXT TARGETS + +NETWORK +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # #<-factor->strg # # # # *** +RCHRES 1 HYDR TAU AVER PLTGEN 1 INPUT MEAN 1 +END NETWORK + +PLTGEN + PLOTINFO + # - # FILE NPT NMN LABL PYR PIVL *** + 1 31 1 12 24 + END PLOTINFO + + GEN-LABELS + # - #<----------------Title-----------------> *** + 1 PL3_5250_0001 daily_shear_stress_lbsft2 + END GEN-LABELS + + SCALING + #thru# YMIN YMAX IVLIN THRESH *** + 1 99 0. 100000. 20. + END SCALING + + CURV-DATA + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 daily_shear_stre 1 1 AVER + END CURV-DATA +END PLTGEN + +SPEC-ACTIONS +*** ACTIONS +***optyp range dc ds yr mo da hr mn d t vari s1 s2 s3 ac value tc ts num + <****><-><--><><-><--><-><-><-><-><><> <----><-><-><-><-><--------> <> <-><-> + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. 
+END SPEC-ACTIONS + +END RUN From 84d71b093dca5bc238448ae9973ef676c45d59ac Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 20 Dec 2023 11:09:28 -0500 Subject: [PATCH 03/35] basic object declaration for specal ACTIONS --- HSP2/om.py | 625 ++++++++++++++++++++++++++++++++++++++ HSP2/om_model_object.py | 340 +++++++++++++++++++++ HSP2/om_special_action.py | 54 ++++ 3 files changed, 1019 insertions(+) create mode 100644 HSP2/om.py create mode 100644 HSP2/om_model_object.py create mode 100644 HSP2/om_special_action.py diff --git a/HSP2/om.py b/HSP2/om.py new file mode 100644 index 00000000..3ead410a --- /dev/null +++ b/HSP2/om.py @@ -0,0 +1,625 @@ +# set up libraries to import for the load_sim_dicts function +# later, this will be drawing from the hdf5, but for now we +# are hard-wiring a set of components for testing. +# Note: these import calls must be done down here AFTER the helper functions +# defined aove that are called by the object classes +import random # this is only used for a demo so may be deprecated +import json +import requests +from requests.auth import HTTPBasicAuth +import csv +import pandas as pd +import numpy as np +import time +from numba.typed import Dict +from numpy import zeros +from numba import int8, float32, njit, types, typed # import the types +import random # this is only used for a demo so may be deprecated +from HSP2.state import * + + +def get_exec_order(model_exec_list, var_ix): + """ + Find the integer key of a variable name in state_ix + """ + model_exec_list = dict(enumerate(model_exec_list.flatten(), 1)) + for exec_order, ix in model_exec_list.items(): + if var_ix == ix: + # we need to add this to the state + return exec_order + return False + +def init_op_tokens(op_tokens, tops, eq_ix): + """ + Iinitialize the op_tokens Dict + This contains the runtime op code for every dynamic operation to be used + """ + for j in range(len(tops)): + if isinstance(tops[j], str): + # must add this to the state array as a constant + s_ix = 
append_state(state_ix, float(tops[j])) + tops[j] = s_ix + + op_tokens[eq_ix] = np.asarray(tops, dtype="i8") + +def is_float_digit(n: str) -> bool: + """ + Helper Function to determine if a variable is numeric + """ + try: + float(n) + return True + except ValueError: + return False + +from HSP2.om_model_object import * +from HSP2.om_sim_timer import * +from HSP2.om_equation import * +from HSP2.om_model_linkage import * +from HSP2.om_data_matrix import * +from HSP2.om_model_broadcast import * +from HSP2.om_simple_channel import * +from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries + +def init_om_dicts(): + """ + The base dictionaries used to store model object info + """ + op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) + model_object_cache = {} # this does not need to be a special Dict as it is not used in numba + return op_tokens, model_object_cache + +# This is deprecated but kept to support legacy demo code +# Function is not splot between 2 functions: +# - init_state_dicts() (from state.py) +# - init_om_dicts() from om.py +def init_sim_dicts(): + """ + We should get really good at using docstrings... + Agree. they are dope. 
+ """ + op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) + state_paths = Dict.empty(key_type=types.unicode_type, value_type=types.int64) + state_ix = Dict.empty(key_type=types.int64, value_type=types.float64) + dict_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:,:]) + ts_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:]) + model_object_cache = {} # this does not need to be a special Dict as it is not used in numba + return op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache + + +def load_sim_dicts(siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache): + # by setting the state_parhs, opt_tokens, state_ix etc on the abstract class ModelObject + # all objects that we create share this as a global referenced variable. + # this may be a good thing or it may be bad? For now, we leverage this to reduce settings props + # but at some point we move all prop setting into a function and this maybe doesn't seem so desirable + # since there could be some unintended consequences if we actually *wanted* them to have separate copies + # tho since the idea is that they are global registries, maybe that is not a valid concern. 
+ ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) + # set up the timer as the first element + timer = SimTimer('timer', False, siminfo) + #timer.add_op_tokens() + river = ModelObject('RCHRES_R001') + # upon object creation river gets added to state with path "/STATE/RCHRES_R001" + river.add_input("Qin", f'{river.state_path}/HYDR/IVOL', 2) + # alternative, using TIMESERIES: + # river.add_input("Qin", "/TIMESERIES/TS011", 3) + # river.add_input("ps_mgd", "/TIMESERIES/TS3000", 3) + river.add_op_tokens() # formally adds this to the simulation + + # now add a simple table + data_table = np.asarray([ [ 0.0, 5.0, 10.0], [10.0, 15.0, 20.0], [20.0, 25.0, 30.0], [30.0, 35.0, 40.0] ], dtype= "float32") + dm = DataMatrix('dm', river, data_table) + dm.add_op_tokens() + # 2d lookup + dma = DataMatrixLookup('dma', river, dm.state_path, 2, 17.5, 1, 6.8, 1, 0.0) + dma.add_op_tokens() + # 1.5d lookup + #dma = DataMatrixLookup('dma', river, dm.state_path, 3, 17.5, 1, 1, 1, 0.0) + #dma.add_op_tokens() + + facility = ModelObject('facility', river) + + Qintake = Equation('Qintake', facility, "Qin * 1.0") + Qintake.add_op_tokens() + # a flowby + flowby = Equation('flowby', facility, "Qintake * 0.9") + flowby.add_op_tokens() + # add a withdrawal equation + # we use "3.0 + 0.0" because the equation parser fails on a single factor (number of variable) + # so we have to tweak that. 
However, we need to handle constants separately, and also if we see a + # single variable equation (such as Qup = Qhydr) we need to rewrite that to a input anyhow for speed + wd_mgd = Equation('wd_mgd', facility, "3.0 + 0.0") + wd_mgd.add_op_tokens() + # Runit - unit area runoff + Runit = Equation('Runit', facility, "Qin / 592.717") + Runit.add_op_tokens() + # add local subwatersheds to test scalability + """ + for k in range(10): + subshed_name = 'sw' + str(k) + upstream_name = 'sw' + str(k-1) + Qout_eqn = str(25*random.random()) + " * Runit " + if k > 0: + Qout_eqn = Qout_eqn + " + " + upstream_name + "_Qout" + Qout_ss = Equation(subshed_name + "_Qout", facility, eqn) + Qout_ss.add_op_tokens() + # now add the output of the final tributary to the inflow to this one + Qtotal = Equation("Qtotal", facility, "Qin + " + Qout_ss.name) + Qtotal.tokenize() + """ + # add random ops to test scalability + # add a series of rando equations + """ + c=["flowby", "wd_mgd", "Qintake"] + for k in range(10000): + eqn = str(25*random.random()) + " * " + c[round((2*random.random()))] + newq = Equation('eq' + str(k), facility, eqn) + newq.add_op_tokens() + """ + # now connect the wd_mgd back to the river with a direct link. + # This is not how we'll do it for most simulations as there may be multiple inputs but will do for now + hydr = ModelObject('HYDR', river) + hydr.add_op_tokens() + O1 = ModelLinkage('O1', hydr, wd_mgd.state_path, 2) + O1.add_op_tokens() + + return + + +def load_om_components(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache): + # set up OM base dcits + op_tokens, model_object_cache = init_om_dicts() + # set globals on ModelObject + ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) + # Create the base that everything is added to. + # this object does nothing except host the rest. 
+ # it has no name so that all paths can be relative to it. + model_root_object = ModelObject("") + # set up the timer as the first element + timer = SimTimer('timer', model_root_object, siminfo) + #timer.add_op_tokens() + #print("siminfo:", siminfo) + #river = ModelObject('RCHRES_R001') + # upon object creation river gets added to state with path "/STATE/RCHRES_R001" + #river.add_input("Qivol", f'{river.state_path}/HYDR/IVOL', 2, True) + # a json NHD from R parser + # Opening JSON file + # load the json data from a pre-generated json file on github + + # this allows this function to be called without an hdf5 + if io_manager != False: + # try this + local_path = os.getcwd() + print("Path:", local_path) + (fbase, fext) = os.path.splitext(hdf5_path) + # see if there is a code module with custom python + print("Looking for custom python code ", (fbase + ".py")) + print("calling dynamic_module_import(",fbase, local_path + "/" + fbase + ".py", ", 'hsp2_local_py')") + hsp2_local_py = dynamic_module_import(fbase, local_path + "/" + fbase + ".py", "hsp2_local_py") + # Load a function from code if it exists + if 'om_init_model' in dir(hsp2_local_py): + hsp2_local_py.om_init_model(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache) + if 'om_step_hydr' in dir(hsp2_local_py): + siminfo['om_step_hydr'] = True + if 'state_step_hydr' in dir(hsp2_local_py): + siminfo['state_step_hydr'] = True + + # see if there is custom json + fjson = fbase + ".json" + model_data = {} + if (os.path.isfile(fjson)): + print("Found local json file", fjson) + jfile = open(fjson) + model_data = json.load(jfile) + #print("Loaded json with keys:", model_data.keys()) + print("hdf5_path=", hdf5_path) + # Opening JSON file from remote url + # json_url = "https://raw.githubusercontent.com/HARPgroup/vahydro/master/R/modeling/nhd/nhd_simple_8566737.json" + #jraw = requests.get(json_url, verify=False) + #model_json = jraw.content.decode('utf-8') + # returns JSON object as 
Dict + # returns JSON object as Dict + #model_exec_list = np.asarray([]) + #container = False + # call it! + model_loader_recursive(model_data, model_root_object) + print("Loaded the following objects & paths") + print("Insuring all paths are valid, and connecting models as inputs") + model_path_loader(model_object_cache) + # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added + print("Tokenizing models") + model_exec_list = [] + model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) + #print("model_exec_list:", model_exec_list) + # This is used to stash the model_exec_list -- is this used? + op_tokens[0] = np.asarray(model_exec_list, dtype="i8") + ivol_state_path = '/STATE/RCHRES_R001' + "/IVOLin" + if (ivol_state_path in state_paths): + ivol_ix = state_paths[ivol_state_path] + #print("IVOLin found. state_paths = ", ivol_ix) + print("IVOLin op_tokens ", op_tokens[ivol_ix]) + print("IVOLin state_ix = ", state_ix[ivol_ix]) + else: + print("Could not find",ivol_state_path) + #print("Could not find",ivol_state_path,"in", state_paths) + return + # the resulting set of objects is returned. + state['model_object_cache'] = model_object_cache + state['op_tokens'] = op_tokens + state['state_step_om'] = 'disabled' + if len(op_tokens) > 1: + state['state_step_om'] = 'enabled' + return + +def state_load_dynamics_om(state, io_manager, siminfo): + # this function will check to see if any of the multiple paths to loading + # dynamic operational model objects has been supplied for the model. + # - function "om_init_model": This function can be defined in the [model h5 base].py file containing things to be done early in the model loading, like setting up model objects. This file will already have been loaded by the state module, and will be present in the module variable hsp2_local_py (we should rename to state_local_py?) 
+ # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will be loadable by "model_loader_recursive()" + # Grab globals from state for easy handling + op_tokens, model_object_cache = init_om_dicts() + state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] + # set globals on ModelObject + ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) + # Create the base that everything is added to. + # this object does nothing except host the rest. + model_root_object = ModelObject("") + # set up the timer as the first element + timer = SimTimer('timer', model_root_object, siminfo) + # Opening JSON file + # load the json data from a pre-generated json file on github + + local_path = os.getcwd() + print("Path:", local_path) + # try this + hdf5_path = io_manager._input.file_path + (fbase, fext) = os.path.splitext(hdf5_path) + # see if there is a code module with custom python + print("Looking for custom om loader in python code ", (fbase + ".py")) + hsp2_local_py = state['hsp2_local_py'] + # Load a function from code if it exists + if 'om_init_model' in dir(hsp2_local_py): + hsp2_local_py.om_init_model(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache) + + # see if there is custom json + fjson = fbase + ".json" + print("Looking for custom om json ", fjson) + model_data = {} + if (os.path.isfile(fjson)): + print("Found local json file", fjson) + jfile = open(fjson) + model_data = json.load(jfile) + # now parse this json/dict into model objects + model_loader_recursive(model_data, model_root_object) + print("Loaded objects & paths: insures all paths are valid, connects models as inputs") + model_path_loader(model_object_cache) + # len() will be 1 if we only have a simtimer, but > 1 if 
we have a river being added + print("Tokenizing models") + model_exec_list = [] + model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) + print("model_exec_list:", model_exec_list) + # This is used to stash the model_exec_list -- is this used? + op_tokens[0] = np.asarray(model_exec_list, dtype="i8") + # the resulting set of objects is returned. + state['model_object_cache'] = model_object_cache + state['op_tokens'] = op_tokens + state['state_step_om'] = 'disabled' + if len(op_tokens) > 0: + state['state_step_om'] = 'enabled' + return + +# model class reader +# get model class to guess object type in this lib +# the parent object must be known +def model_class_loader(model_name, model_props, container = False): + # todo: check first to see if the model_name is an attribute on the container + # Use: if hasattr(container, model_name): + # if so, we set the value on the container, if not, we create a new subcomp on the container + if model_props == None: + return False + if type(model_props) is str: + if is_float_digit(model_props): + model_object = ModelConstant(model_name, container, float(model_props) ) + return model_object + else: + return False + elif type(model_props) is dict: + object_class = model_props.get('object_class') + if object_class == None: + # return as this is likely an attribute that is used for the containing class as attribute + # and is handled by the container + # todo: we may want to handle this here? Or should this be a method on the class? + # Use: if hasattr(container, model_name): + return False + model_object = False + # Note: this routine uses the ".get()" method of the dict class type + # for attributes to pass in. + # ".get()" will return NoValue if it does not exist or the value. 
+ if object_class == 'Equation': + eqn = model_props.get('equation') + if type(eqn) is str: + eqn_str = eqn + else: + if eqn == None: + # try for equation stored as normal propcode + eqn_str = model_props.get('value') + else: + eqn_str = eqn.get('value') + if eqn_str == None: + raise Exception("Equation object", model_name, "does not have a valid equation string. Halting. ") + return False + model_object = Equation(model_props.get('name'), container, eqn_str ) + #remove_used_keys(model_props, + elif object_class == 'SimpleChannel': + model_object = SimpleChannel(model_props.get('name'), container, model_props ) + elif object_class == 'Constant': + model_object = ModelConstant(model_props.get('name'), container, model_props.get('value') ) + elif ( object_class.lower() == 'datamatrix'): + # add a matrix with the data, then add a matrix accessor for each required variable + has_props = DataMatrix.check_properties(model_props) + if has_props == False: + print("Matrix object must have", DataMatrix.required_properties()) + return False + # create it + model_object = DataMatrix(model_props.get('name'), container, model_props) + elif object_class == 'ModelBroadcast': + # add a matrix with the data, then add a matrix accessor for each required variable + #print("Loading ModelBroadcast class ") + has_props = ModelBroadcast.check_properties(model_props) + if has_props == False: + print("ModelBroadcast object must have", ModelBroadcast.required_properties()) + return False + # create it + model_object = ModelBroadcast(model_props.get('name'), container, model_props) + elif object_class == 'MicroWatershedModel': + # add a matrix with the data, then add a matrix accessor for each required variable + has_props = MicroWatershedModel.check_properties(model_props) + if has_props == False: + print("MicroWatershedModel object must have", MicroWatershedModel.required_properties()) + return False + # create it + model_object = DataMatrix(model_props.get('name'), container, model_props) 
+ + elif object_class == 'ModelLinkage': + right_path = '' + link_type = False + left_path = False + if 'right_path' in model_props.keys(): + right_path = model_props['right_path'] + if 'link_type' in model_props.keys(): + link_type = model_props['link_type'] + if 'left_path' in model_props.keys(): + left_path = model_props['left_path'] + model_object = ModelLinkage(model_props.get('name'), container, right_path, link_type, left_path) + else: + print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject") + model_object = ModelObject(model_props.get('name'), container) + # one way to insure no class attributes get parsed as sub-comps is: + # model_object.remove_used_keys() + if len(model_object.model_props_parsed) == 0: + # attach these to the object for posterity + model_object.model_props_parsed = model_props + # better yet to just NOT send those attributes as typed object_class arrays, instead just name : value + return model_object + +def model_class_translate(model_props, object_class): + # make adjustments to non-standard items + # this might better be moved to methods on the class handlers + if object_class == 'hydroImpoundment': + # special handling of matrix/storage_stage_area column + # we need to test to see if the storage table has been renamed + # make table from matrix or storage_stage_area + # then make accessors from + storage_stage_area = model_props.get('storage_stage_area') + matrix = model_props.get('matrix') + if ( (storage_stage_area == None) and (matrix != None)): + model_props['storage_stage_area'] = matrix + del model_props['matrix'] + if object_class == 'broadCastObject': + model_props['object_class'] = 'ModelBroadcast' + model_props['broadcast_channel'] = model_props['broadcast_class'] + if object_class == 'USGSChannelGeomObject_sub': + model_props['object_class'] = 'SimpleChannel' + print("Handling USGSChannelGeomObject_sub as SimpleChannel") + if object_class == 'hydroImpoundment': + 
model_props['object_class'] = 'SimpleImpoundment' + print("Handling hydroImpoundment as SimpleImpoundment") + if object_class == 'hydroImpSmall': + model_props['object_class'] = 'SimpleImpoundment' + print("Handling hydroImpSmall as SimpleImpoundment") + +def model_loader_recursive(model_data, container): + k_list = model_data.keys() + object_names = dict.fromkeys(k_list , 1) + if type(object_names) is not dict: + return False + for object_name in object_names: + #print("Handling", object_name) + if object_name in {'name', 'object_class', 'id', 'value', 'default'}: + # we should ask the class what properties are part of the class and also skips these + # therefore, we can assume that anything else must be a child object that needs to + # be handled first -- but how to do this? + continue + model_props = model_data[object_name] + if type(model_props) is not dict: + # this is a constant, the loader is built to handle this, but this causes errors with + # properties on the class that are expected so we just skip and trust that all constants + # are formally declared as type Constant + continue + if type(model_props) is dict: + if not ('object_class' in model_props): + # this is either a class attribute or an un-handleable meta-data + # if the class atttribute exists, we should pass it to container to load + #print("Skipping un-typed", object_name) + continue + #print("Translating", object_name) + # this is a kludge, but can be important + object_class = model_props['object_class'] + model_class_translate(model_props, object_class) + # now we either have a constant (key and value), or a + # fully defined object. Either one should work OK. + #print("Trying to load", object_name) + model_object = model_class_loader(object_name, model_props, container) + if model_object == False: + print("Could not load", object_name) + continue # not handled, but for now we will continue, tho later we should bail? 
+ # now for container type objects, go through its properties and handle + if type(model_props) is dict: + model_loader_recursive(model_props, model_object) + +def model_path_loader(model_object_cache): + k_list = model_object_cache.keys() + model_names = dict.fromkeys(k_list , 1) + for model_name in model_names: + #print("Loading paths for", model_name) + model_object = model_object_cache[model_name] + model_object.find_paths() + + +def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): + """ + Given a root model_object, trace the inputs to load things in order + Store this order in model_exec_list + Note: All ordering is as-needed organic, except Broadcasts + - read from children is completed after all other inputs + - read from parent is completed before all other inputs + - could this be accomplished by more sophisticated handling of read + broadcasts? + - When loading a read broadcast, can we iterate through items + that are sending to that broadcast? + - Or is it better to let it as it is, + """ + if model_object.ix in model_exec_list: + return + if model_object.ix in model_touch_list: + #print("Already touched", model_object.name, model_object.ix, model_object.state_path) + return + # record as having been called, and will ultimately return, to prevent recursions + model_touch_list.append(model_object.ix) + k_list = model_object.inputs.keys() + input_names = dict.fromkeys(k_list , 1) + if type(input_names) is not dict: + return + # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? + # is this a limitation of treating all children as inputs? + # alternative, leave broadcasts organic, but load children first? + # children first, then local sub-comps is old method? 
old method: + # - read parent broadcasts + # - get inputs (essentially, linked vars) + # - send child broadcasts (will send current step parent reads, last step local proc data) + # - execute children + # - execute local sub-comps + for input_name in input_names: + #print("Checking input", input_name) + input_path = model_object.inputs[input_name] + if input_path in model_object_cache.keys(): + input_object = model_object_cache[input_path] + model_tokenizer_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) + else: + if input_path in model_object.state_paths.keys(): + # this is a valid state reference without an object + # thus, it is likely part of internals that are manually added + # which should be fine. tho perhaps we should have an object for these too. + continue + print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") + return + # now after tokenizing all inputs this should be OK to tokenize + model_object.add_op_tokens() + model_exec_list.append(model_object.ix) + + +def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts): + # Decide on using from utilities.py: + # - save_timeseries(io_manager, ts, savedict, siminfo, saveall, operation, segment, activity, compress=True) + # Or, skip the save_timeseries wrapper and call write_ts() directly in io.py: + # write_ts(self, data_frame:pd.DataFrame, save_columns: List[str], category:Category, operation:Union[str,None]=None, segment:Union[str,None]=None, activity:Union[str,None]=None) + # see line 317 in utilities.py for use example of write_ts() + x = 0 # dummy + return + +@njit +def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps): + checksum = 0.0 + for step in range(steps): + pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + return checksum + +@njit +def pre_step_model(model_exec_list, op_tokens, 
state_ix, dict_ix, ts_ix, step): + for i in model_exec_list: + if op_tokens[i][0] == 1: + pass + elif op_tokens[i][0] == 2: + pass + elif op_tokens[i][0] == 3: + pass + elif op_tokens[i][0] == 4: + pass + elif op_tokens[i][0] == 5: + pass + elif op_tokens[i][0] == 11: + pre_step_register(op_tokens[i], state_ix, dict_ix) + return + +@njit +def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + val = 0 + for i in model_exec_list: + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return + +@njit +def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + return + +@njit +def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): + # op_tokens is passed in for ops like matrices that have lookups from other + # locations. All others rely only on ops + # todo: decide if all step_[class() functions should set value in state_ix instead of returning value? + val = 0 + if debug == 1: + print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) + if ops[0] == 1: + state_ix[ops[1]] = step_equation(ops, state_ix) + elif ops[0] == 2: + # todo: this should be moved into a single function, + # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) + if (ops[1] == ops[2]): + if debug == 1: + print("DEBUG: Calling exec_tbl_values", ops) + # this insures a matrix with variables in it is up to date + # only need to do this if the matrix data and matrix config are on same object + # otherwise, the matrix data is an input and has already been evaluated + state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) + if (ops[3] > 0): + # this evaluates a single value from a matrix if the matrix is configured to do so. 
+ if debug == 1: + print("DEBUG: Calling exec_tbl_eval", ops) + state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + elif ops[0] == 3: + step_model_link(ops, state_ix, ts_ix, step) + elif ops[0] == 4: + val = 0 + elif ops[0] == 5: + step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) + elif ops[0] == 9: + val = 0 + elif ops[0] == 13: + step_simple_channel(ops, state_ix, dict_ix, step) + # Op 100 is Basic ACTION in Special Actions + elif ops[0] == 100: + state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix) + return + + +@njit +def test_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + val = 0 + for i in model_exec_list: + print(i) + print(op_tokens[i][0]) + print(op_tokens[i]) + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py new file mode 100644 index 00000000..caecc7d9 --- /dev/null +++ b/HSP2/om_model_object.py @@ -0,0 +1,340 @@ +""" +The class ModelObject is the base class upon which all other dynamic model objects are built on. +It handles all Dict management functions, but provides for no runtime execution of it's own. +All runtime exec is done by child classes. 
+""" +from HSP2.om import * +from HSP2.state import * +from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric +from pandas import Timestamp, Timedelta, read_hdf, read_csv + +class ModelObject: + state_ix = {} # Shared Dict with the numerical state of each object + state_paths = {} # Shared Dict with the hdf5 path of each object + dict_ix = {} # Shared Dict with the hdf5 path of each object + ts_ix = {} # Shared Dict with the hdf5 path of each object + op_tokens = {} # Shared Dict with the tokenized representation of each object + model_object_cache = {} # Shared with actual objects, keyed by their path + model_exec_list = {} # Shared with actual objects, keyed by their path + + def __init__(self, name, container = False): + self.name = name + self.container = container # will be a link to another object + self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" + self.attribute_path = "" # + self.model_props_parsed = {} # a place to stash parse record for debugging + if (hasattr(self,'state_path') == False): + # if the state_path has already been set, we accept it. + # this allows sub-classes to override the standard path guessing approach. + self.state_path = "" # Ex: "/STATE/RCHRES_001" # the pointer to this object state + self.inputs = {} # associative array with key=local_variable_name, value=hdf5_path Ex: [ 'Qin' : '/STATE/RCHRES_001/IVOL' ] + self.inputs_ix = {} # associative array with key=local_variable_name, value=state_ix integer key + self.ix = False + self.paths_found = False # this should be False at start + self.default_value = 0.0 + self.ops = [] + self.optype = 0 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment + # this is replaceable. 
to replace state_path/re-register the index : + # - remove the old PATH from state_paths: del state_paths[self.state_path] + # you should never create an object without knowing its container, but if you do + # you can TRY to do the following: + # - set this objects new path based on containment and call: + # [my_object].make_paths() + # - add this manually to state_paths: + # state_paths[[my_object].state_path] = [my_object].ix + # - call [my_object].register_path() + self.register_path() + + @staticmethod + def required_properties(): + # returns a list or minimum properties to create. + # see ModelConstant below for how to call this in a sub-class + # note: + # req_props = super(DataMatrix, DataMatrix).required_properties() + req_props = ['name'] + return req_props + + @classmethod + def check_properties(cls, model_props): + # this is for pre-screening properties for validity in model creation routines + # returns True or False and can be as simple as checking the list of required_properties + # or a more detailed examination of suitability of what those properties contain + req_props = cls.required_properties() + matching_props = set(model_props).intersection(set(req_props)) + if len(matching_props) < len(req_props): + return False + return True + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # this checks to see if the prop is in dict with value form, or just a value + # strict = True causes an exception if property is missing from model_props dict + prop_val = model_props.get(prop_name) + if type(prop_val) == list: + prop_val = prop_val.get('value') + elif type(prop_val) == dict: + prop_val = prop_val.get('value') + if strict and (prop_val == None): + raise Exception("Cannot find property " + prop_name + " in properties passed to "+ self.name + " and strict = True. Object creation halted. 
Path to object with error is " + self.state_path) + return prop_val + + def parse_model_props(self, model_props, strict = False ): + # sub-classes will allow an create argument "model_props" and handle them here. + # see also: handle_prop(), which will be called y parse_model_props + # for all attributes supported by the class + self.model_props_parsed = model_props + return True + + def set_state(self, set_value): + var_ix = set_state(self.state_ix, self.state_paths, self.state_path, set_value) + return var_ix + + def load_state_dicts(self, op_tokens, state_paths, state_ix, dict_ix): + self.op_tokens = op_tokens + self.state_paths = state_paths + self.state_ix = state_ix + self.dict_ix = dict_ix + + def save_object_hdf(self, hdfname, overwrite = False ): + # save the object in the full hdf5 path + # if overwrite = True replace this and all children, otherwise, just save this. + # note: "with" statement helps prevent unclosed resources, see: https://www.geeksforgeeks.org/with-statement-in-python/ + with HDFStore(hdfname, mode = 'a') as store: + dummy_var = True + + def make_paths(self, base_path = False): + if base_path == False: # we are NOT forcing paths + if not (self.container == False): + self.state_path = self.container.state_path + "/" + self.name + self.attribute_path = self.container.attribute_path + "/" + self.name + elif self.name == "": + self.state_path = "/STATE" + self.attribute_path = "/OBJECTS" + else: + self.state_path = "/STATE/" + self.name + self.attribute_path = "/OBJECTS/" + self.name + else: + # base_path is a Dict with state_path and attribute_path set + self.state_path = base_path['STATE'] + self.name + self.attribute_path = base_path['OBJECTS'] + self.name + return self.state_path + + def get_state(self, var_name = False): + if var_name == False: + return self.state_ix[self.ix] + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + if (var_ix == False): + return False + 
return self.state_ix[var_ix] + + def get_exec_order(self, var_name = False): + if var_name == False: + var_ix = self.ix + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + exec_order = get_exec_order(self.model_exec_list,var_ix) + return exec_order + + def get_object(self, var_name = False): + if var_name == False: + return self.model_object_cache[self.state_path] + else: + var_path = self.find_var_path(var_name) + return self.model_object_cache[var_path] + + + def find_var_path(self, var_name, local_only = False): + # check local inputs for name + if var_name in self.inputs.keys(): + #print("Found", var_name, "on ", self.name, "path=", self.inputs[var_name]) + return self.inputs[var_name] + if local_only: + return False # we are limiting the scope, so just return + # check parent for name + if not (self.container == False): + #print(self.name,"looking to parent", self.container.name, "for", var_name) + return self.container.find_var_path(var_name) + # check for root state vars STATE + var_name + if ("/STATE/" + var_name) in self.state_paths.keys(): + #return self.state_paths[("/STATE/" + var_name)] + return ("/STATE/" + var_name) + # check for root state vars + if var_name in self.state_paths.keys(): + #return self.state_paths[var_name] + return var_name + #print(self.name, "could not find", var_name) + return False + + def constant_or_path(self, keyname, keyval, trust = False): + #print("Called constant_or_path with", keyname, " = ", keyval) + if is_float_digit(keyval): + # we are given a constant value, not a variable reference + #print("Creating constant ", keyname, " = ", keyval) + k = ModelConstant(keyname, self, float(keyval)) + kix = k.ix + else: + #print("Adding input ", keyname, " = ", keyval) + kix = self.add_input(keyname, keyval, 2, trust) + return kix + + def register_path(self): + # initialize the path variable if not already set + #print(self.name,"called register_path()") + if 
self.state_path == '': + self.make_paths() + #print("Setting ", self.name, "state to", self.default_value) + self.ix = set_state(self.state_ix, self.state_paths, self.state_path, self.default_value) + # store object in model_object_cache + if not (self.state_path in self.model_object_cache.keys()): + self.model_object_cache[self.state_path] = self + # this should check to see if this object has a parent, and if so, register the name on the parent + # default is as a child object. + if not (self.container == False): + #print("Adding", self.name,"as input to", self.container.name) + # since this is a request to actually create a new path, we instruct trust = True as last argument + return self.container.add_input(self.name, self.state_path, 1, True) + return self.ix + + def add_input(self, var_name, var_path, input_type = 1, trust = False): + # this will add to the inputs, but also insure that this + # requested path gets added to the state/exec stack via an input object if it does + # not already exist. + # - var_name = the local name for this linked entity/attribute + # - var_path = the full path of the entity/attribute we are linking to + # - input types: 1: parent-child link, 2: state property link, 3: timeseries object property link + # - trust = False means fail if the path does not already exist, True means assume it will be OK which is bad policy, except for the case where the path points to an existing location + # do we have a path here already or can we find on the parent? + # how do we check if this is a path already, in which case we trust it? + # todo: we should be able to alias a var_name to a var_path, for example + # calling add_input('movar', 'month', 1, True) + # this *should* search for month and find the STATE/month variable + # BUT this only works if both var_name and var_path are month + # so add_input('month', 'month', 1, True) works. 
+ found_path = self.find_var_path(var_path) + #print("Searched", var_name, "with path", var_path,"found", found_path) + var_ix = get_state_ix(self.state_ix, self.state_paths, found_path) + if var_ix == False: + if (trust == False): + raise Exception("Cannot find variable path: " + var_path + " when adding input to object " + self.name + " as input named " + var_name + " ... process terminated. Path to object with error is " + self.state_path) + var_ix = self.insure_path(var_path) + else: + # if we are to trust the path, this might be a child property just added, + # and therefore, we don't look further than this + # otherwise, we use found_path, whichever it is, as + # we know that this path is better, as we may have been given a simple variable name + # and so found_path will look more like /STATE/RCHRES_001/... + if trust == False: + var_path = found_path + self.inputs[var_name] = var_path + self.inputs_ix[var_name] = var_ix + return self.inputs_ix[var_name] + + def add_object_input(self, var_name, var_object, link_type = 1): + # See above for details. + # this adds an object as a link to another object + self.inputs[var_name] = var_object.state_path + self.inputs_ix[var_name] = var_object.ix + return self.inputs_ix[var_name] + + def create_parent_var(self, parent_var_name, source_object): + # see decision points: https://github.com/HARPgroup/HSPsquared/issues/78 + # This is used when an object sets an additional property on its parent + # Like in simple_channel sets [channel prop name]_Qout on its parent + # Generally, this should have 2 components. 
+ # 1 - a state variable on the child (this could be an implicit sub-comp, or a constant sub-comp, the child handles the setup of this) see constant_or_path() + # 2 - an input link + self.container.add_object_input(parent_var_name, source_object, 1) + + def insure_path(self, var_path): + # if this path can be found in the hdf5 make sure that it is registered in state + # and that it has needed object class to render it at runtime (some are automatic) + # RIGHT NOW THIS DOES NOTHING TO CHECK IF THE VAR EXISTS THIS MUST BE FIXED + var_ix = set_state(self.state_ix, self.state_paths, var_path, 0.0) + return var_ix + + def get_dict_state(self, ix = -1): + if ix >= 0: + return self.dict_ix[ix] + return self.dict_ix[self.ix] + + def find_paths(self): + # Note: every single piece of data used by objects, even constants, are resolved to a PATH in the hdf5 + # find_paths() is called to insure that all of these can be found, and then, are added to inputs/inputs_ix + # - We wait to find the index values for those variables after all things have been loaded + # - base ModelObject does not have any "implicit" inputs, since all of its inputs are + # explicitly added children objects, thus we default to True + self.paths_found = True + # - But children such as Equation and DataMatrix, etc + # so they mark paths_found = False and then + # should go through their own locally defined data + # and call add_input() for any data variables encountered + # - add_input() will handle searching for the paths and ix values + # and should also handle deciding if this is a constant, like a numeric value + # or a variable data and should handle them accordingly + return True + + def tokenize(self): + # renders tokens for high speed execution + if (self.paths_found == False): + raise Exception("path_found False for object" + self.name + "(" + self.state_path + "). " + "Tokens cannot be generated until method '.find_paths()' is run for all model objects ... process terminated. 
(see function `model_path_loader(model_object_cache)`)") + self.ops = [self.optype, self.ix] + + def add_op_tokens(self): + # this puts the tokens into the global simulation queue + # can be customized by subclasses to add multiple lines if needed. + if self.ops == []: + self.tokenize() + #print(self.name, "tokens", self.ops) + self.op_tokens[self.ix] = np.asarray(self.ops, dtype="i8") + + def step(self, step): + # this tests the model for a single timestep. + # this is not the method that is used for high-speed runs, but can theoretically be used for + # easier to understand demonstrations + step_one(self.op_tokens, self.op_tokens[self.ix], self.state_ix, self.dict_ix, self.ts_ix, step) + #step_model({self.op_tokens[self.ix]}, self.state_ix, self.dict_ix, self.ts_ix, step) + + def dddstep_model(op_tokens, state_ix, dict_ix, ts_ix, step): + for i in op_tokens.keys(): + if op_tokens[i][0] == 1: + state_ix[i] = step_equation(op_tokens[i], state_ix) + elif op_tokens[i][0] == 2: + state_ix[i] = exec_tbl_eval(op_tokens[i], state_ix, dict_ix) + elif op_tokens[i][0] == 3: + step_model_link(op_tokens[i], state_ix, ts_ix, step) + elif op_tokens[i][0] == 4: + return False + elif op_tokens[i][0] == 5: + step_sim_timer(op_tokens[i], state_ix, dict_ix, ts_ix, step) + return + +""" +The class ModelConstant is for storing constants. It must be loaded here because ModelObject calls it. +Is this useful or just clutter? Useful I think since there are numerical constants... +""" +class ModelConstant(ModelObject): + def __init__(self, name, container = False, value = 0.0, state_path = False): + if (state_path != False): + # this allows us to mandate the location. useful for placeholders, broadcasts, etc. 
+        self.state_path = state_path
+        super(ModelConstant, self).__init__(name, container)
+        self.default_value = float(value)
+        self.optype = 7 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric)
+        #print("ModelConstant named",self.name, "with path", self.state_path,"and ix", self.ix, "value", value)
+        var_ix = self.set_state(float(value))
+        self.paths_found = True
+        # self.state_ix[self.ix] = self.default_value
+
+    def required_properties():
+        req_props = super(ModelConstant, ModelConstant).required_properties()
+        req_props.extend(['value'])
+        return req_props
+
+# njit functions for runtime
+
+@njit
+def exec_model_object( op, state_ix, dict_ix):
+    ix = op[1]
+    return 0.0
\ No newline at end of file
diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py
new file mode 100644
index 00000000..6d78ed59
--- /dev/null
+++ b/HSP2/om_special_action.py
@@ -0,0 +1,54 @@
+"""
+The class SpecialAction is used to support original HSPF ACTIONS.
+"""
+from HSP2.state import *
+from HSP2.om import *
+from HSP2.om_model_object import ModelObject
+from numba import njit
+class SpecialAction(ModelObject):
+    def __init__(self, name, container = False, model_props = []):
+        super(SpecialAction, self).__init__(name, container)
+        self.src_op = self.handle_prop(model_props, 'input')
+        self.dest_op = self.handle_prop(model_props, 'target')
+        self.cop = self.handle_prop(model_props, 'op')
+
+        self.optype = 100 # Special Actions start indexing at 100
+
+    def tokenize(self):
+        # call parent method to set basic ops common to all
+        super().tokenize()
+        # cop_codes maps the ACTION assignment operator string to an integer op code
+        cop_codes = {
+            '+=': 0,
+            '-=': 1,
+            '*=': 2,
+            '/=': 3,
+        }
+        self.ops = self.ops + [self.left_ix, cop_codes[self.cop], self.right_ix] # NOTE(review): left_ix/right_ix are never set in __init__ -- confirm where these are resolved before tokenize() runs
+
+    def add_op_tokens(self):
+        # this puts the tokens into the global simulation queue
+        # can be customized by subclasses to add multiple lines if needed.
+        super().add_op_tokens()
+
+# njit functions for runtime
+
+@njit
+def exec_saction(op, state_ix, dict_ix):
+    ix = op[1] # ID of this op
+    dix = op[2] # ID of place to store data
+    # these indices must be adjusted to reflect the number of common op tokens
+    # SpecialAction has:
+    # - type of condition (+=, -=, ...)
+    # - operand 1 (left side)
+    # - operand 2 (right side)
+    cop = op[3]
+    ix1 = op[4]
+    ix2 = op[5]
+    if cop == 0:
+        result = state_ix[ix1] + state_ix[ix2]
+    if cop == 1:
+        result = state_ix[ix1] - state_ix[ix2]
+
+    return result # NOTE(review): result is unbound for cop codes 2 and 3 -- add the *= and /= branches
+
From 6c4f4e3f9353d8cb0610c4cf7453b9013e69d24f Mon Sep 17 00:00:00 2001
From: Burgholzer
Date: Thu, 21 Dec 2023 12:47:23 -0500
Subject: [PATCH 04/35] add basic om comps

---
 HSP2/main.py             |   1 +
 HSP2/om.py               |  21 ++----
 HSP2/om_model_linkage.py | 134 +++++++++++++++++++++++++++++++++++++++
 HSP2/om_model_object.py  |  11 +---
 HSP2/state.py            |   4 ++
 5 files changed, 146 insertions(+), 25 deletions(-)
 create mode 100644 HSP2/om_model_linkage.py

diff --git a/HSP2/main.py b/HSP2/main.py
index abd954ae..1fbfa164 100644
--- a/HSP2/main.py
+++ b/HSP2/main.py
@@ -75,6 +75,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
         hydr_init_ix(state['state_ix'], state['state_paths'], state['domain'])
     # - finally stash specactions in state, not domain (segment) dependent so do it once
     state['specactions'] = specactions # stash the specaction dict in state
+    state_load_dynamics_om(state, io_manager, siminfo)
     #######################################################################################
     # main processing loop
     msg(1, f'Simulation Start: {start}, Stop: {stop}')
diff --git a/HSP2/om.py b/HSP2/om.py
index 3ead410a..74affcc5 100644
--- a/HSP2/om.py
+++ b/HSP2/om.py
@@ -53,12 +53,12 @@ def is_float_digit(n: str) -> bool:
         return False
 
 from HSP2.om_model_object import *
-from HSP2.om_sim_timer import *
-from HSP2.om_equation import *
+#from HSP2.om_sim_timer import *
+#from HSP2.om_equation import *
 from HSP2.om_model_linkage
import * -from HSP2.om_data_matrix import * -from HSP2.om_model_broadcast import * -from HSP2.om_simple_channel import * +#from HSP2.om_data_matrix import * +#from HSP2.om_model_broadcast import * +#from HSP2.om_simple_channel import * from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries def init_om_dicts(): @@ -381,16 +381,7 @@ def model_class_loader(model_name, model_props, container = False): model_object = DataMatrix(model_props.get('name'), container, model_props) elif object_class == 'ModelLinkage': - right_path = '' - link_type = False - left_path = False - if 'right_path' in model_props.keys(): - right_path = model_props['right_path'] - if 'link_type' in model_props.keys(): - link_type = model_props['link_type'] - if 'left_path' in model_props.keys(): - left_path = model_props['left_path'] - model_object = ModelLinkage(model_props.get('name'), container, right_path, link_type, left_path) + model_object = ModelLinkage(model_props.get('name'), container, model_props) else: print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject") model_object = ModelObject(model_props.get('name'), container) diff --git a/HSP2/om_model_linkage.py b/HSP2/om_model_linkage.py new file mode 100644 index 00000000..ed798d8c --- /dev/null +++ b/HSP2/om_model_linkage.py @@ -0,0 +1,134 @@ +""" +The class ModelLinkage is used to translate copy data from one state location to another. +It is also used to make an implicit parent child link to insure that an object is loaded +during a model simulation. 
+"""
+from HSP2.state import *
+from HSP2.om import *
+from HSP2.om_model_object import ModelObject
+from numba import njit
+class ModelLinkage(ModelObject):
+    def __init__(self, name, container = False, model_props = []):
+        super(ModelLinkage, self).__init__(name, container)
+        # ModelLinkage copies a value from right to left
+        # right_path: is the data source for the link
+        # left_path: is the destination of the link
+        # left_path: is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path
+        #   - left_path parameter is only needed for pushes (type 4 and 5)
+        #   - the push is functionally equivalent to a pull whose path resolves to the specified left_path
+        #   - but the push allows the potential for multiple objects to set a single state
+        #     This can be dangerous or difficult to debug, but essential to replicate old HSPF behaviour
+        #     especially in the case of If/Then type structures.
+        #     it is also useful for the broadcast objects, see om_model_broadcast for those
+        # link_type: 1 - local parent-child, 2 - local property link (state data), 3 - remote linkage (ts data only), 4 - push to accumulator (like a hub), 5 - overwrite remote value
+        self.optype = 3 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - ModelLinkage, 4 -
+        if container == False:
+            # this is required
+            print("Error: a link must have a container object to serve as the destination")
+            return False
+        self.right_path = self.handle_prop(model_props, 'right_path')
+        self.link_type = self.handle_prop(model_props, 'link_type', False, 0)
+        self.left_path = self.handle_prop(model_props, 'left_path')
+
+        if self.left_path == False:
+            # self.state_path gets set when creating at the parent level
+            self.left_path = self.state_path
+        # this breaks for some reason, doesn't like the input name being different than the variable path ending?
+        self.add_input(self.right_path, self.right_path)
+
+    def handle_prop(self, model_props, prop_name, strict = False, default_value = None ):
+        # this checks to see if the prop is in dict with value form, or just a value
+        # strict = True causes an exception if property is missing from model_props dict
+        prop_val = super().handle_prop(model_props, prop_name, strict, default_value)
+        if ( (prop_name == 'right_path') and ((prop_val == None) or (prop_val == '')) ):
+            raise Exception("right_path cannot be empty. Object creation halted. Path to object with error is " + self.state_path)
+        return prop_val
+
+    @staticmethod
+    def required_properties():
+        # returns a list of minimum properties to create.
+        # see ModelConstant below for how to call this in a sub-class
+        # note:
+        #   req_props = super(DataMatrix, DataMatrix).required_properties()
+        req_props = ['name', 'right_path']
+        return req_props
+
+    def find_paths(self):
+        # this should only be needed if this is a PUSH link_type = 4 or 5
+        super().find_paths()
+        self.paths_found = False # override parent setting until we verify everything
+        # do we need to do this, or just trust it exists?
+ #self.insure_path(self, self.right_path) + # the left path, if this is type 4 or 5, is a push, so we must require it + if ( (self.link_type == 4) or (self.link_type == 5) ): + self.insure_path(self.left_path) + self.paths_found = True + return + + def tokenize(self): + super().tokenize() + # - if this is a data property link then we add op codes to do a copy of data from one state address to another + # - if this is simply a parent-child connection, we do not render op-codes, but we do use this for assigning + # - execution hierarchy + if self.link_type in (2, 3): + src_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if not (src_ix == False): + self.ops = self.ops + [src_ix, self.link_type] + else: + print("Error: link ", self.name, "does not have a valid source path") + #print("tokenize() result", self.ops) + if (self.link_type == 4) or (self.link_type == 5): + # we push to the remote path in this one + left_ix = get_state_ix(self.state_ix, self.state_paths, self.left_path) + right_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if (left_ix != False) and (right_ix != False): + self.ops = self.ops + [left_ix, self.link_type, right_ix] + else: + print("Error: link ", self.name, "does not have valid paths", "(left = ", self.left_path, left_ix, "right = ", self.right_path, right_ix, ")") + #print("tokenize() result", self.ops) + +# Function for use during model simulations of tokenized objects +@njit +def step_model_link(op_token, state_ix, ts_ix, step): + if step == 2: + print("step_model_link() called at step 2 with op_token=", op_token) + if op_token[3] == 1: + return True + elif op_token[3] == 2: + state_ix[op_token[1]] = state_ix[op_token[2]] + elif op_token[3] == 3: + # read from ts variable TBD + # state_ix[op_token[1]] = ts_ix[op_token[2]][step] + return True + elif op_token[3] == 4: + # add value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[2]] + 
state_ix[op_token[4]] + return True + elif op_token[3] == 5: + # overwrite remote variable state with value in another paths state + if step == 2: + print("Setting state_ix[", op_token[2], "] =", state_ix[op_token[4]]) + state_ix[op_token[2]] = state_ix[op_token[4]] + return True + + +def test_model_link(op_token, state_ix, ts_ix, step): + if op_token[3] == 1: + return True + elif op_token[3] == 2: + state_ix[op_token[1]] = state_ix[op_token[2]] + elif op_token[3] == 3: + # read from ts variable TBD + # state_ix[op_token[1]] = ts_ix[op_token[2]][step] + return True + elif op_token[3] == 4: + print("Remote Broadcast accumulator type link.") + print("Setting op ID", str(op_token[2]), "to value from ID", str(op_token[4]), "with value of ") + # add value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[2]] + state_ix[op_token[4]] + print(str(state_ix[op_token[2]]) + " = ", str(state_ix[op_token[2]]) + "+" + str(state_ix[op_token[4]])) + return True + elif op_token[3] == 5: + # push value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[4]] + return True \ No newline at end of file diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index caecc7d9..022bafc6 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -17,7 +17,7 @@ class ModelObject: model_object_cache = {} # Shared with actual objects, keyed by their path model_exec_list = {} # Shared with actual objects, keyed by their path - def __init__(self, name, container = False): + def __init__(self, name, container = False, model_props = []): self.name = name self.container = container # will be a link to another object self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" @@ -148,13 +148,11 @@ def get_object(self, var_name = False): def find_var_path(self, var_name, local_only = False): # check local inputs for name if var_name in self.inputs.keys(): - #print("Found", var_name, "on ", 
self.name, "path=", self.inputs[var_name]) return self.inputs[var_name] if local_only: return False # we are limiting the scope, so just return # check parent for name if not (self.container == False): - #print(self.name,"looking to parent", self.container.name, "for", var_name) return self.container.find_var_path(var_name) # check for root state vars STATE + var_name if ("/STATE/" + var_name) in self.state_paths.keys(): @@ -164,27 +162,21 @@ def find_var_path(self, var_name, local_only = False): if var_name in self.state_paths.keys(): #return self.state_paths[var_name] return var_name - #print(self.name, "could not find", var_name) return False def constant_or_path(self, keyname, keyval, trust = False): - #print("Called constant_or_path with", keyname, " = ", keyval) if is_float_digit(keyval): # we are given a constant value, not a variable reference - #print("Creating constant ", keyname, " = ", keyval) k = ModelConstant(keyname, self, float(keyval)) kix = k.ix else: - #print("Adding input ", keyname, " = ", keyval) kix = self.add_input(keyname, keyval, 2, trust) return kix def register_path(self): # initialize the path variable if not already set - #print(self.name,"called register_path()") if self.state_path == '': self.make_paths() - #print("Setting ", self.name, "state to", self.default_value) self.ix = set_state(self.state_ix, self.state_paths, self.state_path, self.default_value) # store object in model_object_cache if not (self.state_path in self.model_object_cache.keys()): @@ -192,7 +184,6 @@ def register_path(self): # this should check to see if this object has a parent, and if so, register the name on the parent # default is as a child object. 
if not (self.container == False):
-        #print("Adding", self.name,"as input to", self.container.name)
         # since this is a request to actually create a new path, we instruct trust = True as last argument
         return self.container.add_input(self.name, self.state_path, 1, True)
     return self.ix
diff --git a/HSP2/state.py b/HSP2/state.py
index 67ac24a0..35a94778 100644
--- a/HSP2/state.py
+++ b/HSP2/state.py
@@ -130,6 +130,10 @@ def state_siminfo_hsp2(uci_obj, siminfo):
     siminfo['tindex'] = date_range(siminfo['start'], siminfo['stop'], freq=Minute(delt))[1:]
     siminfo['steps'] = len(siminfo['tindex'])
 
+def state_load_hdf5_components(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache):
+    # Implement population of model_object_cache etc from components in a hdf5 such as Special ACTIONS
+    return
+
 def state_load_dynamics_hsp2(state, io_manager, siminfo):
     # Load any dynamic components if present, and store variables on objects
     hsp2_local_py = load_dynamics(io_manager, siminfo)

From c76510218f568ba8980589870ae8549ce7178f12 Mon Sep 17 00:00:00 2001
From: Burgholzer
Date: Fri, 22 Dec 2023 10:13:52 -0500
Subject: [PATCH 05/35] added documentation and modularized the object loading
 and tokenizing code

---
 HSP2/SPECL.py             | 12 ++++++
 HSP2/main.py              |  5 ++-
 HSP2/om.py                | 83 +++++++++++++++++++++++---------------
 HSP2/om_model_object.py   |  2 +-
 HSP2/om_sim_timer.py      | 84 +++++++++++++++++++++++++++++++++++++++
 HSP2/om_special_action.py |  7 ++++
 HSP2/state.py             |  3 +-
 7 files changed, 162 insertions(+), 34 deletions(-)
 create mode 100644 HSP2/om_sim_timer.py

diff --git a/HSP2/SPECL.py b/HSP2/SPECL.py
index c7cf5e23..9b798d94 100644
--- a/HSP2/SPECL.py
+++ b/HSP2/SPECL.py
@@ -10,6 +10,18 @@
 '''
 from numba import njit
+import h5py
+
+def specl_load_actions(state, io_manager, siminfo):
+    for speca in state['specactions']['ACTIONS'].items():
+        pass
+
+def state_load_dynamics_specl(state, io_manager, siminfo):
+    specl_load_actions(state, io_manager, siminfo)
+    # others defined 
below, like:
+    # specl_load_uvnames(state, io_manager, siminfo)
+    # ...
+    return
 
 @njit
 def specl(ui, ts, step, state_info, state_paths, state_ix, specactions):
diff --git a/HSP2/main.py b/HSP2/main.py
index 1fbfa164..73e75d84 100644
--- a/HSP2/main.py
+++ b/HSP2/main.py
@@ -12,6 +12,7 @@
 from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries
 from HSP2.configuration import activities, noop, expand_masslinks
 from HSP2.state import *
+from HSP2.om import *
 from HSP2IO.io import IOManager, SupportsReadTS, Category
 
@@ -50,7 +51,9 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
     ftables = uci_obj.ftables
     specactions = uci_obj.specactions
     monthdata = uci_obj.monthdata
-
+    print("Special Actions", specactions)
+    keysList = list(specactions['ACTIONS'].keys())
+    print(keysList)
     start, stop = siminfo['start'], siminfo['stop']
 
     copy_instances = {}
diff --git a/HSP2/om.py b/HSP2/om.py
index 74affcc5..ae03a344 100644
--- a/HSP2/om.py
+++ b/HSP2/om.py
@@ -53,7 +53,7 @@ def is_float_digit(n: str) -> bool:
         return False
 
 from HSP2.om_model_object import *
-#from HSP2.om_sim_timer import *
+from HSP2.om_sim_timer import *
 #from HSP2.om_equation import *
 from HSP2.om_model_linkage import *
 #from HSP2.om_data_matrix import *
@@ -251,52 +251,73 @@ def load_om_components(io_manager, siminfo, op_tokens, state_paths, state_ix, di
     state['state_step_om'] = 'enabled'
     return
 
+def state_load_json(state, io_manager, siminfo):
+    # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will
+    #   be loadable by "model_loader_recursive()"
+    model_data = {}
+    # JSON file would be in same path as hdf5
+    hdf5_path = io_manager._input.file_path
+    (fbase, fext) = os.path.splitext(hdf5_path)
+    # see if there is custom json
+    fjson = fbase + ".json"
+    print("Looking for custom om json ", fjson)
+    if (os.path.isfile(fjson)):
+        print("Found local json 
file", fjson) + jfile = open(fjson) + model_data = json.load(jfile) + state['model_data'] = model_data + return + +def state_load_om_python(state, io_manager, siminfo): + # Look for a [hdf5 file base].py file with specific named functions + # - function "om_init_model": This function can be defined in the [model h5 base].py file containing things to be done + # early in the model loading, like setting up model objects. This file will already have been loaded by the state module, + # and will be present in the module variable hsp2_local_py (we should rename to state_local_py?) + # - this file may also contain other dynamically redefined functions such as state_step_hydr() + # which can contain code that is executed every timestep inside the _hydr_() function + # and can literally supply hooks for any desired user customizable code + hdf5_path = io_manager._input.file_path + (fbase, fext) = os.path.splitext(hdf5_path) + # see if there is a code module with custom python + print("Looking for custom om loader in python code ", (fbase + ".py")) + hsp2_local_py = state['hsp2_local_py'] + # Load a function from code if it exists + if 'om_init_model' in dir(hsp2_local_py): + hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokens'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) + + def state_load_dynamics_om(state, io_manager, siminfo): # this function will check to see if any of the multiple paths to loading # dynamic operational model objects has been supplied for the model. - # - function "om_init_model": This function can be defined in the [model h5 base].py file containing things to be done early in the model loading, like setting up model objects. This file will already have been loaded by the state module, and will be present in the module variable hsp2_local_py (we should rename to state_local_py?) 
- # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will be loadable by "model_loader_recursive()" # Grab globals from state for easy handling op_tokens, model_object_cache = init_om_dicts() state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] # set globals on ModelObject - ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) - # Create the base that everything is added to. - # this object does nothing except host the rest. + ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = ( + op_tokens, state_paths, state_ix, dict_ix, model_object_cache + ) + # load dynamic coding libraries if defined by user + # note: this used to be inside this function, I think that the loaded module should be no problem + # occuring within this function call, since this function is also called from another runtime engine + # but if things fail post develop-specact-1 pull requests we may investigate here + state_load_om_python(state, io_manager, siminfo) + + # Create the base that everything is added to. this object does nothing except host the rest. 
model_root_object = ModelObject("") # set up the timer as the first element timer = SimTimer('timer', model_root_object, siminfo) - # Opening JSON file - # load the json data from a pre-generated json file on github - - local_path = os.getcwd() - print("Path:", local_path) - # try this - hdf5_path = io_manager._input.file_path - (fbase, fext) = os.path.splitext(hdf5_path) - # see if there is a code module with custom python - print("Looking for custom om loader in python code ", (fbase + ".py")) - hsp2_local_py = state['hsp2_local_py'] - # Load a function from code if it exists - if 'om_init_model' in dir(hsp2_local_py): - hsp2_local_py.om_init_model(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache) - # see if there is custom json - fjson = fbase + ".json" - print("Looking for custom om json ", fjson) - model_data = {} - if (os.path.isfile(fjson)): - print("Found local json file", fjson) - jfile = open(fjson) - model_data = json.load(jfile) - # now parse this json/dict into model objects - model_loader_recursive(model_data, model_root_object) + # now instantiate and link objects + # state['model_data'] has alread been prepopulated from json, .py files, hdf5, etc. + model_loader_recursive(state['model_data'], model_root_object) print("Loaded objects & paths: insures all paths are valid, connects models as inputs") model_path_loader(model_object_cache) # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added - print("Tokenizing models") model_exec_list = [] + # put all objects in token form for fast runtime execution and sort according to dependency order + print("Tokenizing models") model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) + # model_exec_list is the ordered list of component operations print("model_exec_list:", model_exec_list) # This is used to stash the model_exec_list -- is this used? 
op_tokens[0] = np.asarray(model_exec_list, dtype="i8") diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 022bafc6..60d1dd9c 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -3,8 +3,8 @@ It handles all Dict management functions, but provides for no runtime execution of it's own. All runtime exec is done by child classes. """ -from HSP2.om import * from HSP2.state import * +from HSP2.om import * from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric from pandas import Timestamp, Timedelta, read_hdf, read_csv diff --git a/HSP2/om_sim_timer.py b/HSP2/om_sim_timer.py new file mode 100644 index 00000000..8257ca32 --- /dev/null +++ b/HSP2/om_sim_timer.py @@ -0,0 +1,84 @@ +""" +The class SimTimer is used to translate copy data from one state location to another. +It is also used to make an implicit parent child link to insure that an object is loaded +during a model simulation. +""" +from HSP2.state import * +from HSP2.om import * +from HSP2.om_model_object import ModelObject +from pandas import DataFrame, DatetimeIndex +from numba import njit + +class SimTimer(ModelObject): + def __init__(self, name, container, siminfo): + super(SimTimer, self).__init__(name, container) + self.state_path = '/STATE/timer' + self.time_array = self.dti_to_time_array(siminfo) # creates numpy formatted array of year, mo, day, ... for each timestep + self.date_path_ix = [] # where are the are components stored in the state_ix Dict + self.optype = 5 # 0 - ModelObject, 1 - Equation, 2 - datamatrix, 3 - ModelLinkage, 4 - BroadcastChannel, 5 - SimTimer + self.register_components() # now that we have a time array, we set the basic state and all time comps into state + + def register_components(self): + # initialize the path variable if not already set + self.ix = set_state(self.state_ix, self.state_paths, self.state_path, float(self.time_array[0][0])) + # now register all other paths. + # register "year", "month" "day", ... 
+ year_ix = set_state(self.state_ix, self.state_paths, "/STATE/year", float(self.time_array[0][1])) + month_ix = set_state(self.state_ix, self.state_paths, "/STATE/month", float(self.time_array[0][2])) + day_ix = set_state(self.state_ix, self.state_paths, "/STATE/day", float(self.time_array[0][3])) + hr_ix = set_state(self.state_ix, self.state_paths, "/STATE/hour", float(self.time_array[0][4])) + min_ix = set_state(self.state_ix, self.state_paths, "/STATE/minute", float(self.time_array[0][5])) + sec_ix = set_state(self.state_ix, self.state_paths, "/STATE/second", float(self.time_array[0][6])) + wd_ix = set_state(self.state_ix, self.state_paths, "/STATE/weekday", float(self.time_array[0][7])) + dt_ix = set_state(self.state_ix, self.state_paths, "/STATE/dt", float(self.time_array[0][8])) + jd_ix = set_state(self.state_ix, self.state_paths, "/STATE/jday", float(self.time_array[0][9])) + md_ix = set_state(self.state_ix, self.state_paths, "/STATE/modays", float(self.time_array[0][10])) + dts_ix = set_state(self.state_ix, self.state_paths, "/STATE/dts", float(self.time_array[0][8] * 60.0)) + self.date_path_ix = [year_ix, month_ix, day_ix, hr_ix, min_ix, sec_ix, wd_ix, dt_ix, jd_ix, md_ix, dts_ix] + self.dict_ix[self.ix] = self.time_array + + return self.ix + + def tokenize(self): + # call parent method which sets standard ops + # returns an array of data pointers + super().tokenize() # resets ops to common base + self.ops = self.ops + self.date_path_ix # adds timer specific items + + def add_op_tokens(self): + # this puts the tokens into the global simulation queue + # can be customized by subclasses to add multiple lines if needed. 
+ #self.op_tokens[self.ix] = self.ops + super().add_op_tokens() + self.dict_ix[self.ix] = self.time_array + + def dti_to_time_array(self, siminfo): + dateindex = siminfo['tindex'] + dt = siminfo['delt'] + # sim timer is special, one entry for each time component for each timestep + # convert DateIndex to numbers [int(i) for i in dateindex.year] + tdi = { 0: dateindex.astype(np.int64), 1:[float(i) for i in dateindex.year], 2:[float(i) for i in dateindex.month], 3:[float(i) for i in dateindex.day], 4:[float(i) for i in dateindex.hour], 5:[float(i) for i in dateindex.minute], 6:[float(i) for i in dateindex.second], 7:[float(i) for i in dateindex.weekday], 8:[float(dt) for i in dateindex], 9:[float(i) for i in dateindex.day_of_year], 10:[float(i) for i in dateindex.daysinmonth], 11:[float(dt * 60.0) for i in dateindex] } + #tdi = { 0:dateindex.year, 1:dateindex.month, 2:dateindex.day, 3:dateindex.hour, 4:dateindex.minute, 5:dateindex.second } + tid = DataFrame(tdi) + h = 1 # added to increase row count for debug testing. 
+ time_array = tid.to_numpy() + return time_array + +# Function for use during model simulations of tokenized objects +@njit +def step_sim_timer(op_token, state_ix, dict_ix, ts_ix, step): + # note: the op_token and state index are off by 1 since the dict_ix does not store type + #print("Exec step_sim_timer at step:", step, "jday", dict_ix[op_token[1]][step][9] ) + state_ix[op_token[1]] = dict_ix[op_token[1]][step][0] # unix timestamp here + state_ix[op_token[2]] = dict_ix[op_token[1]][step][1] # year + state_ix[op_token[3]] = dict_ix[op_token[1]][step][2] # month + state_ix[op_token[4]] = dict_ix[op_token[1]][step][3] # day + state_ix[op_token[5]] = dict_ix[op_token[1]][step][4] # hour + state_ix[op_token[6]] = dict_ix[op_token[1]][step][5] # minute + state_ix[op_token[7]] = dict_ix[op_token[1]][step][6] # second + state_ix[op_token[8]] = dict_ix[op_token[1]][step][7] # weekday + state_ix[op_token[9]] = dict_ix[op_token[1]][step][8] # dt + state_ix[op_token[10]] = dict_ix[op_token[1]][step][9] # julian day + state_ix[op_token[11]] = dict_ix[op_token[1]][step][10] # modays + state_ix[op_token[12]] = dict_ix[op_token[1]][step][11] # dts + return \ No newline at end of file diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 6d78ed59..d307b511 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -30,6 +30,13 @@ def add_op_tokens(self): # this puts the tokens into the global simulation queue # can be customized by subclasses to add multiple lines if needed. 
super().add_op_tokens() + + @staticmethod + def hdf5_load_all(hdf_source): + specla=hdf_source['/SPEC_ACTIONS/ACTIONS/table'] + for idx, x in np.ndenumerate(specla): + print(x[1].decode("utf-8"),x[2].decode("utf-8"), x[13].decode("utf-8"), x[16].decode("utf-8"), x[17]) + # njit functions for runtime diff --git a/HSP2/state.py b/HSP2/state.py index 35a94778..fe7e268d 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -204,4 +204,5 @@ def load_dynamics(io_manager, siminfo): else: # print("state_step_hydr function not defined. Using default") return False - return hsp2_local_py \ No newline at end of file + return hsp2_local_py + From 02cc1ab9984350f35ef20e0cf1a103c6379af2b4 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 26 Dec 2023 21:04:56 -0500 Subject: [PATCH 06/35] finished parser for specal basic, added function support for testing --- HSP2/HYDR.py | 42 +++- HSP2/SPECL.py | 51 +++- HSP2/main.py | 8 +- HSP2/om.py | 229 +++--------------- HSP2/om_model_linkage.py | 3 +- HSP2/om_model_object.py | 10 +- HSP2/om_special_action.py | 71 ++++-- HSP2/state.py | 3 + .../HSP2results/example_manual_object.py | 62 +++++ 9 files changed, 231 insertions(+), 248 deletions(-) create mode 100644 tests/testcbp/HSP2results/example_manual_object.py diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index 0ea0bb68..d79aa1f2 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -20,6 +20,14 @@ from HSP2.utilities import initm, make_numba_dict from HSP2.state import * from HSP2.SPECL import specl +from HSP2.om import * +from HSP2.om_model_object import * +from HSP2.om_sim_timer import * +from HSP2.om_special_action import * +#from HSP2.om_equation import * +from HSP2.om_model_linkage import * +#from HSP2.om_data_matrix import * +#from HSP2.om_model_broadcast import * ERRMSGS =('HYDR: SOLVE equations are indeterminate', #ERRMSG0 @@ -123,7 +131,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): Olabels.append(f'O{i+1}') OVOLlabels.append(f'OVOL{i+1}') - # state_info is some generic things 
about the simulation + # state_info is some generic things about the simulation - must be numba safe, so we don't just pass the whole state which is not state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] state_info['domain'], state_info['state_step_hydr'] = state['domain'], state['state_step_hydr'] @@ -141,14 +149,14 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): hydr_init_ix(state_ix, state_paths, state['domain']) ########################################################################### - # specactions - special actions code TBD + # OM - load the tokens to pass in. ########################################################################### - specactions = make_numba_dict(state['specactions']) # Note: all values coverted to float automatically - + op_tokens = state['op_tokens'] + ########################################################################### # Do the simulation with _hydr_() ########################################################################### - errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, specactions, state_step_hydr) # run reaches simulation code + errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens) # run reaches simulation code ########################################################################### if 'O' in ts: del ts['O'] @@ -163,7 +171,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): @njit(cache=True) -def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, specactions, state_step_hydr): +def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, 
ts_ix, state_step_hydr, op_tokens): errors = zeros(int(ui['errlen'])).astype(int64) steps = int(ui['steps']) # number of simulation steps @@ -309,7 +317,6 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf # HYDR (except where noted) for step in range(steps): # call specl - specl(ui, ts, step, state_info, state_paths, state_ix, specactions) convf = CONVF[step] outdgt[:] = OUTDGT[step, :] colind[:] = COLIND[step, :] @@ -323,17 +330,34 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf state_ix[out_ix[oi]] = outdgt[oi] state_ix[vol_ix], state_ix[ivol_ix] = vol, IVOL0[step] state_ix[volev_ix] = volev + # ***************************************** # Execute dynamic code if enabled + # - these if statements may be irrelevant if default functions simply return + # when no objects are defined. + if (state_info['state_step_om'] == 'enabled'): + pre_step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) if (state_info['state_step_hydr'] == 'enabled'): state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) + # Execute dynamic code if enabled + if (state_info['state_step_om'] == 'enabled'): + #print("trying to execute state_step_om()") + # op_tokens[0] contains the model exec list. Later we may amend this + # perhaps even storing a domain specific exec list under domain/exec_list? + step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) + # Execute dynamic code if enabled + if ( (state_info['state_step_hydr'] == 'enabled') + or (state_info['state_step_om'] == 'enabled') + ): # Do write-backs for editable STATE variables # OUTDGT is writeable for oi in range(nexits): outdgt[oi] = state_ix[out_ix[oi]] - # IVOL is writeable. - # Note: we must convert IVOL to the units expected in _hydr_ + # IVOL is writeable. 
+ # Note: we must convert IVOL to the units expected in _hydr_ # maybe routines should do this, and this is not needed (but pass VFACT in state) IVOL[step] = state_ix[ivol_ix] * VFACT + # End dynamic code step() + # ***************************************** # vols, sas variables and their initializations not needed. if irexit >= 0: # irrigation exit is set, zero based number if rirwdl > 0.0: # equivalent to OVOL for the irrigation exit diff --git a/HSP2/SPECL.py b/HSP2/SPECL.py index 9b798d94..2f4345ec 100644 --- a/HSP2/SPECL.py +++ b/HSP2/SPECL.py @@ -1,20 +1,37 @@ ''' process special actions in this domain - -CALL: specl(io_manager, siminfo, uci, ts, state, specl_actions) - store is the Pandas/PyTable open store - siminfo is a dictionary with simulation level infor (OP_SEQUENCE for example) - ui is a dictionary with RID specific HSPF UCI like data - ts is a dictionary with RID specific timeseries - state is a dictionary with value of variables at ts[step - 1] - specl_actions is a dictionary with all SPEC-ACTIONS entries +Notes: + - code for parsing UCI SPEC-ACTIONS is in HSP2tools/readUCI.py + - code for object classes that transform parsed data into OP codes for OM and STATE support + is in this directory tree as om_special_[action type].py, + - Ex: om_special_action.py contains object support and runtime functions for classic ACTIONS ''' from numba import njit +from pandas import DataFrame, date_range import h5py -def specl_load_actions(state, io_manager, siminfo) - for speca in state['specactions']['ACTIONS'].items(): - +def specl_load_actions(state, io_manager, siminfo): + dc = state['specactions']['ACTIONS'] + #print(dc.index) + #print("speca entry 0:0", dc[0:0]) + #print("speca entry 0:1", dc[0:1]) + #print("speca entry 1:2", dc[1:2]) + #print("speca entry 0:", dc[0:]) + #print("speca entry 1:", dc[1:]) + #print("speca entry 1:1", dc[1:1]) + for ix in dc.index: + # add the items to the state['model_data'] dict + speca = dc[ix:(ix+1)] + # need to add a name 
attribute + opname = 'SPEC' + 'ACTION' + str(ix) + state['model_data'][opname] = {} + state['model_data'][opname]['name'] = opname + for ik in speca.keys(): + #print("looking for speca key ", ik) + state['model_data'][opname][ik] = speca.to_dict()[ik][ix] + state['model_data'][opname]['object_class'] = 'SpecialAction' + #print("model_data", ix, " = ", state['model_data'][opname]) + return def state_load_dynamics_specl(state, io_manager, siminfo): specl_load_actions(state, io_manager, siminfo) @@ -23,6 +40,18 @@ def state_load_dynamics_specl(state, io_manager, siminfo): # ... return +''' +# the code specl() is deprecated in favor of execution inside OM +# see om_special_action.py for example of object support and runtime functions for classic ACTIONS +CALL: specl(ui, ts, step, state_info, state_paths, state_ix, specactions) + store is the Pandas/PyTable open store + siminfo is a dictionary with simulation level infor (OP_SEQUENCE for example) + ui is a dictionary with RID specific HSPF UCI like data + ts is a dictionary with RID specific timeseries + state is a dictionary with value of variables at ts[step - 1] + specl_actions is a dictionary with all SPEC-ACTIONS entries +''' + @njit def specl(ui, ts, step, state_info, state_paths, state_ix, specactions): # ther eis no need for _specl_ because this code must already be njit diff --git a/HSP2/main.py b/HSP2/main.py index 73e75d84..a21b9803 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -13,6 +13,7 @@ from HSP2.configuration import activities, noop, expand_masslinks from HSP2.state import * from HSP2.om import * +from HSP2.SPECL import * from HSP2IO.io import IOManager, SupportsReadTS, Category @@ -51,9 +52,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None ftables = uci_obj.ftables specactions = uci_obj.specactions monthdata = uci_obj.monthdata - print("Special Actions", specactions) - keysList = list(specactions['ACTIONS'].keys()) - print(keysList) + start, stop = 
siminfo['start'], siminfo['stop'] copy_instances = {} @@ -78,7 +77,10 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state + state_load_dynamics_specl(state, io_manager, siminfo) state_load_dynamics_om(state, io_manager, siminfo) + # finalize all dynamically loaded components and prepare to run the model + state_om_model_run_prep(state, io_manager, siminfo) ####################################################################################### # main processing loop msg(1, f'Simulation Start: {start}, Stop: {stop}') diff --git a/HSP2/om.py b/HSP2/om.py index ae03a344..31cb48a9 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -52,10 +52,12 @@ def is_float_digit(n: str) -> bool: except ValueError: return False +# Import Code Classes from HSP2.om_model_object import * from HSP2.om_sim_timer import * #from HSP2.om_equation import * from HSP2.om_model_linkage import * +from HSP2.om_special_action import * #from HSP2.om_data_matrix import * #from HSP2.om_model_broadcast import * #from HSP2.om_simple_channel import * @@ -69,202 +71,23 @@ def init_om_dicts(): model_object_cache = {} # this does not need to be a special Dict as it is not used in numba return op_tokens, model_object_cache -# This is deprecated but kept to support legacy demo code -# Function is not splot between 2 functions: -# - init_state_dicts() (from state.py) -# - init_om_dicts() from om.py -def init_sim_dicts(): - """ - We should get really good at using docstrings... - Agree. they are dope. 
- """ - op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) - state_paths = Dict.empty(key_type=types.unicode_type, value_type=types.int64) - state_ix = Dict.empty(key_type=types.int64, value_type=types.float64) - dict_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:,:]) - ts_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:]) - model_object_cache = {} # this does not need to be a special Dict as it is not used in numba - return op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache - - -def load_sim_dicts(siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache): - # by setting the state_parhs, opt_tokens, state_ix etc on the abstract class ModelObject - # all objects that we create share this as a global referenced variable. - # this may be a good thing or it may be bad? For now, we leverage this to reduce settings props - # but at some point we move all prop setting into a function and this maybe doesn't seem so desirable - # since there could be some unintended consequences if we actually *wanted* them to have separate copies - # tho since the idea is that they are global registries, maybe that is not a valid concern. 
- ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) - # set up the timer as the first element - timer = SimTimer('timer', False, siminfo) - #timer.add_op_tokens() - river = ModelObject('RCHRES_R001') - # upon object creation river gets added to state with path "/STATE/RCHRES_R001" - river.add_input("Qin", f'{river.state_path}/HYDR/IVOL', 2) - # alternative, using TIMESERIES: - # river.add_input("Qin", "/TIMESERIES/TS011", 3) - # river.add_input("ps_mgd", "/TIMESERIES/TS3000", 3) - river.add_op_tokens() # formally adds this to the simulation - - # now add a simple table - data_table = np.asarray([ [ 0.0, 5.0, 10.0], [10.0, 15.0, 20.0], [20.0, 25.0, 30.0], [30.0, 35.0, 40.0] ], dtype= "float32") - dm = DataMatrix('dm', river, data_table) - dm.add_op_tokens() - # 2d lookup - dma = DataMatrixLookup('dma', river, dm.state_path, 2, 17.5, 1, 6.8, 1, 0.0) - dma.add_op_tokens() - # 1.5d lookup - #dma = DataMatrixLookup('dma', river, dm.state_path, 3, 17.5, 1, 1, 1, 0.0) - #dma.add_op_tokens() - - facility = ModelObject('facility', river) - - Qintake = Equation('Qintake', facility, "Qin * 1.0") - Qintake.add_op_tokens() - # a flowby - flowby = Equation('flowby', facility, "Qintake * 0.9") - flowby.add_op_tokens() - # add a withdrawal equation - # we use "3.0 + 0.0" because the equation parser fails on a single factor (number of variable) - # so we have to tweak that. 
However, we need to handle constants separately, and also if we see a - # single variable equation (such as Qup = Qhydr) we need to rewrite that to a input anyhow for speed - wd_mgd = Equation('wd_mgd', facility, "3.0 + 0.0") - wd_mgd.add_op_tokens() - # Runit - unit area runoff - Runit = Equation('Runit', facility, "Qin / 592.717") - Runit.add_op_tokens() - # add local subwatersheds to test scalability - """ - for k in range(10): - subshed_name = 'sw' + str(k) - upstream_name = 'sw' + str(k-1) - Qout_eqn = str(25*random.random()) + " * Runit " - if k > 0: - Qout_eqn = Qout_eqn + " + " + upstream_name + "_Qout" - Qout_ss = Equation(subshed_name + "_Qout", facility, eqn) - Qout_ss.add_op_tokens() - # now add the output of the final tributary to the inflow to this one - Qtotal = Equation("Qtotal", facility, "Qin + " + Qout_ss.name) - Qtotal.tokenize() - """ - # add random ops to test scalability - # add a series of rando equations - """ - c=["flowby", "wd_mgd", "Qintake"] - for k in range(10000): - eqn = str(25*random.random()) + " * " + c[round((2*random.random()))] - newq = Equation('eq' + str(k), facility, eqn) - newq.add_op_tokens() - """ - # now connect the wd_mgd back to the river with a direct link. - # This is not how we'll do it for most simulations as there may be multiple inputs but will do for now - hydr = ModelObject('HYDR', river) - hydr.add_op_tokens() - O1 = ModelLinkage('O1', hydr, wd_mgd.state_path, 2) - O1.add_op_tokens() - - return - - -def load_om_components(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache): - # set up OM base dcits - op_tokens, model_object_cache = init_om_dicts() - # set globals on ModelObject - ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = (op_tokens, state_paths, state_ix, dict_ix, model_object_cache) - # Create the base that everything is added to. - # this object does nothing except host the rest. 
- # it has no name so that all paths can be relative to it. - model_root_object = ModelObject("") - # set up the timer as the first element - timer = SimTimer('timer', model_root_object, siminfo) - #timer.add_op_tokens() - #print("siminfo:", siminfo) - #river = ModelObject('RCHRES_R001') - # upon object creation river gets added to state with path "/STATE/RCHRES_R001" - #river.add_input("Qivol", f'{river.state_path}/HYDR/IVOL', 2, True) - # a json NHD from R parser - # Opening JSON file - # load the json data from a pre-generated json file on github - - # this allows this function to be called without an hdf5 - if io_manager != False: - # try this - local_path = os.getcwd() - print("Path:", local_path) - (fbase, fext) = os.path.splitext(hdf5_path) - # see if there is a code module with custom python - print("Looking for custom python code ", (fbase + ".py")) - print("calling dynamic_module_import(",fbase, local_path + "/" + fbase + ".py", ", 'hsp2_local_py')") - hsp2_local_py = dynamic_module_import(fbase, local_path + "/" + fbase + ".py", "hsp2_local_py") - # Load a function from code if it exists - if 'om_init_model' in dir(hsp2_local_py): - hsp2_local_py.om_init_model(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache) - if 'om_step_hydr' in dir(hsp2_local_py): - siminfo['om_step_hydr'] = True - if 'state_step_hydr' in dir(hsp2_local_py): - siminfo['state_step_hydr'] = True - - # see if there is custom json - fjson = fbase + ".json" - model_data = {} - if (os.path.isfile(fjson)): - print("Found local json file", fjson) - jfile = open(fjson) - model_data = json.load(jfile) - #print("Loaded json with keys:", model_data.keys()) - print("hdf5_path=", hdf5_path) - # Opening JSON file from remote url - # json_url = "https://raw.githubusercontent.com/HARPgroup/vahydro/master/R/modeling/nhd/nhd_simple_8566737.json" - #jraw = requests.get(json_url, verify=False) - #model_json = jraw.content.decode('utf-8') - # returns JSON object as 
Dict - # returns JSON object as Dict - #model_exec_list = np.asarray([]) - #container = False - # call it! - model_loader_recursive(model_data, model_root_object) - print("Loaded the following objects & paths") - print("Insuring all paths are valid, and connecting models as inputs") - model_path_loader(model_object_cache) - # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added - print("Tokenizing models") - model_exec_list = [] - model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) - #print("model_exec_list:", model_exec_list) - # This is used to stash the model_exec_list -- is this used? - op_tokens[0] = np.asarray(model_exec_list, dtype="i8") - ivol_state_path = '/STATE/RCHRES_R001' + "/IVOLin" - if (ivol_state_path in state_paths): - ivol_ix = state_paths[ivol_state_path] - #print("IVOLin found. state_paths = ", ivol_ix) - print("IVOLin op_tokens ", op_tokens[ivol_ix]) - print("IVOLin state_ix = ", state_ix[ivol_ix]) - else: - print("Could not find",ivol_state_path) - #print("Could not find",ivol_state_path,"in", state_paths) - return - # the resulting set of objects is returned. 
- state['model_object_cache'] = model_object_cache - state['op_tokens'] = op_tokens - state['state_step_om'] = 'disabled' - if len(op_tokens) > 1: - state['state_step_om'] = 'enabled' - return -def state_load_json(state, io_manager, siminfo): +def state_load_om_json(state, io_manager, siminfo): # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will # be loadable by "model_loader_recursive()" - model_data = {} + model_data = state['model_data'] # JSON file would be in same path as hdf5 hdf5_path = io_manager._input.file_path - print("Looking for custom om json ", fjson) (fbase, fext) = os.path.splitext(hdf5_path) # see if there is custom json fjson = fbase + ".json" + print("Looking for custom om json ", fjson) if (os.path.isfile(fjson)): print("Found local json file", fjson) jfile = open(fjson) - model_data = json.load(jfile) + json_data = json.load(jfile) + # dict.update() combines the arg dict with the base + model_data.update(json_data) state['model_data'] = model_data return @@ -283,7 +106,7 @@ def state_load_om_python(state, io_manager, siminfo): hsp2_local_py = state['hsp2_local_py'] # Load a function from code if it exists if 'om_init_model' in dir(hsp2_local_py): - hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokens'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) + hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokenModelObject.model_object_caches'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) def state_load_dynamics_om(state, io_manager, siminfo): @@ -292,16 +115,21 @@ def state_load_dynamics_om(state, io_manager, siminfo): # Grab globals from state for easy handling op_tokens, model_object_cache = init_om_dicts() state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] - # set globals on 
ModelObject + # set globals on ModelObject, this makes them persistent throughout all subsequent object instantiation and use ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = ( op_tokens, state_paths, state_ix, dict_ix, model_object_cache ) + state['op_tokens'], state['model_object_cache'] = op_tokens, model_object_cache # load dynamic coding libraries if defined by user # note: this used to be inside this function, I think that the loaded module should be no problem # occuring within this function call, since this function is also called from another runtime engine # but if things fail post develop-specact-1 pull requests we may investigate here + # also, it may be that this should be loaded elsewhere? state_load_om_python(state, io_manager, siminfo) - + state_load_om_json(state, io_manager, siminfo) + return + +def state_om_model_run_prep(state, io_manager, siminfo): # Create the base that everything is added to. this object does nothing except host the rest. model_root_object = ModelObject("") # set up the timer as the first element @@ -311,6 +139,12 @@ def state_load_dynamics_om(state, io_manager, siminfo): # state['model_data'] has alread been prepopulated from json, .py files, hdf5, etc. model_loader_recursive(state['model_data'], model_root_object) print("Loaded objects & paths: insures all paths are valid, connects models as inputs") + # both state['model_object_cache'] and the model_object_cache property of the ModelObject class def + # will hold a global repo for this data this may be redundant? They DO point to the same datset? 
+ # since this is a function that accepts state as an argument and these were both set in state_load_dynamics_om + # we can assume they are there and functioning + model_object_cache = state['model_object_cache'] + op_tokens = state['op_tokens'] model_path_loader(model_object_cache) # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added model_exec_list = [] @@ -400,9 +234,10 @@ def model_class_loader(model_name, model_props, container = False): return False # create it model_object = DataMatrix(model_props.get('name'), container, model_props) - elif object_class == 'ModelLinkage': model_object = ModelLinkage(model_props.get('name'), container, model_props) + elif object_class == 'SpecialAction': + model_object = SpecialAction(model_props.get('name'), container, model_props) else: print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject") model_object = ModelObject(model_props.get('name'), container) @@ -570,8 +405,10 @@ def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): pass elif op_tokens[i][0] == 5: pass - elif op_tokens[i][0] == 11: - pre_step_register(op_tokens[i], state_ix, dict_ix) + elif op_tokens[i][0] == 12: + # register type data (like broadcast accumulators) + # disabled till broadcasts are defined pre_step_register(op_tokens[i], state_ix, dict_ix) + pass return @njit @@ -594,7 +431,7 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): if debug == 1: print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) if ops[0] == 1: - state_ix[ops[1]] = step_equation(ops, state_ix) + pass #state_ix[ops[1]] = step_equation(ops, state_ix) elif ops[0] == 2: # todo: this should be moved into a single function, # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) @@ -604,12 +441,12 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): # this insures a matrix with variables in it is up to date # only need to do 
this if the matrix data and matrix config are on same object # otherwise, the matrix data is an input and has already been evaluated - state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) + pass# state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) if (ops[3] > 0): # this evaluates a single value from a matrix if the matrix is configured to do so. if debug == 1: print("DEBUG: Calling exec_tbl_eval", ops) - state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + pass# state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) elif ops[0] == 3: step_model_link(ops, state_ix, ts_ix, step) elif ops[0] == 4: @@ -619,10 +456,10 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): elif ops[0] == 9: val = 0 elif ops[0] == 13: - step_simple_channel(ops, state_ix, dict_ix, step) + pass #step_simple_channel(ops, state_ix, dict_ix, step) # Op 100 is Basic ACTION in Special Actions elif ops[0] == 100: - state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix) + pass # state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix) return diff --git a/HSP2/om_model_linkage.py b/HSP2/om_model_linkage.py index ed798d8c..b005743a 100644 --- a/HSP2/om_model_linkage.py +++ b/HSP2/om_model_linkage.py @@ -37,8 +37,7 @@ def __init__(self, name, container = False, model_props = []): self.add_input(self.right_path, self.right_path) def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): - # this checks to see if the prop is in dict with value form, or just a value - # strict = True causes an exception if property is missing from model_props dict + # parent method handles most cases, but subclass handles special situations. prop_val = super().handle_prop(model_props, prop_name, strict, default_value) if ( (prop_name == 'right_value') and (prop_val == None) or (prop_val == '')): raise Exception("right_path cannot be empty. Object creation halted. 
Path to object with error is " + self.state_path) diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 60d1dd9c..999ee419 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -34,16 +34,8 @@ def __init__(self, name, container = False, model_props = []): self.default_value = 0.0 self.ops = [] self.optype = 0 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment - # this is replaceable. to replace state_path/re-register the index : - # - remove the old PATH from state_paths: del state_paths[self.state_path] - # you should never create an object without knowing its container, but if you do - # you can TRY to do the following: - # - set this objects new path based on containment and call: - # [my_object].make_paths() - # - add this manually to state_paths: - # state_paths[[my_object].state_path] = [my_object].ix - # - call [my_object].register_path() self.register_path() + self.parse_model_props(model_props) @staticmethod def required_properties(): diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index d307b511..4befea6c 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -7,24 +7,53 @@ from numba import njit class SpecialAction(ModelObject): def __init__(self, name, container = False, model_props = []): - super(ModelObject, self).__init__(name, container) - self.src_op = self.handle_prop(model_props, 'input') - self.dest_op = self.handle_prop(model_props, 'target') - self.cop = self.handle_prop(model_props, 'op') + super(SpecialAction, self).__init__(name, container, model_props) self.optype = 100 # Special Actions start indexing at 100 + def parse_model_props(self, model_props, strict=False): + print("SpecialAction.parse_model_props() 
called") + # comes in as row from special ACTIONS table + # ex: { + # 'OPTYP': 'RCHRES', 'RANGE1': '1', 'RANGE2': '', 'DC': 'DY', 'DS': '', + # 'YR': '1986', 'MO': '3', 'DA': '1', 'HR': '12', 'MN': '', + # 'D': '2', 'T': 3, 'VARI': 'IVOL', 'S1': '', 'S2': '', + # 'AC': '+=', 'VALUE': 30.0, 'TC': '', 'TS': '', 'NUM': '', 'CURLVL': 0, + # defined by: + # - operand1, i.e. variable to access + update, path = /STATE/[OPTYP]_[op_abbrev][RANGE1]/[VARI] + # - action(operation) to perform = AC + # - operand2, a numeric value for simple ACTION = [VALUE] + # note: [op_abbrev] is *maybe* the first letter of the OPTYP? Not a very good idea to have a coded convention like that + self.op_type = self.handle_prop(model_props, 'OPTYP') + self.range1 = self.handle_prop(model_props, 'RANGE1') + self.range2 = self.handle_prop(model_props, 'RANGE2') + self.ac = self.handle_prop(model_props, 'AC') # must handle this before we handle the operand to check for DIV by Zero + self.vari = self.handle_prop(model_props, 'VARI') + self.op2_val = self.handle_prop(model_props, 'VALUE') + self.op2_ix = self.constant_or_path('op_val', self.op2_val) # constant values must be added to STATE and thus are referenced by their state_ix number + # now add the module state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially + self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # Insure all values are legal ex: no DIV by Zero + prop_val = super().handle_prop(model_props, prop_name, strict, default_value ) + if (prop_name == 'VALUE') and (self.ac == '/='): + if (prop_val == 0) or (prop_val == None): + raise Exception("Error: in properties passed to "+ self.name + " AC must be non-zero or non-Null . Object creation halted. 
Path to object with error is " + self.state_path) + return prop_val + def tokenize(self): # call parent method to set basic ops common to all - super().tokenize() + super().tokenize() # sets self.ops = op_type, op_ix # cop_code 0: =/eq, 1: /gt, 3: <=/le, 4: >=/ge, 5: <>/ne - cop_codes = [ + cop_codes = { '+=': 0, '-=': 1, '*=': 2, - '/=': 3, - ] - self.ops = self.ops + [self.left_ix, cop_codes[self.cop], self.right_ix] + '/=': 3 + } + self.ops = self.ops + [self.inputs_ix['op1'], cop_codes[self.ac], self.op2_ix] + print("Specl", self.name, "tokens", self.ops) def add_op_tokens(self): # this puts the tokens into the global simulation queue @@ -35,27 +64,33 @@ def add_op_tokens(self): def hdf5_load_all(hdf_source): specla=hdf_source['/SPEC_ACTIONS/ACTIONS/table'] for idx, x in np.ndenumerate(specla): - print(x[1].decode("utf-8"),x[2].decode("utf-8"), x[13].decode("utf-8"), x[16].decode("utf-8"), x[17]) + print(x[1].decode("utf-8"),x[2].decode("utf-8"), x[13].decode("utf-8"), x[16].decode("utf-8"), x[17]) # njit functions for runtime @njit -def exec_saction(op, state_ix, dict_ix): +def step_saction(op, state_ix, dict_ix): + print("specal", op) ix = op[1] # ID of this op - dix = op[2] # ID of place to store data # these indices must be adjusted to reflect the number of common op tokens # SpecialAction has: # - type of condition (+=, -=, ...) # - operand 1 (left side) # - operand 2 (right side) - op = op[3] - ix1 = op[4] - ix2 = op[5] - if op == 0: + ix1 = op[2] # ID of source of data and destination of data + sop = op[3] + ix2 = op[4] + if sop == 0: result = state_ix[ix1] + state_ix[ix2] - if op == 1: + if sop == 1: result = state_ix[ix1] - state_ix[ix2] - + if sop == 2: + result = state_ix[ix1] * state_ix[ix2] + if sop == 3: + result = state_ix[ix1] / state_ix[ix2] + # set value in target + # tbd: handle this with a link? 
+ state_ix[ix1] = result return result diff --git a/HSP2/state.py b/HSP2/state.py index fe7e268d..cfcc2e83 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -23,6 +23,8 @@ def init_state_dicts(): # initialize state for hydr # now put all of these Dicts into the state Dict state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] = state_paths, state_ix, dict_ix, ts_ix + # add a generic place to stash model_data for dynamic components + state['model_data'] = {} return state @@ -122,6 +124,7 @@ def state_context_hsp2(state, operation, segment, activity): state['activity'] = activity # give shortcut to state path for the upcoming function state['domain'] = "/STATE/" + operation + "_" + segment + "/" + activity + state['model_data'] = {} def state_siminfo_hsp2(uci_obj, siminfo): # Add crucial simulation info for dynamic operation support diff --git a/tests/testcbp/HSP2results/example_manual_object.py b/tests/testcbp/HSP2results/example_manual_object.py new file mode 100644 index 00000000..021c10a8 --- /dev/null +++ b/tests/testcbp/HSP2results/example_manual_object.py @@ -0,0 +1,62 @@ +# this is a code remnant that lays out a manually created set of objects +# in order to use this appropriate libs must be loaded but this does not yet do so + + +# now add a simple table +data_table = np.asarray([ [ 0.0, 5.0, 10.0], [10.0, 15.0, 20.0], [20.0, 25.0, 30.0], [30.0, 35.0, 40.0] ], dtype= "float32") +dm = DataMatrix('dm', river, data_table) +dm.add_op_tokens() +# 2d lookup +dma = DataMatrixLookup('dma', river, dm.state_path, 2, 17.5, 1, 6.8, 1, 0.0) +dma.add_op_tokens() +# 1.5d lookup +#dma = DataMatrixLookup('dma', river, dm.state_path, 3, 17.5, 1, 1, 1, 0.0) +#dma.add_op_tokens() + +facility = ModelObject('facility', river) + +Qintake = Equation('Qintake', facility, "Qin * 1.0") +Qintake.add_op_tokens() +# a flowby +flowby = Equation('flowby', facility, "Qintake * 0.9") +flowby.add_op_tokens() +# add a withdrawal equation +# we use "3.0 + 0.0" because the 
equation parser fails on a single factor (number of variable) +# so we have to tweak that. However, we need to handle constants separately, and also if we see a +# single variable equation (such as Qup = Qhydr) we need to rewrite that to a input anyhow for speed +wd_mgd = Equation('wd_mgd', facility, "3.0 + 0.0") +wd_mgd.add_op_tokens() +# Runit - unit area runoff +Runit = Equation('Runit', facility, "Qin / 592.717") +Runit.add_op_tokens() +# add local subwatersheds to test scalability +""" +for k in range(10): + subshed_name = 'sw' + str(k) + upstream_name = 'sw' + str(k-1) + Qout_eqn = str(25*random.random()) + " * Runit " + if k > 0: + Qout_eqn = Qout_eqn + " + " + upstream_name + "_Qout" + Qout_ss = Equation(subshed_name + "_Qout", facility, eqn) + Qout_ss.add_op_tokens() +# now add the output of the final tributary to the inflow to this one +Qtotal = Equation("Qtotal", facility, "Qin + " + Qout_ss.name) +Qtotal.tokenize() +""" +# add random ops to test scalability +# add a series of rando equations +""" +c=["flowby", "wd_mgd", "Qintake"] +for k in range(10000): + eqn = str(25*random.random()) + " * " + c[round((2*random.random()))] + newq = Equation('eq' + str(k), facility, eqn) + newq.add_op_tokens() +""" +# now connect the wd_mgd back to the river with a direct link. +# This is not how we'll do it for most simulations as there may be multiple inputs but will do for now +hydr = ModelObject('HYDR', river) +hydr.add_op_tokens() +O1 = ModelLinkage('O1', hydr, wd_mgd.state_path, 2) +O1.add_op_tokens() + +return From 31942f7350972ac785711f36abf9db246f81b884 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 26 Dec 2023 21:49:03 -0500 Subject: [PATCH 07/35] tested basic specl, works thus far with no time matching, and need to sort out state paths since ACTIONS dont give module aka HYDR, PERLND etc. 
--- HSP2/HYDR.py | 3 ++- HSP2/om.py | 2 +- HSP2/om_sim_timer.py | 2 +- HSP2/om_special_action.py | 13 ++++++++++--- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index d79aa1f2..d972ccb6 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -134,7 +134,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): # state_info is some generic things about the simulation - must be numba safe, so we don't just pass the whole state which is not state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] - state_info['domain'], state_info['state_step_hydr'] = state['domain'], state['state_step_hydr'] + state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] hsp2_local_py = state['hsp2_local_py'] # It appears necessary to load this here, instead of from main.py, otherwise, # _hydr_() does not recognize the function state_step_hydr()? @@ -148,6 +148,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): # initialize the hydr paths in case they don't already reside here hydr_init_ix(state_ix, state_paths, state['domain']) + ########################################################################### # OM - load the tokens to pass in. 
########################################################################### diff --git a/HSP2/om.py b/HSP2/om.py index 31cb48a9..b14518ea 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -459,7 +459,7 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): pass #step_simple_channel(ops, state_ix, dict_ix, step) # Op 100 is Basic ACTION in Special Actions elif ops[0] == 100: - pass # state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix) + state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix, step) return diff --git a/HSP2/om_sim_timer.py b/HSP2/om_sim_timer.py index 8257ca32..33056fa6 100644 --- a/HSP2/om_sim_timer.py +++ b/HSP2/om_sim_timer.py @@ -81,4 +81,4 @@ def step_sim_timer(op_token, state_ix, dict_ix, ts_ix, step): state_ix[op_token[10]] = dict_ix[op_token[1]][step][9] # julian day state_ix[op_token[11]] = dict_ix[op_token[1]][step][10] # modays state_ix[op_token[12]] = dict_ix[op_token[1]][step][11] # dts - return \ No newline at end of file + return diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 4befea6c..96fcaeb7 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -31,8 +31,11 @@ def parse_model_props(self, model_props, strict=False): self.vari = self.handle_prop(model_props, 'VARI') self.op2_val = self.handle_prop(model_props, 'VALUE') self.op2_ix = self.constant_or_path('op_val', self.op2_val) # constant values must be added to STATE and thus are referenced by their state_ix number - # now add the module state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially + # now add the state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) + # @tbd: support time enable/disable + # - check if time ops have been set and add as inputs like "year", or 
"month", etc could give explicit path /STATE/year ... + # - add the time values to match as constants i.e. self.constant_or_path() def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): # Insure all values are legal ex: no DIV by Zero @@ -53,6 +56,7 @@ def tokenize(self): '/=': 3 } self.ops = self.ops + [self.inputs_ix['op1'], cop_codes[self.ac], self.op2_ix] + # @tbd: check if time ops have been set and tokenize accordingly print("Specl", self.name, "tokens", self.ops) def add_op_tokens(self): @@ -70,14 +74,17 @@ def hdf5_load_all(hdf_source): # njit functions for runtime @njit -def step_saction(op, state_ix, dict_ix): - print("specal", op) +def step_saction(op, state_ix, dict_ix, step): ix = op[1] # ID of this op # these indices must be adjusted to reflect the number of common op tokens # SpecialAction has: # - type of condition (+=, -=, ...) # - operand 1 (left side) # - operand 2 (right side) + # @tbd: check if time ops have been set and enable/disable accordingly + # - 2 ops will be added for each time matching switch, the state_ix of the time element (year, month, ...) 
and the state_ix of the constant to match + # - matching should be as simple as if (state_ix[tix1] <> state_ix[vtix1]): return state_ix[ix1] (don't modify the value) + # ix1 = op[2] # ID of source of data and destination of data sop = op[3] ix2 = op[4] From b1edb246336fbe6a96485d1bfc740fe422a7773c Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 26 Dec 2023 21:55:25 -0500 Subject: [PATCH 08/35] domt wipe the model_data in the context function as this stuff should be persistent --- HSP2/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HSP2/state.py b/HSP2/state.py index cfcc2e83..fa19c15e 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -119,12 +119,12 @@ def append_state(state_ix, var_value): return val_ix def state_context_hsp2(state, operation, segment, activity): + # this establishes domain info so that a module can know its paths state['operation'] = operation state['segment'] = segment # state['activity'] = activity # give shortcut to state path for the upcoming function state['domain'] = "/STATE/" + operation + "_" + segment + "/" + activity - state['model_data'] = {} def state_siminfo_hsp2(uci_obj, siminfo): # Add crucial simulation info for dynamic operation support From ffa484cdcb8ce601c37f17c5964b473ed520b062 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 27 Dec 2023 08:50:37 -0500 Subject: [PATCH 09/35] screen for valid action and support integer AC code. not yet tested. 
--- HSP2/om_special_action.py | 46 ++++++++++++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 10 deletions(-) diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 96fcaeb7..8f84b247 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -50,12 +50,36 @@ def tokenize(self): super().tokenize() # sets self.ops = op_type, op_ix # cop_code 0: =/eq, 1: /gt, 3: <=/le, 4: >=/ge, 5: <>/ne cop_codes = { - '+=': 0, - '-=': 1, - '*=': 2, - '/=': 3 + '=': 1, + '+=': 2, + '-=': 3, + '*=': 4, + '/=': 5, + 'MIN': 6 } - self.ops = self.ops + [self.inputs_ix['op1'], cop_codes[self.ac], self.op2_ix] + # From HSPF UCI docs: + # 1 = T= A + # 2 += T= T+ A + # 3 -= T= T- A + # 4 *= T= T*A + # 5 /= T= T/A + # 6 MIN T= Min(T,A) + # 7 MAX T= Max(T,A) + # 8 ABS T= Abs(A) + # 9 INT T= Int(A) + # 10 ^= T= T^A + # 11 LN T= Ln(A) + # 12 LOG T= Log10(A) + # 13 MOD T= Mod(T,A) + if !is_float_digit(self.ac): + if !(self in cop_codes.values()) + raise Exception("Error: in "+ self.name + " AC (" + self.ac + ") not supported. Object creation halted. 
Path to object with error is " + self.state_path) + ac = self.ac + else: + # this will fail catastriphically if the requested function is not supported + # which is a good thing + ac = cop_codes[self.ac] + self.ops = self.ops + [self.inputs_ix['op1'], ac, self.op2_ix] # @tbd: check if time ops have been set and tokenize accordingly print("Specl", self.name, "tokens", self.ops) @@ -88,16 +112,18 @@ def step_saction(op, state_ix, dict_ix, step): ix1 = op[2] # ID of source of data and destination of data sop = op[3] ix2 = op[4] - if sop == 0: - result = state_ix[ix1] + state_ix[ix2] if sop == 1: - result = state_ix[ix1] - state_ix[ix2] + result = state_ix[ix2] if sop == 2: - result = state_ix[ix1] * state_ix[ix2] + result = state_ix[ix1] + state_ix[ix2] if sop == 3: + result = state_ix[ix1] - state_ix[ix2] + if sop == 4: + result = state_ix[ix1] * state_ix[ix2] + if sop == 5: result = state_ix[ix1] / state_ix[ix2] # set value in target - # tbd: handle this with a link? + # tbd: handle this with a model linkage? 
cons: this makes a loop since the ix1 is source and destination state_ix[ix1] = result return result From 6f1685191e083fbf9ff36d2dfc497db359ad92e4 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 27 Dec 2023 11:19:18 -0500 Subject: [PATCH 10/35] Move validation to handle_prop and rename runtime to follow convention --- HSP2/om.py | 2 +- HSP2/om_special_action.py | 17 +++++++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/HSP2/om.py b/HSP2/om.py index b14518ea..39a267e9 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -459,7 +459,7 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): pass #step_simple_channel(ops, state_ix, dict_ix, step) # Op 100 is Basic ACTION in Special Actions elif ops[0] == 100: - state_ix[ops[1]] = step_saction(ops, state_ix, dict_ix, step) + state_ix[ops[1]] = step_special_action(ops, state_ix, dict_ix, step) return diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 8f84b247..4348946a 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -43,11 +43,11 @@ def handle_prop(self, model_props, prop_name, strict = False, default_value = No if (prop_name == 'VALUE') and (self.ac == '/='): if (prop_val == 0) or (prop_val == None): raise Exception("Error: in properties passed to "+ self.name + " AC must be non-zero or non-Null . Object creation halted. Path to object with error is " + self.state_path) + if (prop_name == 'AC'): + self.handle_ac(prop_val) return prop_val - def tokenize(self): - # call parent method to set basic ops common to all - super().tokenize() # sets self.ops = op_type, op_ix + def handle_ac(self, ac): # cop_code 0: =/eq, 1: /gt, 3: <=/le, 4: >=/ge, 5: <>/ne cop_codes = { '=': 1, @@ -76,10 +76,15 @@ def tokenize(self): raise Exception("Error: in "+ self.name + " AC (" + self.ac + ") not supported. Object creation halted. 
Path to object with error is " + self.state_path) ac = self.ac else: - # this will fail catastriphically if the requested function is not supported + # this will fail catastrophically if the requested function is not supported # which is a good thing ac = cop_codes[self.ac] - self.ops = self.ops + [self.inputs_ix['op1'], ac, self.op2_ix] + self.opid = ac + + def tokenize(self): + # call parent method to set basic ops common to all + super().tokenize() # sets self.ops = op_type, op_ix + self.ops = self.ops + [self.inputs_ix['op1'], self.opid, self.op2_ix] # @tbd: check if time ops have been set and tokenize accordingly print("Specl", self.name, "tokens", self.ops) @@ -98,7 +103,7 @@ def hdf5_load_all(hdf_source): # njit functions for runtime @njit -def step_saction(op, state_ix, dict_ix, step): +def step_special_action(op, state_ix, dict_ix, step): ix = op[1] # ID of this op # these indices must be adjusted to reflect the number of common op tokens # SpecialAction has: From 35c85e6bcafee0fcf4c10ba650eb0d02b6116574 Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Wed, 3 Jan 2024 17:23:41 -0500 Subject: [PATCH 11/35] small clean-ups to SPECL functionality --- HSP2/SPECL.py | 41 ++++++++++++++++++++------------------- HSP2/main.py | 18 +++++++++-------- HSP2/om_special_action.py | 15 +++++++------- 3 files changed, 38 insertions(+), 36 deletions(-) diff --git a/HSP2/SPECL.py b/HSP2/SPECL.py index 2f4345ec..efd63196 100644 --- a/HSP2/SPECL.py +++ b/HSP2/SPECL.py @@ -11,26 +11,27 @@ import h5py def specl_load_actions(state, io_manager, siminfo): - dc = state['specactions']['ACTIONS'] - #print(dc.index) - #print("speca entry 0:0", dc[0:0]) - #print("speca entry 0:1", dc[0:1]) - #print("speca entry 1:2", dc[1:2]) - #print("speca entry 0:", dc[0:]) - #print("speca entry 1:", dc[1:]) - #print("speca entry 1:1", dc[1:1]) - for ix in dc.index: - # add the items to the state['model_data'] dict - speca = dc[ix:(ix+1)] - # need to add a name attribute - opname = 'SPEC' + 'ACTION' + 
str(ix) - state['model_data'][opname] = {} - state['model_data'][opname]['name'] = opname - for ik in speca.keys(): - #print("looking for speca key ", ik) - state['model_data'][opname][ik] = speca.to_dict()[ik][ix] - state['model_data'][opname]['object_class'] = 'SpecialAction' - #print("model_data", ix, " = ", state['model_data'][opname]) + if 'ACTIONS' in state['specactions']: + dc = state['specactions']['ACTIONS'] + #print(dc.index) + #print("speca entry 0:0", dc[0:0]) + #print("speca entry 0:1", dc[0:1]) + #print("speca entry 1:2", dc[1:2]) + #print("speca entry 0:", dc[0:]) + #print("speca entry 1:", dc[1:]) + #print("speca entry 1:1", dc[1:1]) + for ix in dc.index: + # add the items to the state['model_data'] dict + speca = dc[ix:(ix+1)] + # need to add a name attribute + opname = 'SPEC' + 'ACTION' + str(ix) + state['model_data'][opname] = {} + state['model_data'][opname]['name'] = opname + for ik in speca.keys(): + #print("looking for speca key ", ik) + state['model_data'][opname][ik] = speca.to_dict()[ik][ix] + state['model_data'][opname]['object_class'] = 'SpecialAction' + #print("model_data", ix, " = ", state['model_data'][opname]) return def state_load_dynamics_specl(state, io_manager, siminfo): diff --git a/HSP2/main.py b/HSP2/main.py index 95903873..2ab5f7fe 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -59,22 +59,23 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None gener_instances = {} ####################################################################################### - # initilize STATE dicts + # initialize STATE dicts ####################################################################################### - # Set up Things in state that will be used in all modular activitis like SPECL + # Set up Things in state that will be used in all modular activities like SPECL state = init_state_dicts() state_siminfo_hsp2(uci_obj, siminfo) - # Add support for dynamic functins to operate on STATE + # Add support for dynamic 
functions to operate on STATE # - Load any dynamic components if present, and store variables on objects state_load_dynamics_hsp2(state, io_manager, siminfo) # Iterate through all segments and add crucial paths to state # before loading dynamic components that may reference them for _, operation, segment, delt in opseq.itertuples(): - for activity, function in activities[operation].items(): - if activity == 'HYDR': - state_context_hsp2(state, operation, segment, activity) - print("Init HYDR state context for domain", state['domain']) - hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) + if operation != 'GENER' and operation != 'COPY': + for activity, function in activities[operation].items(): + if activity == 'HYDR': + state_context_hsp2(state, operation, segment, activity) + print("Init HYDR state context for domain", state['domain']) + hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state state_load_dynamics_specl(state, io_manager, siminfo) @@ -82,6 +83,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None # finalize all dynamically loaded components and prepare to run the model state_om_model_run_prep(state, io_manager, siminfo) ####################################################################################### + # main processing loop msg(1, f'Simulation Start: {start}, Stop: {stop}') tscat = {} diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 4348946a..344bfd0f 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -71,14 +71,13 @@ def handle_ac(self, ac): # 11 LN T= Ln(A) # 12 LOG T= Log10(A) # 13 MOD T= Mod(T,A) - if !is_float_digit(self.ac): - if !(self in cop_codes.values()) - raise Exception("Error: in "+ self.name + " AC (" + self.ac + ") not supported. Object creation halted. 
Path to object with error is " + self.state_path) - ac = self.ac - else: - # this will fail catastrophically if the requested function is not supported - # which is a good thing - ac = cop_codes[self.ac] + if not is_float_digit(ac): + if not ac in cop_codes: + raise Exception("Error: in "+ self.name + " AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path) + else: + # this will fail catastrophically if the requested function is not supported + # which is a good thing + ac = cop_codes[ac] self.opid = ac def tokenize(self): From e271c3988dc64fa4ba23ee0283c7287dd6772a91 Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Fri, 5 Jan 2024 12:16:28 -0500 Subject: [PATCH 12/35] state.py -- revised domain name, leaving off the activity name ('HYDR') for now, may reconsider later. --- HSP2/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HSP2/state.py b/HSP2/state.py index fa19c15e..0e914a42 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -124,7 +124,7 @@ def state_context_hsp2(state, operation, segment, activity): state['segment'] = segment # state['activity'] = activity # give shortcut to state path for the upcoming function - state['domain'] = "/STATE/" + operation + "_" + segment + "/" + activity + state['domain'] = "/STATE/" + operation + "_" + segment # + "/" + activity def state_siminfo_hsp2(uci_obj, siminfo): # Add crucial simulation info for dynamic operation support From 429cbd60f3bdd5c7e55ed0025684957b25628467 Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Fri, 5 Jan 2024 16:40:07 -0500 Subject: [PATCH 13/35] HYDR.py and main.py -- minor changes to comments only --- HSP2/HYDR.py | 52 ++++++++++++++++++++++++++++------------------------ HSP2/main.py | 6 +++--- 2 files changed, 31 insertions(+), 27 deletions(-) diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index d972ccb6..c3ae9341 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -18,6 +18,8 @@ from numba import njit from numba.typed import List 
from HSP2.utilities import initm, make_numba_dict + +# the following imports added by rb to handle dynamic code and special actions from HSP2.state import * from HSP2.SPECL import specl from HSP2.om import * @@ -44,7 +46,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): ''' find the state of the reach/reservoir at the end of the time interval and the outflows during the interval - CALL: hydr(store, general, ui, ts, specactions) + CALL: hydr(store, general, ui, ts, state) store is the Pandas/PyTable open store general is a dictionary with simulation level infor (OP_SEQUENCE for example) ui is a dictionary with RID specific HSPF UCI like data @@ -130,8 +132,12 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): for i in range(nexits): Olabels.append(f'O{i+1}') OVOLlabels.append(f'OVOL{i+1}') - - # state_info is some generic things about the simulation - must be numba safe, so we don't just pass the whole state which is not + + ####################################################################################### + # the following section (1 of 3) added to HYDR by rb to handle dynamic code and special actions + ####################################################################################### + # state_info is some generic things about the simulation + # must be numba safe, so we don't just pass the whole state which is not state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] @@ -147,18 +153,12 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): state_paths = state['state_paths'] # initialize the hydr paths in case they don't already reside here hydr_init_ix(state_ix, state_paths, state['domain']) - - - 
########################################################################### - # OM - load the tokens to pass in. - ########################################################################### op_tokens = state['op_tokens'] + ####################################################################################### - ########################################################################### - # Do the simulation with _hydr_() - ########################################################################### - errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens) # run reaches simulation code - ########################################################################### + # Do the simulation with _hydr_ (ie run reaches simulation code) + errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, + state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens) if 'O' in ts: del ts['O'] if 'OVOL' in ts: del ts['OVOL'] @@ -301,8 +301,11 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf # other initial vars rovol = 0.0 volev = 0.0 - IVOL0 = ts['IVOL'] # the actual inflow in simulation native units - # prepare for dynamic state + IVOL0 = ts['IVOL'] # the actual inflow in simulation native units + + ####################################################################################### + # the following section (2 of 3) added by rb to HYDR, this one to prepare for dynamic state including special actions + ####################################################################################### hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) # these are integer placeholders faster than calling the array look each timestep o1_ix, o2_ix, o3_ix, ivol_ix = hydr_ix['O1'], hydr_ix['O2'], hydr_ix['O3'], hydr_ix['IVOL'] @@ -315,14 +318,19 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, 
OVOLlabels, state_inf out_ix[1] = o2_ix if nexits > 2: out_ix[2] = o3_ix + ####################################################################################### + # HYDR (except where noted) for step in range(steps): - # call specl convf = CONVF[step] outdgt[:] = OUTDGT[step, :] colind[:] = COLIND[step, :] roseff = ro oseff[:] = o[:] + + ####################################################################################### + # the following section (3 of 3) added by rb to accommodate dynamic code, operations models, and special actions + ####################################################################################### # set state_ix with value of local state variables and/or needed vars # Note: we pass IVOL0, not IVOL here since IVOL has been converted to different units state_ix[ro_ix], state_ix[rovol_ix] = ro, rovol @@ -331,24 +339,19 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf state_ix[out_ix[oi]] = outdgt[oi] state_ix[vol_ix], state_ix[ivol_ix] = vol, IVOL0[step] state_ix[volev_ix] = volev - # ***************************************** - # Execute dynamic code if enabled # - these if statements may be irrelevant if default functions simply return # when no objects are defined. if (state_info['state_step_om'] == 'enabled'): pre_step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) if (state_info['state_step_hydr'] == 'enabled'): state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) - # Execute dynamic code if enabled if (state_info['state_step_om'] == 'enabled'): #print("trying to execute state_step_om()") # op_tokens[0] contains the model exec list. Later we may amend this # perhaps even storing a domain specific exec list under domain/exec_list? 
- step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) - # Execute dynamic code if enabled + step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) # traditional 'ACTIONS' done in here if ( (state_info['state_step_hydr'] == 'enabled') - or (state_info['state_step_om'] == 'enabled') - ): + or (state_info['state_step_om'] == 'enabled') ): # Do write-backs for editable STATE variables # OUTDGT is writeable for oi in range(nexits): @@ -358,7 +361,8 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf # maybe routines should do this, and this is not needed (but pass VFACT in state) IVOL[step] = state_ix[ivol_ix] * VFACT # End dynamic code step() - # ***************************************** + ####################################################################################### + # vols, sas variables and their initializations not needed. if irexit >= 0: # irrigation exit is set, zero based number if rirwdl > 0.0: # equivalent to OVOL for the irrigation exit diff --git a/HSP2/main.py b/HSP2/main.py index 2ab5f7fe..51d24079 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -78,8 +78,8 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state - state_load_dynamics_specl(state, io_manager, siminfo) - state_load_dynamics_om(state, io_manager, siminfo) + state_load_dynamics_specl(state, io_manager, siminfo) # traditional special actions + state_load_dynamics_om(state, io_manager, siminfo) # operational model for custom python # finalize all dynamically loaded components and prepare to run the model state_om_model_run_prep(state, io_manager, siminfo) ####################################################################################### @@ -130,7 +130,7 @@ def 
main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None continue msg(3, f'{activity}') - # Set context for dynamic executables. + # Set context for dynamic executables and special actions state_context_hsp2(state, operation, segment, activity) ui = uci[(operation, activity, segment)] # ui is a dictionary From 798f0499dcd71edf7613f1f64be16afd2e15f9fb Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Wed, 10 Jan 2024 17:26:09 -0500 Subject: [PATCH 14/35] first cut at changes required to implement sample SEDTRN special actions --- HSP2/SEDTRN.py | 58 ++++++++++++++++++++++++++++++++++++++++++++++---- HSP2/SPECL.py | 9 ++++++-- HSP2/main.py | 5 +++++ HSP2/state.py | 23 ++++++++++++++++++-- 4 files changed, 87 insertions(+), 8 deletions(-) diff --git a/HSP2/SEDTRN.py b/HSP2/SEDTRN.py index f78b8f83..082aa38b 100644 --- a/HSP2/SEDTRN.py +++ b/HSP2/SEDTRN.py @@ -3,12 +3,16 @@ License: LGPL2 ''' -from numpy import array, zeros, where, int64 +from numpy import array, zeros, where, int64, arange from math import log10, exp from numba import njit from HSP2.ADCALC import advect from HSP2.utilities import make_numba_dict +# the following imports added to handle special actions +from HSP2.om import * +from HSP2.om_model_linkage import * + ERRMSGS =('SEDTRN: Warning -- bed storage of sediment size fraction sand is empty', #ERRMSG0 'SEDTRN: Warning -- bed storage of sediment size fraction silt is empty', #ERRMSG1 'SEDTRN: Warning -- bed storage of sediment size fraction clay is empty', #ERRMSG2 @@ -17,7 +21,7 @@ 'SEDTRN: Simulation of sediment requires all 3 "auxiliary flags" (AUX1FG, etc) in section HYDR must be turned on', #ERRMSG5 'SEDTRN: When specifying the initial composition of the bed, the fraction of sand, silt, and clay must sum to a value close to 1.0.') #ERRMSG6 -def sedtrn(io_manager, siminfo, uci, ts): +def sedtrn(io_manager, siminfo, uci, ts, state): ''' Simulate behavior of inorganic sediment''' # simlen = siminfo['steps'] @@ -68,8 +72,31 @@ def 
sedtrn(io_manager, siminfo, uci, ts): ui['clay_taucs'] = ui_clay['TAUCS'] ui['clay_m'] = ui_clay['M'] * delt60 / 24.0 * 4.880 # convert erodibility coeff from /day to /ivl + ####################################################################################### + # the following section (1 of 3) added to SEDTRN by pbd to handle special actions + ####################################################################################### + # state_info is some generic things about the simulation + # must be numba safe, so we don't just pass the whole state which is not + state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) + state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] + state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] + # hsp2_local_py = state['hsp2_local_py'] + # # It appears necessary to load this here, instead of from main.py, otherwise, + # # _hydr_() does not recognize the function state_step_hydr()? 
+ # if (hsp2_local_py != False): + # from hsp2_local_py import state_step_hydr + # else: + # from HSP2.state_fn_defaults import state_step_hydr + # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts + state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix'] + state_paths = state['state_paths'] + # initialize the sedtrn paths in case they don't already reside here + sedtrn_init_ix(state_ix, state_paths, state['domain']) + op_tokens = state['op_tokens'] + ####################################################################################### + ############################################################################ - errors = _sedtrn_(ui, ts) # run SEDTRN simulation code + errors = _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens) # run SEDTRN simulation code ############################################################################ if nexits > 1: @@ -91,7 +118,7 @@ def sedtrn(io_manager, siminfo, uci, ts): return errors, ERRMSGS @njit(cache=True) -def _sedtrn_(ui, ts): +def _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens): ''' Simulate behavior of inorganic sediment''' errorsV = zeros(int(ui['errlen'])).astype(int64) @@ -291,8 +318,31 @@ def _sedtrn_(ui, ts): #################### END PSED + ####################################################################################### + # the following section (2 of 3) added by pbd to SEDTRN, this one to prepare for special actions + ####################################################################################### + sedtrn_ix = sedtrn_get_ix(state_ix, state_paths, state_info['domain']) + # these are integer placeholders faster than calling the array look each timestep + rsed4_ix, rsed5_ix, rsed6_ix = sedtrn_ix['RSED4'], sedtrn_ix['RSED5'], sedtrn_ix['RSED6'] + ####################################################################################### + for loop in range(simlen): + 
####################################################################################### + # the following section (3 of 3) added by pbd to accommodate special actions + ####################################################################################### + # set state_ix with value of local state variables and/or needed vars + state_ix[rsed4_ix] = sand_wt_rsed4 + state_ix[rsed5_ix] = silt_wt_rsed5 + state_ix[rsed6_ix] = clay_wt_rsed6 + if (state_info['state_step_om'] == 'enabled'): + step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, loop) # traditional 'ACTIONS' done in here + # Do write-backs for editable STATE variables + sand_wt_rsed4 = state_ix[rsed4_ix] + silt_wt_rsed5 = state_ix[rsed5_ix] + clay_wt_rsed6 = state_ix[rsed6_ix] + ####################################################################################### + # perform any necessary unit conversions if uunits == 2: # uci is in metric units avvele = AVVEL[loop] * 3.28 diff --git a/HSP2/SPECL.py b/HSP2/SPECL.py index efd63196..cde37346 100644 --- a/HSP2/SPECL.py +++ b/HSP2/SPECL.py @@ -2,7 +2,7 @@ Notes: - code for parsing UCI SPEC-ACTIONS is in HSP2tools/readUCI.py - code for object classes that transform parsed data into OP codes for OM and STATE support - is in this directory tree as om_special_[action type].py, + is in this directory tree as om_special_[action type].py, - Ex: om_special_action.py contains object support and runtime functions for classic ACTIONS ''' @@ -29,7 +29,12 @@ def specl_load_actions(state, io_manager, siminfo): state['model_data'][opname]['name'] = opname for ik in speca.keys(): #print("looking for speca key ", ik) - state['model_data'][opname][ik] = speca.to_dict()[ik][ix] + state['model_data'][opname][ik] = speca.to_dict()[ik][ix] # add subscripts? 
+ if ik == 'VARI': + if len(speca.to_dict()['S1'][ix]) > 0: + state['model_data'][opname][ik] += speca.to_dict()['S1'][ix] + if len(speca.to_dict()['S2'][ix]) > 0: + state['model_data'][opname][ik] += speca.to_dict()['S2'][ix] state['model_data'][opname]['object_class'] = 'SpecialAction' #print("model_data", ix, " = ", state['model_data'][opname]) return diff --git a/HSP2/main.py b/HSP2/main.py index 51d24079..194e2b86 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -76,6 +76,9 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None state_context_hsp2(state, operation, segment, activity) print("Init HYDR state context for domain", state['domain']) hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) + elif activity == 'SEDTRN': + state_context_hsp2(state, operation, segment, activity) + sedtrn_init_ix(state['state_ix'], state['state_paths'], state['domain']) # - finally stash specactions in state, not domain (segment) dependent so do it once state['specactions'] = specactions # stash the specaction dict in state state_load_dynamics_specl(state, io_manager, siminfo) # traditional special actions @@ -243,6 +246,8 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None if operation not in ['COPY','GENER']: if (activity == 'HYDR'): errors, errmessages = function(io_manager, siminfo, ui, ts, ftables, state) + elif (activity == 'SEDTRN'): + errors, errmessages = function(io_manager, siminfo, ui, ts, state) elif (activity != 'RQUAL'): errors, errmessages = function(io_manager, siminfo, ui, ts) else: diff --git a/HSP2/state.py b/HSP2/state.py index 0e914a42..34160c2b 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -124,7 +124,7 @@ def state_context_hsp2(state, operation, segment, activity): state['segment'] = segment # state['activity'] = activity # give shortcut to state path for the upcoming function - state['domain'] = "/STATE/" + operation + "_" + segment # + "/" + activity + state['domain'] = 
"/STATE/" + operation + "_" + segment # + "/" + activity # may want to comment out activity? def state_siminfo_hsp2(uci_obj, siminfo): # Add crucial simulation info for dynamic operation support @@ -152,7 +152,17 @@ def hydr_init_ix(state_ix, state_paths, domain): #var_path = f'{domain}/{i}' var_path = domain + "/" + i hydr_ix[i] = set_state(state_ix, state_paths, var_path, 0.0) - return hydr_ix + return hydr_ix + +def sedtrn_init_ix(state_ix, state_paths, domain): + # get a list of keys for all sedtrn state variables + sedtrn_state = ["RSED4","RSED5","RSED6"] + sedtrn_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64) + for i in sedtrn_state: + #var_path = f'{domain}/{i}' + var_path = domain + "/" + i + sedtrn_ix[i] = set_state(state_ix, state_paths, var_path, 0.0) + return sedtrn_ix @njit def hydr_get_ix(state_ix, state_paths, domain): @@ -165,6 +175,15 @@ def hydr_get_ix(state_ix, state_paths, domain): hydr_ix[i] = state_paths[var_path] return hydr_ix +def sedtrn_get_ix(state_ix, state_paths, domain): + # get a list of keys for all sedtrn state variables + sedtrn_state = ["RSED4", "RSED5", "RSED6"] + sedtrn_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64) + for i in sedtrn_state: + var_path = domain + "/" + i + sedtrn_ix[i] = state_paths[var_path] + return sedtrn_ix + # function to dynamically load module, based on "Using imp module" in https://www.tutorialspoint.com/How-I-can-dynamically-import-Python-module# #def dynamic_module_import(module_name, class_name): def dynamic_module_import(local_name, local_path, module_name): From dd21d9e73bda39eb01c12255c203514046e63eef Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 24 Jan 2024 10:23:47 -0500 Subject: [PATCH 15/35] added partial code support for specl counter and date to begin --- HSP2/om_special_action.py | 77 +++++++++++++++++++++++++++------------ 1 file changed, 54 insertions(+), 23 deletions(-) diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py 
index 344bfd0f..44f99c24 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -6,13 +6,13 @@ from HSP2.om_model_object import ModelObject from numba import njit class SpecialAction(ModelObject): - def __init__(self, name, container = False, model_props = []): + def __init__(self, name, container = False, model_props = {}): super(SpecialAction, self).__init__(name, container, model_props) self.optype = 100 # Special Actions start indexing at 100 def parse_model_props(self, model_props, strict=False): - print("SpecialAction.parse_model_props() called") + super().parse_model_props(model_props, strict) # comes in as row from special ACTIONS table # ex: { # 'OPTYP': 'RCHRES', 'RANGE1': '1', 'RANGE2': '', 'DC': 'DY', 'DS': '', @@ -24,13 +24,18 @@ def parse_model_props(self, model_props, strict=False): # - action(operation) to perform = AC # - operand2, a numeric value for simple ACTION = [VALUE] # note: [op_abbrev] is *maybe* the first letter of the OPTYP? Not a very good idea to have a coded convention like that + print("Creating ACTION with props", model_props) self.op_type = self.handle_prop(model_props, 'OPTYP') self.range1 = self.handle_prop(model_props, 'RANGE1') self.range2 = self.handle_prop(model_props, 'RANGE2') - self.ac = self.handle_prop(model_props, 'AC') # must handle this before we handle the operand to check for DIV by Zero + self.ac = '=' # set the default, and also adds a property for later testing. 
+ self.ac = self.handle_prop(model_props, 'AC') # must handle this before we handle the operand VALUE to check for DIV by Zero self.vari = self.handle_prop(model_props, 'VARI') self.op2_val = self.handle_prop(model_props, 'VALUE') self.op2_ix = self.constant_or_path('op_val', self.op2_val) # constant values must be added to STATE and thus are referenced by their state_ix number + self.num = self.handle_prop(model_props, 'NUM', False, 1) # number of times to perform action + self.timer_ix = self.handle_prop(model_props, 'when', False, 1) # when to begin the first attempt at action + self.ctr_ix = self.constant_or_path('ctr', 0) # this initializes the counter for how many times an action has been performed # now add the state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) # @tbd: support time enable/disable @@ -45,6 +50,13 @@ def handle_prop(self, model_props, prop_name, strict = False, default_value = No raise Exception("Error: in properties passed to "+ self.name + " AC must be non-zero or non-Null . Object creation halted. Path to object with error is " + self.state_path) if (prop_name == 'AC'): self.handle_ac(prop_val) + if (prop_name == 'when'): + # when to perform this? timestamp or time-step index + # TODO: find the timestep matching the date supplied for now we just do 0, + # does SimTimer or oter HSP2 code have a way of translating date or Unix timestamp to step? 
+ prop_val = 0 + if (prop_name == 'NUM') and (pop_val == ''): + prop_val = default_value return prop_val def handle_ac(self, ac): @@ -71,19 +83,24 @@ def handle_ac(self, ac): # 11 LN T= Ln(A) # 12 LOG T= Log10(A) # 13 MOD T= Mod(T,A) - if not is_float_digit(ac): - if not ac in cop_codes: + if not (is_float_digit(ac)): + if not (ac in cop_codes.keys()): raise Exception("Error: in "+ self.name + " AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path) - else: - # this will fail catastrophically if the requested function is not supported - # which is a good thing - ac = cop_codes[ac] - self.opid = ac + opid = cop_codes[ac] + self.ac = ac + else: + # this will fail catastrophically if the requested function is not supported + # which is a good thing + if not (ac in cop_codes.values()): + raise Exception("Error: in "+ self.name + "numeric AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path) + opid = ac + self.ac = list(cop_codes.keys())[list(cop_codes.values()).index(ac) ] + self.opid = opid def tokenize(self): # call parent method to set basic ops common to all super().tokenize() # sets self.ops = op_type, op_ix - self.ops = self.ops + [self.inputs_ix['op1'], self.opid, self.op2_ix] + self.ops = self.ops + [self.inputs_ix['op1'], self.opid, self.op2_ix, self.timer_ix, self.ctr_ix, self.num] # @tbd: check if time ops have been set and tokenize accordingly print("Specl", self.name, "tokens", self.ops) @@ -101,7 +118,7 @@ def hdf5_load_all(hdf_source): # njit functions for runtime -@njit +@njit(cache=True) def step_special_action(op, state_ix, dict_ix, step): ix = op[1] # ID of this op # these indices must be adjusted to reflect the number of common op tokens @@ -112,22 +129,36 @@ def step_special_action(op, state_ix, dict_ix, step): # @tbd: check if time ops have been set and enable/disable accordingly # - 2 ops will be added for each time matching switch, the state_ix 
of the time element (year, month, ...) and the state_ix of the constant to match # - matching should be as simple as if (state_ix[tix1] <> state_ix[vtix1]): return state_ix[ix1] (don't modify the value) - # + # - alternative: save the integer timestamp or timestep of the start, and if step/stamp > value, enable + # @tbd: add number of repeats, and save the value of repeats in a register ix1 = op[2] # ID of source of data and destination of data sop = op[3] ix2 = op[4] - if sop == 1: - result = state_ix[ix2] - if sop == 2: - result = state_ix[ix1] + state_ix[ix2] - if sop == 3: - result = state_ix[ix1] - state_ix[ix2] - if sop == 4: - result = state_ix[ix1] * state_ix[ix2] - if sop == 5: - result = state_ix[ix1] / state_ix[ix2] + tix = op[5] # which slot is the time comparison in? + if (step < state_ix[tix]): + return + ctr_ix = op[6] # id of the counter variable + num_ix = op[7] # max times to complete + num_done = state_ix[ctr_ix] + num = state_ix[num_ix] # num to complete + if (num_done >= num): + return + else: + if sop == 1: + result = state_ix[ix2] + elif sop == 2: + result = state_ix[ix1] + state_ix[ix2] + elif sop == 3: + result = state_ix[ix1] - state_ix[ix2] + elif sop == 4: + result = state_ix[ix1] * state_ix[ix2] + elif sop == 5: + result = state_ix[ix1] / state_ix[ix2] + # set value in target # tbd: handle this with a model linkage? 
cons: this makes a loop since the ix1 is source and destination + state_ix[ix1] = result + state_ix[op[1]] = result return result From 275d1945b3c104380fbea6b50c914fdfe602372a Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 24 Jan 2024 19:00:29 -0500 Subject: [PATCH 16/35] fix incorrect hand off creation to base class ModelObject to enable storage of siminfo in model_props_parsed --- HSP2/om_sim_timer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HSP2/om_sim_timer.py b/HSP2/om_sim_timer.py index 33056fa6..21d8d6d0 100644 --- a/HSP2/om_sim_timer.py +++ b/HSP2/om_sim_timer.py @@ -11,7 +11,7 @@ class SimTimer(ModelObject): def __init__(self, name, container, siminfo): - super(SimTimer, self).__init__(name, container) + super(SimTimer, self).__init__(name, container, siminfo) self.state_path = '/STATE/timer' self.time_array = self.dti_to_time_array(siminfo) # creates numpy formatted array of year, mo, day, ... for each timestep self.date_path_ix = [] # where are the are components stored in the state_ix Dict From 5a26f19ab8ddb017697a4277c472dbfcec3c21cd Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Thu, 25 Jan 2024 16:31:16 -0500 Subject: [PATCH 17/35] om.py -- in step_one, handle case where none is returned om_special_action.py -- handle simple dated special actions --- HSP2/om.py | 4 +++- HSP2/om_special_action.py | 16 +++++++++++----- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/HSP2/om.py b/HSP2/om.py index 39a267e9..56d5086e 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -459,7 +459,9 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): pass #step_simple_channel(ops, state_ix, dict_ix, step) # Op 100 is Basic ACTION in Special Actions elif ops[0] == 100: - state_ix[ops[1]] = step_special_action(ops, state_ix, dict_ix, step) + result = step_special_action(ops, state_ix, dict_ix, step) + if result: + state_ix[ops[1]] = result return diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py 
index 44f99c24..9c450eef 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -52,10 +52,16 @@ def handle_prop(self, model_props, prop_name, strict = False, default_value = No self.handle_ac(prop_val) if (prop_name == 'when'): # when to perform this? timestamp or time-step index - # TODO: find the timestep matching the date supplied for now we just do 0, - # does SimTimer or oter HSP2 code have a way of translating date or Unix timestamp to step? prop_val = 0 - if (prop_name == 'NUM') and (pop_val == ''): + si = ModelObject.model_object_cache['/STATE/timer'] + if len(model_props['YR']) > 0: + # translate date to equivalent model step + datestring = model_props['YR'] + '-' + model_props['MO'] + '-' + \ + model_props['DA'] + ' ' + model_props['HR'] + ':' + \ + model_props['MN'] + ':00' + if datestring in si.model_props_parsed['tindex']: + prop_val = si.model_props_parsed['tindex'].get_loc(datestring) + if (prop_name == 'NUM') and (prop_val == ''): prop_val = default_value return prop_val @@ -135,13 +141,13 @@ def step_special_action(op, state_ix, dict_ix, step): sop = op[3] ix2 = op[4] tix = op[5] # which slot is the time comparison in? 
- if (step < state_ix[tix]): + if (tix in state_ix and step < state_ix[tix]): return ctr_ix = op[6] # id of the counter variable num_ix = op[7] # max times to complete num_done = state_ix[ctr_ix] num = state_ix[num_ix] # num to complete - if (num_done >= num): + if (tix in state_ix and num_done >= num): return else: if sop == 1: From f21f10ac512f126bdc065765ad60d0b89e3b1a70 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 30 Jan 2024 12:31:24 -0500 Subject: [PATCH 18/35] changed to fixed width integer array for tokens and do not iterate through constants and other non-executables to improve performance --- HSP2/om.py | 244 ++++++++++++++---- HSP2/om_model_linkage.py | 34 ++- HSP2/om_model_object.py | 83 +++++- .../HSP2results/PL3_5250_0001.json.disabled | 18 ++ .../testcbp/HSP2results/PL3_5250_nospecl.uci | 230 +++++++++++++++++ 5 files changed, 529 insertions(+), 80 deletions(-) create mode 100644 tests/testcbp/HSP2results/PL3_5250_0001.json.disabled create mode 100644 tests/testcbp/HSP2results/PL3_5250_nospecl.uci diff --git a/HSP2/om.py b/HSP2/om.py index 56d5086e..5d8207b1 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -12,7 +12,7 @@ import numpy as np import time from numba.typed import Dict -from numpy import zeros +from numpy import zeros, int32 from numba import int8, float32, njit, types, typed # import the types import random # this is only used for a demo so may be deprecated from HSP2.state import * @@ -61,13 +61,16 @@ def is_float_digit(n: str) -> bool: #from HSP2.om_data_matrix import * #from HSP2.om_model_broadcast import * #from HSP2.om_simple_channel import * +#from HSP2.om_impoundment import * from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries def init_om_dicts(): """ The base dictionaries used to store model object info """ - op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) + op_tokens = ModelObject.make_op_tokens() # this is just to start, layer it is resized 
to the object needs + # Was + #op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) model_object_cache = {} # this does not need to be a special Dict as it is not used in numba return op_tokens, model_object_cache @@ -106,7 +109,7 @@ def state_load_om_python(state, io_manager, siminfo): hsp2_local_py = state['hsp2_local_py'] # Load a function from code if it exists if 'om_init_model' in dir(hsp2_local_py): - hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokenModelObject.model_object_caches'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) + hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokens'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) def state_load_dynamics_om(state, io_manager, siminfo): @@ -116,8 +119,8 @@ def state_load_dynamics_om(state, io_manager, siminfo): op_tokens, model_object_cache = init_om_dicts() state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] # set globals on ModelObject, this makes them persistent throughout all subsequent object instantiation and use - ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache = ( - op_tokens, state_paths, state_ix, dict_ix, model_object_cache + ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache, ModelObject.ts_ix = ( + op_tokens, state_paths, state_ix, dict_ix, model_object_cache, ts_ix ) state['op_tokens'], state['model_object_cache'] = op_tokens, model_object_cache # load dynamic coding libraries if defined by user @@ -125,6 +128,7 @@ def state_load_dynamics_om(state, io_manager, siminfo): # occuring within this function call, since this function is also called from another runtime engine # but if things fail post develop-specact-1 pull requests we may 
investigate here # also, it may be that this should be loaded elsewhere? + # comment state_load_om_python() to disable dynamic python state_load_om_python(state, io_manager, siminfo) state_load_om_json(state, io_manager, siminfo) return @@ -152,12 +156,12 @@ def state_om_model_run_prep(state, io_manager, siminfo): print("Tokenizing models") model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) # model_exec_list is the ordered list of component operations - print("model_exec_list:", model_exec_list) - # This is used to stash the model_exec_list -- is this used? - op_tokens[0] = np.asarray(model_exec_list, dtype="i8") + print("op_tokens has", len(model_object_cache),"elements") + #print("model_exec_list(", len(model_exec_list),"items):", model_exec_list) + # This is used to stash the model_exec_list in the dict_ix, this might be slow, need to verify. # the resulting set of objects is returned. state['model_object_cache'] = model_object_cache - state['op_tokens'] = op_tokens + state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") state['state_step_om'] = 'disabled' if len(op_tokens) > 0: state['state_step_om'] = 'enabled' @@ -191,22 +195,12 @@ def model_class_loader(model_name, model_props, container = False): # for attributes to pass in. # ".get()" will return NoValue if it does not exist or the value. if object_class == 'Equation': - eqn = model_props.get('equation') - if type(eqn) is str: - eqn_str = eqn - else: - if eqn == None: - # try for equation stored as normal propcode - eqn_str = model_props.get('value') - else: - eqn_str = eqn.get('value') - if eqn_str == None: - raise Exception("Equation object", model_name, "does not have a valid equation string. Halting. 
") - return False - model_object = Equation(model_props.get('name'), container, eqn_str ) + model_object = Equation(model_props.get('name'), container, model_props ) #remove_used_keys(model_props, elif object_class == 'SimpleChannel': model_object = SimpleChannel(model_props.get('name'), container, model_props ) + elif object_class == 'Impoundment': + model_object = Impoundment(model_props.get('name'), container, model_props ) elif object_class == 'Constant': model_object = ModelConstant(model_props.get('name'), container, model_props.get('value') ) elif ( object_class.lower() == 'datamatrix'): @@ -219,7 +213,6 @@ def model_class_loader(model_name, model_props, container = False): model_object = DataMatrix(model_props.get('name'), container, model_props) elif object_class == 'ModelBroadcast': # add a matrix with the data, then add a matrix accessor for each required variable - #print("Loading ModelBroadcast class ") has_props = ModelBroadcast.check_properties(model_props) if has_props == False: print("ModelBroadcast object must have", ModelBroadcast.required_properties()) @@ -239,8 +232,8 @@ def model_class_loader(model_name, model_props, container = False): elif object_class == 'SpecialAction': model_object = SpecialAction(model_props.get('name'), container, model_props) else: - print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject") - model_object = ModelObject(model_props.get('name'), container) + #print("Loading", model_props.get('name'), "with object_class", object_class,"as ModelObject") + model_object = ModelObject(model_props.get('name'), container, model_props) # one way to insure no class attributes get parsed as sub-comps is: # model_object.remove_used_keys() if len(model_object.model_props_parsed) == 0: @@ -269,11 +262,11 @@ def model_class_translate(model_props, object_class): model_props['object_class'] = 'SimpleChannel' print("Handling USGSChannelGeomObject_sub as SimpleChannel") if object_class == 
'hydroImpoundment': - model_props['object_class'] = 'SimpleImpoundment' - print("Handling hydroImpoundment as SimpleImpoundment") + model_props['object_class'] = 'Impoundment' + print("Handling hydroImpoundment as Impoundment") if object_class == 'hydroImpSmall': - model_props['object_class'] = 'SimpleImpoundment' - print("Handling hydroImpSmall as SimpleImpoundment") + model_props['object_class'] = 'Impoundment' + print("Handling hydroImpSmall as Impoundment") def model_loader_recursive(model_data, container): k_list = model_data.keys() @@ -336,6 +329,7 @@ def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, that are sending to that broadcast? - Or is it better to let it as it is, """ + #print("Handling", model_object.name, " ", model_object.state_path) if model_object.ix in model_exec_list: return if model_object.ix in model_touch_list: @@ -372,9 +366,60 @@ def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, return # now after tokenizing all inputs this should be OK to tokenize model_object.add_op_tokens() - model_exec_list.append(model_object.ix) + if model_object.optype in ModelObject.runnables: + model_exec_list.append(model_object.ix) +def model_order_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): + """ + Given a root model_object, trace the inputs to load things in order + Store this order in model_exec_list + Note: All ordering is as-needed organic, except Broadcasts + - read from children is completed after all other inputs + - read from parent is completed before all other inputs + - could this be accomplished by more sophisticated handling of read + broadcasts? + - When loading a read broadcast, can we iterate through items + that are sending to that broadcast? 
+ - Or is it better to let it as it is, + """ + if model_object.ix in model_exec_list: + return + if model_object.ix in model_touch_list: + #print("Already touched", model_object.name, model_object.ix, model_object.state_path) + return + # record as having been called, and will ultimately return, to prevent recursions + model_touch_list.append(model_object.ix) + k_list = model_object.inputs.keys() + input_names = dict.fromkeys(k_list , 1) + if type(input_names) is not dict: + return + # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? + # is this a limitation of treating all children as inputs? + # alternative, leave broadcasts organic, but load children first? + # children first, then local sub-comps is old method? old method: + # - read parent broadcasts + # - get inputs (essentially, linked vars) + # - send child broadcasts (will send current step parent reads, last step local proc data) + # - execute children + # - execute local sub-comps + for input_name in input_names: + #print("Checking input", input_name) + input_path = model_object.inputs[input_name] + if input_path in model_object_cache.keys(): + input_object = model_object_cache[input_path] + model_order_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) + else: + if input_path in model_object.state_paths.keys(): + # this is a valid state reference without an object + # thus, it is likely part of internals that are manually added + # which should be fine. tho perhaps we should have an object for these too. 
+ continue + print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") + return + # now after loading input dependencies, add this to list + model_exec_list.append(model_object.ix) + def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts): # Decide on using from utilities.py: # - save_timeseries(io_manager, ts, savedict, siminfo, saveall, operation, segment, activity, compress=True) @@ -385,42 +430,30 @@ def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts): return @njit -def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps): +def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, dstep = -1): checksum = 0.0 for step in range(steps): pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, dstep) + #print("Steps completed", step) return checksum @njit def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): for i in model_exec_list: - if op_tokens[i][0] == 1: - pass - elif op_tokens[i][0] == 2: - pass - elif op_tokens[i][0] == 3: - pass - elif op_tokens[i][0] == 4: - pass - elif op_tokens[i][0] == 5: - pass - elif op_tokens[i][0] == 12: + if op_tokens[i][0] == 12: # register type data (like broadcast accumulators) - # disabled till broadcasts are defined pre_step_register(op_tokens[i], state_ix, dict_ix) - pass + pass#pre_step_register(op_tokens[i], state_ix, dict_ix) return -@njit +@njit def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): val = 0 for i in model_exec_list: step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) return -@njit -def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - return + @njit def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): @@ -431,7 +464,7 @@ 
def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): if debug == 1: print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) if ops[0] == 1: - pass #state_ix[ops[1]] = step_equation(ops, state_ix) + pass #step_equation(ops, state_ix) elif ops[0] == 2: # todo: this should be moved into a single function, # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) @@ -441,12 +474,12 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): # this insures a matrix with variables in it is up to date # only need to do this if the matrix data and matrix config are on same object # otherwise, the matrix data is an input and has already been evaluated - pass# state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) + pass #exec_tbl_values(ops, state_ix, dict_ix) if (ops[3] > 0): # this evaluates a single value from a matrix if the matrix is configured to do so. if debug == 1: print("DEBUG: Calling exec_tbl_eval", ops) - pass# state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + pass #exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) elif ops[0] == 3: step_model_link(ops, state_ix, ts_ix, step) elif ops[0] == 4: @@ -459,11 +492,67 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): pass #step_simple_channel(ops, state_ix, dict_ix, step) # Op 100 is Basic ACTION in Special Actions elif ops[0] == 100: - result = step_special_action(ops, state_ix, dict_ix, step) - if result: - state_ix[ops[1]] = result + step_special_action(ops, state_ix, dict_ix, step) + return + +@njit +def step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, debug_step = -1): + val = 0 + for i in model_exec_list: + ops = op_tokens[i] + if (step == debug_step): + print("Exec'ing step ", step, " model ID", i) + # op_tokens is passed in for ops like matrices that have lookups from other + # locations. 
All others rely only on ops + # todo: decide if all step_[class() functions should set value in state_ix instead of returning value? + val = 0 + if ops[0] == 1: + step_equation(ops, state_ix) + elif ops[0] == 2: + # todo: this should be moved into a single function, + # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) + if (ops[1] == ops[2]): + # this insures a matrix with variables in it is up to date + # only need to do this if the matrix data and matrix config are on same object + # otherwise, the matrix data is an input and has already been evaluated + state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) + if (ops[3] > 0): + # this evaluates a single value from a matrix if the matrix is configured to do so. + state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + elif ops[0] == 3: + step_model_link(ops, state_ix, ts_ix, step) + continue + elif ops[0] == 5: + step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) + elif ops[0] == 9: + continue + elif ops[0] == 13: + step_simple_channel(ops, state_ix, dict_ix, step) + # Op 100 is Basic ACTION in Special Actions + elif ops[0] == 100: + step_special_action(ops, state_ix, dict_ix, step) + return + +@njit +def step_model_pcode(model_exec_list, op_tokens, state_info, state_paths, state_ix, dict_ix, ts_ix, step): + ''' + This routine includes support for dynamically loaded python code which is powerful but slow + This is not yet implemented anywhere, just an idea. But in theory it would allow easy switching between + the faster runtime without dynamic code if the user did not request it. + At minimum, this could be used to more efficiently enable/disable this feature for testing by simply calling + a separate routine. + - to do so we would need to add state_paths to the variables passed to step_model which should be OK? 
+ ''' + hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) # could be done more efficiently, once per model run + state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) + val = 0 + for i in model_exec_list: + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) return +@njit +def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + return @njit def test_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): @@ -474,3 +563,48 @@ def test_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): print(op_tokens[i]) step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) return + +def step_object(thisobject, step): + # this calls the step for a given model object and timestep + # this is a workaround since the object method ModelObject.step() fails to find the step_one() function ? + step_one(thisobject.op_tokens, thisobject.op_tokens[thisobject.ix], thisobject.state_ix, thisobject.dict_ix, thisobject.ts_ix, step) + + +@njit +def pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + for i in model_exec_list: + ops = op_tokens[i] + #for i in model_exec_list: + # op = op_tokens[i] + if ops[0] == 12: + # register type data (like broadcast accumulators) + pre_step_register(ops, state_ix, dict_ix) + continue + #elif ops[0] == 1: + # # register type data (like broadcast accumulators) + # continue + return + +@njit +def test_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): +# for i in model_exec_list: +# ops = op_tokens[i] + for ops in op_tokens: + #step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step) + continue + return + +@njit +def iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, debug_step = -1): + checksum = 0.0 + for step in range(steps): + pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, 
step, debug_step) + #print("Steps completed", step) + return checksum + +def time_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps): + start = time.time() + iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps) + end = time.time() + print(len(model_exec_list), "components iterated over", siminfo['steps'], "time steps took" , end - start, "seconds") diff --git a/HSP2/om_model_linkage.py b/HSP2/om_model_linkage.py index b005743a..d4b4ac61 100644 --- a/HSP2/om_model_linkage.py +++ b/HSP2/om_model_linkage.py @@ -8,12 +8,12 @@ from HSP2.om_model_object import ModelObject from numba import njit class ModelLinkage(ModelObject): - def __init__(self, name, container = False, model_props = []): - super(ModelLinkage, self).__init__(name, container) + def __init__(self, name, container = False, model_props = {}): + super(ModelLinkage, self).__init__(name, container, model_props) # ModelLinkage copies a values from right to left # right_path: is the data source for the link # left_path: is the destination of the link - # left_path: is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path + # - is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path # - left_path parameter is only needed for pushes (type 4 and 5) # - the push is functionally equivalent to a pull whose path resolves to the specified left_path # - but the push allows the potential for multiple objects to set a single state @@ -26,21 +26,32 @@ def __init__(self, name, container = False, model_props = []): # this is required print("Error: a link must have a container object to serve as the destination") return False - right_path = self.handle_prop(model_props, 'right_path') - link_type = self.handle_prop(model_props, 'link_type', False, 0) - left_path = self.handle_prop(model_props, 'left_path') + self.right_path = self.handle_prop(model_props, 'right_path') + self.link_type = self.handle_prop(model_props, 'link_type', 
False, 0) + self.left_path = self.handle_prop(model_props, 'left_path') if self.left_path == False: # self.state_path gets set when creating at the parent level self.left_path = self.state_path - # this breaks for some reason, doesn't like the input name being different than the variable path ending? + if (self.link_type == 0): + # if this is a simple input we remove the object from the model_object_cache, and pass back to parent as an input + del self.model_object_cache[self.state_path] + del self.state_ix[self.ix] + container.add_input(self.name, self.right_path) + # this breaks for some reason, doesn't like the input name being different than the variable path ending? + # maybe because we should be adding the input to the container, not the self? self.add_input(self.right_path, self.right_path) def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): # parent method handles most cases, but subclass handles special situations. prop_val = super().handle_prop(model_props, prop_name, strict, default_value) - if ( (prop_name == 'right_value') and (prop_val == None) or (prop_val == '')): + if ( (prop_name == 'right_path') and (prop_val == None) or (prop_val == '')): raise Exception("right_path cannot be empty. Object creation halted. 
Path to object with error is " + self.state_path) + if ( (prop_name == 'right_path')): + # check for special keyword [parent] + pre_val = prop_val + prop_val.replace("[parent]", self.container.state_path) + #print("Changed ", pre_val, " to ", prop_val) return prop_val @staticmethod @@ -69,13 +80,14 @@ def tokenize(self): # - if this is a data property link then we add op codes to do a copy of data from one state address to another # - if this is simply a parent-child connection, we do not render op-codes, but we do use this for assigning # - execution hierarchy + #print("Linkage/link_type ", self.name, self.link_type,"created with params", self.model_props_parsed) if self.link_type in (2, 3): src_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) if not (src_ix == False): self.ops = self.ops + [src_ix, self.link_type] else: print("Error: link ", self.name, "does not have a valid source path") - #print("tokenize() result", self.ops) + #print(self.name,"tokenize() result", self.ops) if (self.link_type == 4) or (self.link_type == 5): # we push to the remote path in this one left_ix = get_state_ix(self.state_ix, self.state_paths, self.left_path) @@ -89,8 +101,8 @@ def tokenize(self): # Function for use during model simulations of tokenized objects @njit def step_model_link(op_token, state_ix, ts_ix, step): - if step == 2: - print("step_model_link() called at step 2 with op_token=", op_token) + #if step == 2: + #print("step_model_link() called at step 2 with op_token=", op_token) if op_token[3] == 1: return True elif op_token[3] == 2: diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 999ee419..3df78b6c 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -7,17 +7,20 @@ from HSP2.om import * from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric from pandas import Timestamp, Timedelta, read_hdf, read_csv +from numpy import pad class ModelObject: state_ix = {} # Shared Dict with the numerical 
state of each object state_paths = {} # Shared Dict with the hdf5 path of each object dict_ix = {} # Shared Dict with the hdf5 path of each object ts_ix = {} # Shared Dict with the hdf5 path of each object - op_tokens = {} # Shared Dict with the tokenized representation of each object + op_tokens = {} # Shared Dict with the tokenized representation of each object, will be turned into array of ints model_object_cache = {} # Shared with actual objects, keyed by their path model_exec_list = {} # Shared with actual objects, keyed by their path + max_token_length = 64 # limit on complexity of tokenized objects since op_tokens must be fixed dimensions for numba + runnables = [1,2,5,6,8,9,10,11,12,13,14,15] # runnable components important for optimization - def __init__(self, name, container = False, model_props = []): + def __init__(self, name, container = False, model_props = {}): self.name = name self.container = container # will be a link to another object self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" @@ -33,8 +36,12 @@ def __init__(self, name, container = False, model_props = []): self.paths_found = False # this should be False at start self.default_value = 0.0 self.ops = [] - self.optype = 0 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment - self.register_path() + self.optype = 0 # OpTypes are as follows: + # 0 - model object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, + # 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), + # 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, + # 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment, 15 - FlowBy + self.register_path() # note this registers the path AND 
stores the object in model_object_cache self.parse_model_props(model_props) @staticmethod @@ -46,6 +53,30 @@ def required_properties(): req_props = ['name'] return req_props + @staticmethod + def make_op_tokens(num_ops = 5000): + op_tokens = int32(zeros((num_ops,64))) # was Dict.empty(key_type=types.int64, value_type=types.i8[:]) + return op_tokens + + @staticmethod + def runnable_op_list(op_tokens, meo): + # only return those objects that do something at runtime + rmeo = [] + run_ops = {} + for ops in ModelObject.op_tokens: + if ops[0] in ModelObject.runnables: + run_ops[ops[1]] = ops + print("Found runnable", ops[1], "type", ops[0]) + for ix in meo: + if ix in run_ops.keys(): + rmeo.append(ix) + rmeo = np.asarray(rmeo, dtype="i8") + return rmeo + + def format_ops(self): + ops = pad(self.ops,(0,self.max_token_length))[0:self.max_token_length] + return ops + @classmethod def check_properties(cls, model_props): # this is for pre-screening properties for validity in model creation routines @@ -57,22 +88,35 @@ def check_properties(cls, model_props): return False return True + def handle_inputs(self, model_props): + if 'inputs' in model_props.keys(): + for i_pair in model_props['inputs']: + i_name = i_pair[0] + i_target = i_pair[1] + i_target.replace("[parent]", self.container.state_path) + self.add_input(i_name, i_target) + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): # this checks to see if the prop is in dict with value form, or just a value # strict = True causes an exception if property is missing from model_props dict prop_val = model_props.get(prop_name) - if type(prop_val) == list: - prop_val = prop_val.get('value') + if type(prop_val) == list: # this doesn't work, but nothing gets passed in like this? 
Except broadcast params, but they are handled in the sub-class + prop_val = prop_val elif type(prop_val) == dict: prop_val = prop_val.get('value') if strict and (prop_val == None): raise Exception("Cannot find property " + prop_name + " in properties passed to "+ self.name + " and strict = True. Object creation halted. Path to object with error is " + self.state_path) + if (prop_val == None) and not (default_value == None): + prop_val = default_value return prop_val def parse_model_props(self, model_props, strict = False ): # sub-classes will allow an create argument "model_props" and handle them here. + # - subclasses should insure that they call super().parse_model_props() or include all code below # see also: handle_prop(), which will be called y parse_model_props # for all attributes supported by the class + # this base object only handles inputs + self.handle_inputs(model_props) self.model_props_parsed = model_props return True @@ -96,14 +140,14 @@ def save_object_hdf(self, hdfname, overwrite = False ): def make_paths(self, base_path = False): if base_path == False: # we are NOT forcing paths if not (self.container == False): - self.state_path = self.container.state_path + "/" + self.name - self.attribute_path = self.container.attribute_path + "/" + self.name + self.state_path = self.container.state_path + "/" + str(self.name) + self.attribute_path = self.container.attribute_path + "/" + str(self.name) elif self.name == "": self.state_path = "/STATE" self.attribute_path = "/OBJECTS" else: - self.state_path = "/STATE/" + self.name - self.attribute_path = "/OBJECTS/" + self.name + self.state_path = "/STATE/" + str(self.name) + self.attribute_path = "/OBJECTS/" + str(self.name) else: # base_path is a Dict with state_path and attribute_path set self.state_path = base_path['STATE'] + self.name @@ -212,6 +256,10 @@ def add_input(self, var_name, var_path, input_type = 1, trust = False): var_path = found_path self.inputs[var_name] = var_path self.inputs_ix[var_name] = 
var_ix + # Should we create a register for the input to be reported here? + # i.e., if we have an input named Qin on RCHRES_R001, shouldn't we be able + # to find the data in /STATE/RCHRES_R001/Qin ??? It is redundant data and writing + # but matches a complete data model and prevents stale data? return self.inputs_ix[var_name] def add_object_input(self, var_name, var_object, link_type = 1): @@ -221,14 +269,18 @@ def add_object_input(self, var_name, var_object, link_type = 1): self.inputs_ix[var_name] = var_object.ix return self.inputs_ix[var_name] - def create_parent_var(self, parent_var_name, source_object): + def create_parent_var(self, parent_var_name, source): # see decision points: https://github.com/HARPgroup/HSPsquared/issues/78 # This is used when an object sets an additional property on its parent # Like in simple_channel sets [channel prop name]_Qout on its parent # Generally, this should have 2 components. # 1 - a state variable on the child (this could be an implicit sub-comp, or a constant sub-comp, the child handles the setup of this) see constant_or_path() # 2 - an input link - self.container.add_object_input(parent_var_name, source_object, 1) + # the beauty of this is that the parent object and any of it's children will find the variable "[source_object]_varname" + if type(source) == str: + self.container.add_input(parent_var_name, source, 1, False) + elif isinstance(source, ModelObject): + self.container.add_object_input(parent_var_name, source, 1) def insure_path(self, var_path): # if this path can be found in the hdf5 make sure that it is registered in state @@ -269,8 +321,11 @@ def add_op_tokens(self): # can be customized by subclasses to add multiple lines if needed. 
if self.ops == []: self.tokenize() - #print(self.name, "tokens", self.ops) - self.op_tokens[self.ix] = np.asarray(self.ops, dtype="i8") + #print(self.state_path, "tokens", self.ops) + if len(self.ops) > self.max_token_length: + raise Exception("op tokens cannot exceed max length of" + self.max_token_length + "(" + self.state_path + "). ") + self.op_tokens[self.ix] = self.format_ops() + #self.op_tokens[self.ix] = np.asarray(self.ops, dtype="i8") def step(self, step): # this tests the model for a single timestep. diff --git a/tests/testcbp/HSP2results/PL3_5250_0001.json.disabled b/tests/testcbp/HSP2results/PL3_5250_0001.json.disabled new file mode 100644 index 00000000..dde4acdb --- /dev/null +++ b/tests/testcbp/HSP2results/PL3_5250_0001.json.disabled @@ -0,0 +1,18 @@ +{ + "RCHRES_R001": { + "name": "RCHRES_R001", + "object_class": "ModelObject", + "drainage_area_sqmi": { + "name": "drainage_area_sqmi", + "object_class": "Constant", + "value": 99.95 + }, + "IVOLin": { + "name": "IVOLin", + "object_class": "ModelLinkage", + "right_path": "/STATE/RCHRES_R001/IVOL", + "link_type": 2 + } + } +} + diff --git a/tests/testcbp/HSP2results/PL3_5250_nospecl.uci b/tests/testcbp/HSP2results/PL3_5250_nospecl.uci new file mode 100644 index 00000000..6c04b569 --- /dev/null +++ b/tests/testcbp/HSP2results/PL3_5250_nospecl.uci @@ -0,0 +1,230 @@ +RUN + +GLOBAL + PL3_5250_0 riv | P5 | hsp2_2022 | Occoquan + START 2001/01/01 END 2001/12/31 + RUN INTERP OUTPUT LEVEL 1 1 + RESUME 0 RUN 1 UNIT SYSTEM 1 +END GLOBAL + +FILES + ***<----FILE NAME-------------------------------------------------> +WDM1 21 met_A51059.wdm +WDM2 22 prad_A51059.wdm +WDM3 23 ps_sep_div_ams_hsp2_2022_PL3_5250_0001.wdm +WDM4 24 PL3_5250_0001.wdm +MESSU 25 PL3_5250_0001.ech + 26 PL3_5250_0001.out + 31 PL3_5250_0001.tau +END FILES + +OPN SEQUENCE + INGRP INDELT 01:00 + RCHRES 1 + PLTGEN 1 + END INGRP +END OPN SEQUENCE + +RCHRES + ACTIVITY + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG *** + 1 1 1 0 0 0 0 0 0 
0 0 + END ACTIVITY + + PRINT-INFO + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG PIVL***PY + 1 5 5 0 0 0 0 0 0 0 0 0 12 + END PRINT-INFO + + GEN-INFO + RCHRES<-------Name------->Nexit Unit Systems Printer *** + # - # User t-series Engl Metr LKFG *** + 1 PL3_5250_0001 3 1 1 1 26 0 1 + END GEN-INFO + + HYDR-PARM1 + RCHRES Flags for HYDR section *** + # - # VC A1 A2 A3 ODFVFG for each ODGTFG for each *** FUNCT for each + FG FG FG FG possible exit possible exit *** possible exit + 1 2 3 4 5 1 2 3 4 5 *** 1 2 3 4 5 + VC A1 A2 A3 V1 V2 V3 V4 V5 G1 G2 G3 G4 G5 *** F1 F2 F3 F4 F5 + 1 0 1 1 1 0 0 4 0 0 1 2 0 0 0 0 0 0 0 0 + END HYDR-PARM1 + + HYDR-PARM2 + RCHRES *** + # - # FTABNO LEN DELTH STCOR KS DB50 *** + 1 1. 10. 2. 0.5 + END HYDR-PARM2 + + HYDR-INIT + RCHRES Initial conditions for HYDR section *** + # - # VOL Initial value of COLIND *** Initial value of OUTDGT + (ac-ft) for each possible exit *** for each possible exit + VOL CEX1 CEX2 CEX3 CEX4 CEX5 *** DEX1 DEX2 DEX3 DEX4 DEX5 + 1 12175.000 + END HYDR-INIT + + ADCALC-DATA + RCHRES Data for section ADCALC *** + # - # CRRAT VOL *** + 1 1.5 12175. 
+ END ADCALC-DATA + +END RCHRES + +FTABLES + FTABLE 1 + ROWS COLS *** + 20 4 + DEPTH AREA VOLUME DISCH *** + (FT) (ACRES) (AC-FT) (CFS) *** + 0 0 0 0 + 20 124 1007 0 + 30 240 2781 0 + 40 444 6106 0 + 50 804 12175 0 + 52 909 13886 39 + 54 1024 15819 78 + 56 1155 17999 117 + 57 1226 19227 136 + 58 1296 20456 137 + 60 1413 23180 138 + 62 1524 26140 140 + 63 1586 27745 1922 + 64 1647 29351 5179 + 65 1701 31247 9398 + 66 1755 33143 14393 + 67 1803 34984 20645 + 69 1879 38705 36532 + 70 1908 40585 44603 + 76 2100 54000 103071 + END FTABLE 1 +END FTABLES + +EXT SOURCES +<-Volume-> SsysSgap<--Mult-->Tran <-Target vols> <-Grp> <-Member->*** + # # tem strg<-factor->strg # # # #*** +*** METEOROLOGY +WDM1 1000 EVAP ENGLZERO 1.000 SAME RCHRES 1 EXTNL POTEV +WDM1 1001 DEWP ENGLZERO SAME RCHRES 1 EXTNL DEWTMP +WDM1 1002 WNDH ENGLZERO SAME RCHRES 1 EXTNL WIND +WDM1 1003 RADH ENGLZERO SAME RCHRES 1 EXTNL SOLRAD +WDM1 1004 ATMP ENGLZERO SAME RCHRES 1 EXTNL GATMP +WDM1 1005 CLDC ENGLZERO SAME RCHRES 1 EXTNL CLOUD + +*** PRECIPITATION AND ATMOSPHERIC DEPOSITION LOADS +WDM2 2000 HPRC ENGLZERO SAME RCHRES 1 EXTNL PREC +WDM2 2001 NO23 ENGLZERO DIV RCHRES 1 EXTNL NUADFX 1 1 +WDM2 2002 NH4A ENGLZERO DIV RCHRES 1 EXTNL NUADFX 2 1 +WDM2 2003 NO3D ENGLZERO DIV RCHRES 1 EXTNL NUADFX 1 1 +WDM2 2004 NH4D ENGLZERO DIV RCHRES 1 EXTNL NUADFX 2 1 +WDM2 2005 ORGN ENGLZERO DIV RCHRES 1 EXTNL PLADFX 1 1 +WDM2 2006 PO4A ENGLZERO DIV RCHRES 1 EXTNL NUADFX 3 1 +WDM2 2007 ORGP ENGLZERO DIV RCHRES 1 EXTNL PLADFX 2 1 + +*** POINT SOURCE +WDM3 3000 FLOW ENGLZERO DIV RCHRES 1 INFLOW IVOL +WDM3 3001 HEAT ENGLZERO DIV RCHRES 1 INFLOW IHEAT +WDM3 3002 NH3X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 2 +WDM3 3003 NO3X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 1 +WDM3 3004 ORNX ENGLZERO DIV RCHRES 1 INFLOW PKIF 3 +WDM3 3005 PO4X ENGLZERO DIV RCHRES 1 INFLOW NUIF1 4 +WDM3 3006 ORPX ENGLZERO DIV RCHRES 1 INFLOW PKIF 4 +WDM3 3021 BODX ENGLZERO DIV RCHRES 1 INFLOW OXIF 2 +WDM3 3022 TSSX ENGLZERO 0.0005 DIV RCHRES 1 INFLOW ISED 3 +WDM3 
3023 DOXX ENGLZERO DIV RCHRES 1 INFLOW OXIF 1 +WDM3 3024 TOCX ENGLZERO DIV RCHRES 1 INFLOW PKIF 5 + +*** DIVERSIONS +WDM3 3007 DIVR ENGLZERO SAME RCHRES 1 EXTNL OUTDGT 1 +WDM3 3008 DIVA ENGLZERO SAME RCHRES 1 EXTNL OUTDGT 2 + +*** SEPTIC +WDM3 3010 SNO3 ENGLZERO 1.0000 DIV RCHRES 1 INFLOW NUIF1 1 + +*** AEOLIAN SEDIMENT +WDM3 3061 SFAS ENGLZERO 7.027e-06DIV RCHRES 1 INFLOW ISED 2 +WDM3 3062 SFAC ENGLZERO 7.027e-06DIV RCHRES 1 INFLOW ISED 3 + +*** UPSTREAM and EOS INPUT *** +WDM4 11 WATR ENGLZERO SAME RCHRES 1 INFLOW IVOL +WDM4 12 HEAT ENGLZERO SAME RCHRES 1 INFLOW IHEAT +WDM4 13 DOXY ENGLZERO SAME RCHRES 1 INFLOW OXIF 1 +WDM4 21 SAND ENGLZERO SAME RCHRES 1 INFLOW ISED 1 +WDM4 22 SILT ENGLZERO SAME RCHRES 1 INFLOW ISED 2 +WDM4 23 CLAY ENGLZERO SAME RCHRES 1 INFLOW ISED 3 +WDM4 31 NO3D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 1 +WDM4 32 NH3D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 2 +WDM4 33 NH3A ENGLZERO SAME RCHRES 1 INFLOW NUIF2 1 1 +WDM4 34 NH3I ENGLZERO SAME RCHRES 1 INFLOW NUIF2 2 1 +WDM4 35 NH3C ENGLZERO SAME RCHRES 1 INFLOW NUIF2 3 1 +WDM4 36 RORN ENGLZERO SAME RCHRES 1 INFLOW PKIF 3 +WDM4 41 PO4D ENGLZERO SAME RCHRES 1 INFLOW NUIF1 4 +WDM4 42 PO4A ENGLZERO SAME RCHRES 1 INFLOW NUIF2 1 2 +WDM4 43 PO4I ENGLZERO SAME RCHRES 1 INFLOW NUIF2 2 2 +WDM4 44 PO4C ENGLZERO SAME RCHRES 1 INFLOW NUIF2 3 2 +WDM4 45 RORP ENGLZERO SAME RCHRES 1 INFLOW PKIF 4 +WDM4 51 BODA ENGLZERO SAME RCHRES 1 INFLOW OXIF 2 +WDM4 52 TORC ENGLZERO SAME RCHRES 1 INFLOW PKIF 5 +WDM4 53 PHYT ENGLZERO SAME RCHRES 1 INFLOW PKIF 1 +END EXT SOURCES + +EXT TARGETS +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Volume-> Tsys Tgap Amd *** + # # #<-factor->strg # # tem strg strg*** +RCHRES 1 OFLOW OVOL 3 SAME WDM4 111 WATR ENGL REPL +RCHRES 1 OFLOW OHEAT 3 SAME WDM4 112 HEAT ENGL REPL +RCHRES 1 OFLOW OXCF2 3 1 SAME WDM4 113 DOXY ENGL REPL +RCHRES 1 OFLOW OSED 3 1 SAME WDM4 121 SAND ENGL REPL +RCHRES 1 OFLOW OSED 3 2 SAME WDM4 122 SILT ENGL REPL +RCHRES 1 OFLOW OSED 3 3 SAME WDM4 123 CLAY ENGL REPL +RCHRES 1 
OFLOW NUCF9 3 1 SAME WDM4 131 NO3D ENGL REPL +RCHRES 1 OFLOW NUCF9 3 2 SAME WDM4 132 NH3D ENGL REPL +RCHRES 1 OFLOW OSNH4 3 1 SAME WDM4 133 NH3A ENGL REPL +RCHRES 1 OFLOW OSNH4 3 2 SAME WDM4 134 NH3I ENGL REPL +RCHRES 1 OFLOW OSNH4 3 3 SAME WDM4 135 NH3C ENGL REPL +RCHRES 1 OFLOW PKCF2 3 3 SAME WDM4 136 RORN ENGL REPL +RCHRES 1 OFLOW NUCF9 3 4 SAME WDM4 141 PO4D ENGL REPL +RCHRES 1 OFLOW OSPO4 3 1 SAME WDM4 142 PO4A ENGL REPL +RCHRES 1 OFLOW OSPO4 3 2 SAME WDM4 143 PO4I ENGL REPL +RCHRES 1 OFLOW OSPO4 3 3 SAME WDM4 144 PO4C ENGL REPL +RCHRES 1 OFLOW PKCF2 3 4 SAME WDM4 145 RORP ENGL REPL +RCHRES 1 OFLOW OXCF2 3 2 SAME WDM4 151 BODA ENGL REPL +RCHRES 1 OFLOW PKCF2 3 5 SAME WDM4 152 TORC ENGL REPL +RCHRES 1 OFLOW PKCF2 3 1 SAME WDM4 153 PHYT ENGL REPL +END EXT TARGETS + +NETWORK +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # #<-factor->strg # # # # *** +RCHRES 1 HYDR TAU AVER PLTGEN 1 INPUT MEAN 1 +END NETWORK + +PLTGEN + PLOTINFO + # - # FILE NPT NMN LABL PYR PIVL *** + 1 31 1 12 24 + END PLOTINFO + + GEN-LABELS + # - #<----------------Title-----------------> *** + 1 PL3_5250_0001 daily_shear_stress_lbsft2 + END GEN-LABELS + + SCALING + #thru# YMIN YMAX IVLIN THRESH *** + 1 99 0. 100000. 20. 
+ END SCALING + + CURV-DATA + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 daily_shear_stre 1 1 AVER + END CURV-DATA +END PLTGEN + +SPEC-ACTIONS +END SPEC-ACTIONS + +END RUN From 9c5cf061c89ff1979355664efe41e941609cc039 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 30 Jan 2024 13:44:23 -0500 Subject: [PATCH 19/35] expand run duration on test --- tests/testcbp/HSP2results/PL3_5250_0001.uci | 43 ++++++++++++++++++++ tests/testcbp/HSP2results/PL3_5250_specl.uci | 41 ++++++++++++++++++- 2 files changed, 83 insertions(+), 1 deletion(-) diff --git a/tests/testcbp/HSP2results/PL3_5250_0001.uci b/tests/testcbp/HSP2results/PL3_5250_0001.uci index 6c04b569..e5247e20 100644 --- a/tests/testcbp/HSP2results/PL3_5250_0001.uci +++ b/tests/testcbp/HSP2results/PL3_5250_0001.uci @@ -225,6 +225,49 @@ PLTGEN END PLTGEN SPEC-ACTIONS +*** ACTIONS +***optyp range dc ds yr mo da hr mn d t vari s1 s2 s3 ac value tc ts num + <****><-><--><><-><--><-><-><-><-><><> <----><-><-><-><-><--------> <> <-><-> + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. 
+ RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. END SPEC-ACTIONS END RUN diff --git a/tests/testcbp/HSP2results/PL3_5250_specl.uci b/tests/testcbp/HSP2results/PL3_5250_specl.uci index 83696691..b56e2860 100644 --- a/tests/testcbp/HSP2results/PL3_5250_specl.uci +++ b/tests/testcbp/HSP2results/PL3_5250_specl.uci @@ -2,7 +2,7 @@ RUN GLOBAL PL3_5250_0 riv | P5 | hsp2_2022 | Occoquan - START 2001/01/01 END 2001/12/31 + START 2001/01/01 END 2020/12/31 RUN INTERP OUTPUT LEVEL 1 1 RESUME 0 RUN 1 UNIT SYSTEM 1 END GLOBAL @@ -229,6 +229,45 @@ SPEC-ACTIONS ***optyp range dc ds yr mo da hr mn d t vari s1 s2 s3 ac value tc ts num <****><-><--><><-><--><-><-><-><-><><> <----><-><-><-><-><--------> <> <-><-> RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. 
+ RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL += 10. + RCHRES 1 DY 11984 1 1 12 2 3 IVOL -= 10. END SPEC-ACTIONS END RUN From 1ad90e105e9fe772df73d5b70e947051b2abacaa Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Tue, 30 Jan 2024 16:21:50 -0500 Subject: [PATCH 20/35] om_special_action.py -- add False to return to satisfy njit requirement io.py -- output aver in addition to sum and last --- HSP2/om_special_action.py | 4 ++-- HSP2IO/io.py | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 9c450eef..3fe77aad 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -142,13 +142,13 @@ def step_special_action(op, state_ix, dict_ix, step): ix2 = op[4] tix = op[5] # which slot is the time comparison in? 
if (tix in state_ix and step < state_ix[tix]): - return + return False ctr_ix = op[6] # id of the counter variable num_ix = op[7] # max times to complete num_done = state_ix[ctr_ix] num = state_ix[num_ix] # num to complete if (tix in state_ix and num_done >= num): - return + return False else: if sop == 1: result = state_ix[ix2] diff --git a/HSP2IO/io.py b/HSP2IO/io.py index a5929182..4d8437d0 100644 --- a/HSP2IO/io.py +++ b/HSP2IO/io.py @@ -75,17 +75,23 @@ def write_ts(self, # change time step of output to daily sumdf1 = data_frame.resample('D',kind='timestamp',origin='start').sum() lastdf2 = data_frame.resample('D', kind='timestamp', origin='start').last() + meandf3 = data_frame.resample('D', kind='timestamp', origin='start').mean() data_frame= pd.merge(lastdf2.add_suffix('_last'), sumdf1.add_suffix('_sum'), left_index=True, right_index=True) + data_frame = pd.merge(data_frame, meandf3.add_suffix('_aver'), left_index=True,right_index=True) elif outstep == 4: # change to monthly sumdf1 = data_frame.resample('M',kind='timestamp',origin='start').sum() lastdf2 = data_frame.resample('M', kind='timestamp', origin='start').last() + meandf3 = data_frame.resample('M', kind='timestamp', origin='start').mean() data_frame = pd.merge(lastdf2.add_suffix('_last'), sumdf1.add_suffix('_sum'), left_index=True, right_index=True) + data_frame = pd.merge(data_frame, meandf3.add_suffix('_aver'), left_index=True, right_index=True) elif outstep == 5: # change to annual sumdf1 = data_frame.resample('Y',kind='timestamp',origin='start').sum() lastdf2 = data_frame.resample('Y', kind='timestamp', origin='start').last() + meandf3 = data_frame.resample('Y', kind='timestamp', origin='start').mean() data_frame = pd.merge(lastdf2.add_suffix('_last'), sumdf1.add_suffix('_sum'), left_index=True, right_index=True) + data_frame = pd.merge(data_frame, meandf3.add_suffix('_aver'), left_index=True, right_index=True) self._output.write_ts(data_frame, category, operation, segment, activity) def 
read_ts(self, From a5a2c6aff3903c8221035e9a989ac4f7645b3604 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 30 Jan 2024 17:06:18 -0500 Subject: [PATCH 21/35] bencmarking script made to include 1,000 special actions --- HSP2/om.py | 2 +- HSP2/om_model_object.py | 2 +- HSP2/om_special_action.py | 2 - tests/testcbp/HSP2results/benchmark.py | 59 ++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 4 deletions(-) create mode 100644 tests/testcbp/HSP2results/benchmark.py diff --git a/HSP2/om.py b/HSP2/om.py index 5d8207b1..d4eb8170 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -434,7 +434,7 @@ def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, checksum = 0.0 for step in range(steps): pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, dstep) + step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) #print("Steps completed", step) return checksum diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 3df78b6c..29bb8700 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -18,7 +18,7 @@ class ModelObject: model_object_cache = {} # Shared with actual objects, keyed by their path model_exec_list = {} # Shared with actual objects, keyed by their path max_token_length = 64 # limit on complexity of tokenized objects since op_tokens must be fixed dimensions for numba - runnables = [1,2,5,6,8,9,10,11,12,13,14,15] # runnable components important for optimization + runnables = [1,2,5,6,8,9,10,11,12,13,14,15, 100] # runnable components important for optimization def __init__(self, name, container = False, model_props = {}): self.name = name diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 9c450eef..5fb9d12b 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -24,7 +24,6 @@ def parse_model_props(self, model_props, strict=False): # - action(operation) to 
perform = AC # - operand2, a numeric value for simple ACTION = [VALUE] # note: [op_abbrev] is *maybe* the first letter of the OPTYP? Not a very good idea to have a coded convention like that - print("Creating ACTION with props", model_props) self.op_type = self.handle_prop(model_props, 'OPTYP') self.range1 = self.handle_prop(model_props, 'RANGE1') self.range2 = self.handle_prop(model_props, 'RANGE2') @@ -108,7 +107,6 @@ def tokenize(self): super().tokenize() # sets self.ops = op_type, op_ix self.ops = self.ops + [self.inputs_ix['op1'], self.opid, self.op2_ix, self.timer_ix, self.ctr_ix, self.num] # @tbd: check if time ops have been set and tokenize accordingly - print("Specl", self.name, "tokens", self.ops) def add_op_tokens(self): # this puts the tokens into the global simulation queue diff --git a/tests/testcbp/HSP2results/benchmark.py b/tests/testcbp/HSP2results/benchmark.py new file mode 100644 index 00000000..079204c4 --- /dev/null +++ b/tests/testcbp/HSP2results/benchmark.py @@ -0,0 +1,59 @@ +# bare bones tester +import os +os.chdir("C:/usr/local/home/git/HSPsquared") +from HSP2.main import * +from HSP2.om import * +#from HSP2.om_equation import * + +state = init_state_dicts() +# set up info and timer +siminfo = {} +siminfo['delt'] = 60 +siminfo['tindex'] = date_range("1984-01-01", "2020-12-31", freq=Minute(siminfo['delt']))[1:] +steps = siminfo['steps'] = len(siminfo['tindex']) +# get any pre-loaded objects +model_data = state['model_data'] +( ModelObject.op_tokens, ModelObject.model_object_cache) = init_om_dicts() +ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] +( op_tokens, state_paths, state_ix, dict_ix, model_object_cache, ts_ix) = ( ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache, ModelObject.ts_ix ) +state_context_hsp2(state, 'RCHRES', 'R001', 'HYDR') +print("Init 
HYDR state context for domain", state['domain']) +hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) +# Now, assemble a test dataset +container = False +model_root_object = ModelObject("") +# set up the timer as the first element +timer = SimTimer('timer', model_root_object, siminfo) + +facility = ModelObject('facility', model_root_object) +for k in range(1000): + #eqn = str(25*random.random()) + " * " + c[round((2*random.random()))] + #newq = Equation('eq' + str(k), facility, {'equation':eqn} ) + conval = 50.0*random.random() + newq = ModelConstant('con' + str(k), facility, conval) + speca = SpecialAction('specl' + str(k), facility, {'OPTYP': 'RCHRES', 'RANGE1': 1, 'RANGE2':'', 'AC':'+=', 'VARI':'IVOL', 'VALUE':10.0, 'YR':'2000', 'DA':'1', 'MO':'1', 'HR':'1','MN':''}) + +# adjust op_tokens length to insure capacity +op_tokens = ModelObject.make_op_tokens(len(model_object_cache) +# Parse, load and order all objects +model_loader_recursive(state['model_data'], model_root_object) # this throws a syntax error, but if it is omitted, the whole thing fails. +model_path_loader(ModelObject.model_object_cache) +model_exec_list = [] +model_touch_list = [] +# put all objects in token form for fast runtime execution and sort according to dependency order +print("Tokenizing models") +model_tokenizer_recursive(model_root_object, ModelObject.model_object_cache, model_exec_list, model_touch_list ) + +model_exec_list = np.asarray(model_exec_list, dtype="i8") +# the resulting set of objects is returned. 
+state['model_object_cache'] = ModelObject.model_object_cache +state['op_tokens'] = ModelObject.op_tokens +state['state_step_om'] = 'disabled' + +# using only these runnables cuts runtime by over 40% +# Test and time the run +start = time.time() +iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, siminfo['steps'], -1) +end = time.time() +print(len(model_exec_list), "components iterated over", siminfo['steps'], "time steps took" , end - start, "seconds") + From 449e9b957f9134d170158cc3585fcbfaee55980c Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 30 Jan 2024 17:13:43 -0500 Subject: [PATCH 22/35] runnable from command line as "py.exe benchmarking.py" --- tests/testcbp/HSP2results/benchmark.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/testcbp/HSP2results/benchmark.py b/tests/testcbp/HSP2results/benchmark.py index 079204c4..b560d893 100644 --- a/tests/testcbp/HSP2results/benchmark.py +++ b/tests/testcbp/HSP2results/benchmark.py @@ -34,9 +34,9 @@ speca = SpecialAction('specl' + str(k), facility, {'OPTYP': 'RCHRES', 'RANGE1': 1, 'RANGE2':'', 'AC':'+=', 'VARI':'IVOL', 'VALUE':10.0, 'YR':'2000', 'DA':'1', 'MO':'1', 'HR':'1','MN':''}) # adjust op_tokens length to insure capacity -op_tokens = ModelObject.make_op_tokens(len(model_object_cache) +op_tokens = ModelObject.make_op_tokens(len(model_object_cache)) +model_loader_recursive(model_data, model_root_object) # Parse, load and order all objects -model_loader_recursive(state['model_data'], model_root_object) # this throws a syntax error, but if it is omitted, the whole thing fails. 
model_path_loader(ModelObject.model_object_cache) model_exec_list = [] model_touch_list = [] From b672294e29fcba7d1145ba70a23e75271aa414ad Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Fri, 2 Feb 2024 13:59:23 -0500 Subject: [PATCH 23/35] format state_ix for higher read/write speed --- HSP2/om.py | 1 + tests/testcbp/HSP2results/PL3_5250_0001.json | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/testcbp/HSP2results/PL3_5250_0001.json diff --git a/HSP2/om.py b/HSP2/om.py index d4eb8170..aa16dbe4 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -163,6 +163,7 @@ def state_om_model_run_prep(state, io_manager, siminfo): state['model_object_cache'] = model_object_cache state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") state['state_step_om'] = 'disabled' + state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float32") if len(op_tokens) > 0: state['state_step_om'] = 'enabled' return diff --git a/tests/testcbp/HSP2results/PL3_5250_0001.json b/tests/testcbp/HSP2results/PL3_5250_0001.json new file mode 100644 index 00000000..dde4acdb --- /dev/null +++ b/tests/testcbp/HSP2results/PL3_5250_0001.json @@ -0,0 +1,18 @@ +{ + "RCHRES_R001": { + "name": "RCHRES_R001", + "object_class": "ModelObject", + "drainage_area_sqmi": { + "name": "drainage_area_sqmi", + "object_class": "Constant", + "value": 99.95 + }, + "IVOLin": { + "name": "IVOLin", + "object_class": "ModelLinkage", + "right_path": "/STATE/RCHRES_R001/IVOL", + "link_type": 2 + } + } +} + From f1c1be8402ca19e0e94652096e1dd2c21cc90482 Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Fri, 2 Feb 2024 15:28:53 -0500 Subject: [PATCH 24/35] utilities.py -- fix gener to gener linkage naming --- HSP2/utilities.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/HSP2/utilities.py b/HSP2/utilities.py index 6156d720..3706a77d 100644 --- a/HSP2/utilities.py +++ b/HSP2/utilities.py @@ -462,10 +462,6 @@ def get_gener_timeseries(ts: Dict, 
gener_instances: Dict, ddlinks: List, ddmassl mldata = ddmasslinks[link.MLNO] for dat in mldata: mfactor = dat.MFACTOR - sgrpn = dat.SGRPN - smemn = dat.SMEMN - smemsb1 = dat.SMEMSB1 - smemsb2 = dat.SMEMSB2 tmemn = dat.TMEMN tmemsb1 = dat.TMEMSB1 tmemsb2 = dat.TMEMSB2 @@ -474,9 +470,8 @@ def get_gener_timeseries(ts: Dict, gener_instances: Dict, ddlinks: List, ddmassl factor = afactr * mfactor # may need to do something in here for special cases like in get_flows - - smemn, tmemn = expand_timeseries_names(sgrpn, smemn, smemsb1, smemsb2, tmemn, tmemsb1, - tmemsb2) + if tmemn != 'ONE' and tmemn != 'TWO': + tmemn = clean_name(tmemn, tmemsb1 + tmemsb2) t = series * factor From 9c0db21f08f58f0bdd87407215f8834e49f6a362 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Tue, 6 Feb 2024 18:02:27 -0500 Subject: [PATCH 25/35] finalized ndarray for state_ix which has the largest impact. allow switching of modes by setting ModelObject.ops_data_type --- HSP2/HYDR.py | 14 ++++++++------ HSP2/om.py | 12 +++++++----- HSP2/om_model_object.py | 20 ++++++++++++++++---- HSP2/state.py | 1 + tests/testcbp/HSP2results/benchmark.py | 18 +++++++++++++++--- 5 files changed, 47 insertions(+), 18 deletions(-) diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index c3ae9341..fb8ed373 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -151,14 +151,16 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix'] state_paths = state['state_paths'] + model_exec_list = state['model_exec_list'] # order of special actions and other dynamic ops # initialize the hydr paths in case they don't already reside here hydr_init_ix(state_ix, state_paths, state['domain']) op_tokens = state['op_tokens'] + print("state_ix is type", type(state_ix)) ####################################################################################### # Do the simulation with _hydr_ (ie 
run reaches simulation code) errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, - state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens) + state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list) if 'O' in ts: del ts['O'] if 'OVOL' in ts: del ts['OVOL'] @@ -172,7 +174,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): @njit(cache=True) -def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens): +def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list): errors = zeros(int(ui['errlen'])).astype(int64) steps = int(ui['steps']) # number of simulation steps @@ -342,14 +344,14 @@ def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_inf # - these if statements may be irrelevant if default functions simply return # when no objects are defined. if (state_info['state_step_om'] == 'enabled'): - pre_step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) + pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) if (state_info['state_step_hydr'] == 'enabled'): state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) if (state_info['state_step_om'] == 'enabled'): #print("trying to execute state_step_om()") - # op_tokens[0] contains the model exec list. Later we may amend this - # perhaps even storing a domain specific exec list under domain/exec_list? 
- step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, step) # traditional 'ACTIONS' done in here + # model_exec_list contains the model exec list in dependency order + # now these are all executed at once, but we need to make them only for domain end points + step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) # traditional 'ACTIONS' done in here if ( (state_info['state_step_hydr'] == 'enabled') or (state_info['state_step_om'] == 'enabled') ): # Do write-backs for editable STATE variables diff --git a/HSP2/om.py b/HSP2/om.py index aa16dbe4..6a817bba 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -148,22 +148,25 @@ def state_om_model_run_prep(state, io_manager, siminfo): # since this is a function that accepts state as an argument and these were both set in state_load_dynamics_om # we can assume they are there and functioning model_object_cache = state['model_object_cache'] - op_tokens = state['op_tokens'] model_path_loader(model_object_cache) # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added model_exec_list = [] # put all objects in token form for fast runtime execution and sort according to dependency order print("Tokenizing models") + ModelObject.op_tokens = ModelObject.make_op_tokens(max(ModelObject.state_ix.keys()) + 1) model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) + op_tokens = ModelObject.op_tokens + print("op_tokens has", len(op_tokens),"elements") # model_exec_list is the ordered list of component operations - print("op_tokens has", len(model_object_cache),"elements") #print("model_exec_list(", len(model_exec_list),"items):", model_exec_list) # This is used to stash the model_exec_list in the dict_ix, this might be slow, need to verify. # the resulting set of objects is returned. 
+ state['state_step_om'] = 'disabled' state['model_object_cache'] = model_object_cache state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") - state['state_step_om'] = 'disabled' - state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float32") + if ModelObject.ops_data_type == 'ndarray': + state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float32") + state['op_tokens'] = op_tokens if len(op_tokens) > 0: state['state_step_om'] = 'enabled' return @@ -449,7 +452,6 @@ def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): @njit def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - val = 0 for i in model_exec_list: step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) return diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 29bb8700..0a13a95c 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -19,6 +19,7 @@ class ModelObject: model_exec_list = {} # Shared with actual objects, keyed by their path max_token_length = 64 # limit on complexity of tokenized objects since op_tokens must be fixed dimensions for numba runnables = [1,2,5,6,8,9,10,11,12,13,14,15, 100] # runnable components important for optimization + ops_data_type = 'ndarray' # options are ndarray or Dict - Dict appears slower, but unsure of the cause, so keep as option. 
def __init__(self, name, container = False, model_props = {}): self.name = name @@ -55,7 +56,10 @@ def required_properties(): @staticmethod def make_op_tokens(num_ops = 5000): - op_tokens = int32(zeros((num_ops,64))) # was Dict.empty(key_type=types.int64, value_type=types.i8[:]) + if (ModelObject.ops_data_type == 'ndarray'): + op_tokens = int32(zeros((num_ops,64))) # was Dict.empty(key_type=types.int64, value_type=types.i8[:]) + else: + op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) return op_tokens @staticmethod @@ -73,10 +77,19 @@ def runnable_op_list(op_tokens, meo): rmeo = np.asarray(rmeo, dtype="i8") return rmeo - def format_ops(self): - ops = pad(self.ops,(0,self.max_token_length))[0:self.max_token_length] + @staticmethod + def model_format_ops(ops): + if (ModelObject.ops_data_type == 'ndarray'): + ops = pad(ops,(0,ModelObject.max_token_length))[0:ModelObject.max_token_length] + else: + ops = np.asarray(ops, dtype="i8") return ops + def format_ops(self): + # this can be sub-classed if needed, but should not be since it is based on the ops_data_type + # See ModelObject.model_format_ops() + return ModelObject.model_format_ops(self.ops) + @classmethod def check_properties(cls, model_props): # this is for pre-screening properties for validity in model creation routines @@ -325,7 +338,6 @@ def add_op_tokens(self): if len(self.ops) > self.max_token_length: raise Exception("op tokens cannot exceed max length of" + self.max_token_length + "(" + self.state_path + "). ") self.op_tokens[self.ix] = self.format_ops() - #self.op_tokens[self.ix] = np.asarray(self.ops, dtype="i8") def step(self, step): # this tests the model for a single timestep. 
diff --git a/HSP2/state.py b/HSP2/state.py index 34160c2b..9c6040c9 100644 --- a/HSP2/state.py +++ b/HSP2/state.py @@ -18,6 +18,7 @@ def init_state_dicts(): state = {} # shared state Dictionary, contains numba-ready Dicts state_paths = Dict.empty(key_type=types.unicode_type, value_type=types.int64) state_ix = Dict.empty(key_type=types.int64, value_type=types.float64) + state_ix = Dict.empty(key_type=types.int64, value_type=types.float64) dict_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:,:]) ts_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:]) # initialize state for hydr diff --git a/tests/testcbp/HSP2results/benchmark.py b/tests/testcbp/HSP2results/benchmark.py index b560d893..79387fba 100644 --- a/tests/testcbp/HSP2results/benchmark.py +++ b/tests/testcbp/HSP2results/benchmark.py @@ -34,7 +34,7 @@ speca = SpecialAction('specl' + str(k), facility, {'OPTYP': 'RCHRES', 'RANGE1': 1, 'RANGE2':'', 'AC':'+=', 'VARI':'IVOL', 'VALUE':10.0, 'YR':'2000', 'DA':'1', 'MO':'1', 'HR':'1','MN':''}) # adjust op_tokens length to insure capacity -op_tokens = ModelObject.make_op_tokens(len(model_object_cache)) +op_tokens = ModelObject.op_tokens = ModelObject.make_op_tokens(max(ModelObject.state_ix.keys()) + 1) model_loader_recursive(model_data, model_root_object) # Parse, load and order all objects model_path_loader(ModelObject.model_object_cache) @@ -43,7 +43,7 @@ # put all objects in token form for fast runtime execution and sort according to dependency order print("Tokenizing models") model_tokenizer_recursive(model_root_object, ModelObject.model_object_cache, model_exec_list, model_touch_list ) - +op_tokens = ModelObject.op_tokens model_exec_list = np.asarray(model_exec_list, dtype="i8") # the resulting set of objects is returned. 
state['model_object_cache'] = ModelObject.model_object_cache @@ -55,5 +55,17 @@ start = time.time() iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, siminfo['steps'], -1) end = time.time() -print(len(model_exec_list), "components iterated over", siminfo['steps'], "time steps took" , end - start, "seconds") +print(len(model_exec_list), "components iterated over state_ix", siminfo['steps'], "time steps took" , end - start, "seconds") + + +# test with np.array state +#np_state_ix = np.asarray(list(state_ix.values()), dtype="float32") +np_state_ix = zeros(max(state_ix.keys()) + 1, dtype="float32") +for ix, iv in state_ix.items(): + np_state_ix[ix] = iv + +start = time.time() +iterate_models(model_exec_list, op_tokens, np_state_ix, dict_ix, ts_ix, siminfo['steps'], -1) +end = time.time() +print(len(model_exec_list), "components iterated over np_state_ix", siminfo['steps'], "time steps took" , end - start, "seconds") From 0dd9b8139259cdb05b199f32b02c4d2c434495b9 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Wed, 21 Feb 2024 09:27:02 -0500 Subject: [PATCH 26/35] make state float64 instead of float32 --- HSP2/om.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HSP2/om.py b/HSP2/om.py index 6a817bba..c2d72f2c 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -165,7 +165,7 @@ def state_om_model_run_prep(state, io_manager, siminfo): state['model_object_cache'] = model_object_cache state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") if ModelObject.ops_data_type == 'ndarray': - state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float32") + state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float64") state['op_tokens'] = op_tokens if len(op_tokens) > 0: state['state_step_om'] = 'enabled' From aa58d28fa7ab02a73bf8a19e84b2ae9bc8819ada Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Wed, 3 Apr 2024 11:51:12 -0400 Subject: [PATCH 27/35] Create CONTRIBUTING.md --- CONTRIBUTING.md | 7 +++++++ 1 
file changed, 7 insertions(+) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..8e917170 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,7 @@ +

+Contribution Guidelines:
+The HSPsquared team appreciates your interest as we work to build a community of collaborators!
+
+Pull Requests:
+
+Please submit pull requests into the Develop branch.
+If your pull request relates to an Issue, please be sure to reference the issue in the description.
From 4a7b9416726691340cb14659482bb4a74169c434 Mon Sep 17 00:00:00 2001 From: Paul Duda Date: Wed, 3 Apr 2024 12:25:24 -0400 Subject: [PATCH 28/35] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 55c26951..feea1990 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ The **[Hydrologic Simulation Program–Python (HSP2)](https://github.com/respec/HSPsquared)** watershed model is is a port of the well-established [Hydrological Simulation Program - FORTRAN (HSPF)](https://www.epa.gov/ceam/hydrological-simulation-program-fortran-hspf), re-coded with modern scientific Python and data formats. -HSP2 (pronouced "HSP-squared") is being developed by an open source team launched and led by RESPEC with internal funding and now in collaboration with LimnoTech and with additional support from the U.S. Army Corps of Engineers, Engineer Research and Development Center (ERDC), Environmental Laboratory. +HSP2 (pronounced "HSP-squared") is being developed by an open source team launched and led by RESPEC with internal funding. Our list of collaborators is growing, now including LimnoTech and with additional support from the U.S. Army Corps of Engineers (Engineer Research and Development Center (ERDC), Environmental Laboratory), modelers at the Virginia Department of Environmental Quality, and others. HSP2 currently supports all HSPF hydrology and detailed water quality modules. Support for specialty modules is currently in progress. See our [Release Notes](https://github.com/respec/HSPsquared/releases) for up-to-date details.
From 5d77ac69ebcb22c6da3e52b5416ef7967b755ff6 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Fri, 12 Apr 2024 15:19:47 -0400 Subject: [PATCH 29/35] domain specific dependency endpoint support functions and test implementation in SEDTRN --- .gitignore | 147 +- HSP2/HYDR.py | 1411 ++++++------ HSP2/SEDTRN.py | 2018 +++++++++-------- HSP2/SPECL.py | 121 +- HSP2/main.py | 908 ++++---- HSP2/om.py | 1235 +++++----- HSP2/om_model_linkage.py | 271 +-- HSP2/om_model_object.py | 835 +++---- HSP2/om_special_action.py | 344 +-- HSP2/state.py | 476 ++-- tests/test10/HSP2results/test10.uci | 1996 ++++++++-------- .../HSP2results/check_depends_endpoint.py | 75 + 12 files changed, 4989 insertions(+), 4848 deletions(-) create mode 100644 tests/testcbp/HSP2results/check_depends_endpoint.py diff --git a/.gitignore b/.gitignore index f177d0cd..c67b3a04 100644 --- a/.gitignore +++ b/.gitignore @@ -1,72 +1,75 @@ -# Editors & IDEs -.vscode -.vs -.idea - -# Jupyter Notebook -.ipynb_checkpoints - -# Distribution / packaging -build -build/* -_build -_build/* -.bzr -develop-eggs -dist -dist/* -downloads -*.egg -*.EGG -*.egg-info -eggs -fake-eggs -.installed.cfg -.mr.developer.cfg -parts - -# Byte-compiled / optimized / DLL files -*.pyc -__pycache__ -__pycache__/* -*.pyo -.svn - -# Unit test / coverage reports -htmlcov -.tox -.tox/* -.coverage -.coverage/* -*.ech -*.log -*.hbnhead -*.units.dbf - -# Translations -*.mo - -# Stashes, etc. 
-stats.dat -.ropeproject -.hg -*.tmp* -tests/_LargeFileStash - -# Temporary files -_TutorialData/ - -# Specific files -run.py -tests/GLWACSO/HSP2results/hspp007.hdf -tests/GLWACSO/HSPFresults/hspf006.HBN -tests/GLWACSO/HSP2results/hspp007.uci -tests/test_report_conversion.html -tests/land_spec/hwmA51800.h5 -tests/testcbp/HSP2results/PL3_5250_0001.h5 -tests/testcbp/HSP2results/PL3_5250_specl.h5 -tests/testcbp/HSP2results/*.csv - -# R files -.Rdata -.Rhistory +# Editors & IDEs +.vscode +.vs +.idea + +# Jupyter Notebook +.ipynb_checkpoints + +# Distribution / packaging +build +build/* +_build +_build/* +.bzr +develop-eggs +dist +dist/* +downloads +*.egg +*.EGG +*.egg-info +eggs +fake-eggs +.installed.cfg +.mr.developer.cfg +parts + +# Byte-compiled / optimized / DLL files +*.pyc +__pycache__ +__pycache__/* +*.pyo +.svn + +# Unit test / coverage reports +htmlcov +.tox +.tox/* +.coverage +.coverage/* +*.ech +*.log +*.hbnhead +*.units.dbf + +# Translations +*.mo + +# Stashes, etc. +stats.dat +.ropeproject +.hg +*.tmp* +tests/_LargeFileStash + +# Temporary files +_TutorialData/ + + +# Specific files +run.py +tests/GLWACSO/HSP2results/hspp007.hdf +tests/GLWACSO/HSPFresults/hspf006.HBN +tests/GLWACSO/HSP2results/hspp007.uci +tests/test_report_conversion.html + +# Omit big files +tests/land_spec/hwmA51800.h5 +tests/testcbp/HSP2results/PL3_5250_0001.h5 +tests/testcbp/HSP2results/*.csv +tests/test10/HSP2results/test10.h5 + +# R files +.Rdata +.Rhistory diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index fb8ed373..c714407b 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -1,707 +1,706 @@ -''' Copyright (c) 2020 by RESPEC, INC. -Author: Robert Heaphy, Ph.D. 
-License: LGPL2 -Conversion of no category version of HSPF HRCHHYD.FOR into Python''' - - -''' Development Notes: - Categories not implimented in this version - Irregation only partially implimented in this version - Only English units currently supported - FTABLE can come from WDM or UCI file based on FTBDSN 1 or 0 -''' - - -from numpy import zeros, any, full, nan, array, int64, arange -from pandas import DataFrame -from math import sqrt, log10 -from numba import njit -from numba.typed import List -from HSP2.utilities import initm, make_numba_dict - -# the following imports added by rb to handle dynamic code and special actions -from HSP2.state import * -from HSP2.SPECL import specl -from HSP2.om import * -from HSP2.om_model_object import * -from HSP2.om_sim_timer import * -from HSP2.om_special_action import * -#from HSP2.om_equation import * -from HSP2.om_model_linkage import * -#from HSP2.om_data_matrix import * -#from HSP2.om_model_broadcast import * - - -ERRMSGS =('HYDR: SOLVE equations are indeterminate', #ERRMSG0 - 'HYDR: extrapolation of rchtab will take place', #ERRMSG1 - 'HYDR: SOLVE trapped with an oscillating condition', #ERRMSG2 - 'HYDR: Solve did not converge', #ERRMSG3 - 'HYDR: Solve converged to point outside valid range') #ERRMSG4 - -TOLERANCE = 0.001 # newton method max loops -MAXLOOPS = 100 # newton method exit tolerance - - -def hydr(io_manager, siminfo, uci, ts, ftables, state): - ''' find the state of the reach/reservoir at the end of the time interval - and the outflows during the interval - - CALL: hydr(store, general, ui, ts, state) - store is the Pandas/PyTable open store - general is a dictionary with simulation level infor (OP_SEQUENCE for example) - ui is a dictionary with RID specific HSPF UCI like data - ts is a dictionary with RID specific timeseries - state is a dictionary that contains all dynamic code dictionaries such as: - - specactions is a dictionary with all special actions - ''' - - steps = siminfo['steps'] # number of 
simulation points - uunits = siminfo['units'] - nexits = int(uci['PARAMETERS']['NEXITS']) - - # units conversion constants, 1 ACRE is 43560 sq ft. assumes input in acre-ft - VFACT = 43560.0 - AFACT = 43560.0 - if uunits == 2: - # si units conversion constants, 1 hectare is 10000 sq m, assumes area input in hectares, vol in Mm3 - VFACT = 1.0e6 - AFACT = 10000.0 - - u = uci['PARAMETERS'] - funct = array([u[name] for name in u.keys() if name.startswith('FUNCT')]).astype(int)[0:nexits] - ODGTF = array([u[name] for name in u.keys() if name.startswith('ODGTF')]).astype(int)[0:nexits] - ODFVF = array([u[name] for name in u.keys() if name.startswith('ODFVF')]).astype(int)[0:nexits] - - u = uci['STATES'] - colin = array([u[name] for name in u.keys() if name.startswith('COLIN')]).astype(float)[0:nexits] - outdg = array([u[name] for name in u.keys() if name.startswith('OUTDG')]).astype(float)[0:nexits] - - # COLIND timeseries might come in as COLIND, COLIND0, etc. otherwise UCI default - names = list(sorted([n for n in ts if n.startswith('COLIND')], reverse=True)) - df = DataFrame() - for i,c in enumerate(ODFVF): - df[i] = ts[names.pop()][0:steps] if c < 0 else full(steps, c) - COLIND = df.to_numpy() - - # OUTDGT timeseries might come in as OUTDGT, OUTDGT0, etc. 
otherwise UCI default - names = list(sorted([n for n in ts if n.startswith('OUTDG')], reverse=True)) - df = DataFrame() - for i,c in enumerate(ODGTF): - df[i] = ts[names.pop()][0:steps] if c > 0 else zeros(steps) - OUTDGT = df.to_numpy() - - # generic SAVE table doesn't know nexits for output flows and rates - if nexits > 1: - u = uci['SAVE'] - for key in ('O', 'OVOL'): - for i in range(nexits): - u[f'{key}{i+1}'] = u[key] - del u[key] - - # optional - defined, but can't used accidently - for name in ('SOLRAD','CLOUD','DEWTEMP','GATMP','WIND'): - if name not in ts: - ts[name] = full(steps, nan) - - # optional timeseries - for name in ('IVOL','POTEV','PREC'): - if name not in ts: - ts[name] = zeros(steps) - ts['CONVF'] = initm(siminfo, uci, 'VCONFG', 'MONTHLY_CONVF', 1.0) - - # extract key columns of specified FTable for faster access (1d vs. 2d) - rchtab = ftables[f"{uci['PARAMETERS']['FTBUCI']}"] - #rchtab = store[f"FTABLES/{uci['PARAMETERS']['FTBUCI']}"] - ts['volumeFT'] = rchtab['Volume'].to_numpy() * VFACT - ts['depthFT'] = rchtab['Depth'].to_numpy() - ts['sareaFT'] = rchtab['Area'].to_numpy() * AFACT - rchtab = rchtab.to_numpy() - - ui = make_numba_dict(uci) # Note: all values coverted to float automatically - ui['steps'] = steps - ui['delt'] = siminfo['delt'] - ui['nexits'] = nexits - ui['errlen'] = len(ERRMSGS) - ui['nrows'] = rchtab.shape[0] - ui['nodfv'] = any(ODFVF) - ui['uunits'] = uunits - - # Numba can't do 'O' + str(i) stuff yet, so do it here. 
Also need new style lists - Olabels = List() - OVOLlabels = List() - for i in range(nexits): - Olabels.append(f'O{i+1}') - OVOLlabels.append(f'OVOL{i+1}') - - ####################################################################################### - # the following section (1 of 3) added to HYDR by rb to handle dynamic code and special actions - ####################################################################################### - # state_info is some generic things about the simulation - # must be numba safe, so we don't just pass the whole state which is not - state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) - state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] - state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] - hsp2_local_py = state['hsp2_local_py'] - # It appears necessary to load this here, instead of from main.py, otherwise, - # _hydr_() does not recognize the function state_step_hydr()? 
- if (hsp2_local_py != False): - from hsp2_local_py import state_step_hydr - else: - from HSP2.state_fn_defaults import state_step_hydr - # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts - state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix'] - state_paths = state['state_paths'] - model_exec_list = state['model_exec_list'] # order of special actions and other dynamic ops - # initialize the hydr paths in case they don't already reside here - hydr_init_ix(state_ix, state_paths, state['domain']) - op_tokens = state['op_tokens'] - print("state_ix is type", type(state_ix)) - ####################################################################################### - - # Do the simulation with _hydr_ (ie run reaches simulation code) - errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, - state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list) - - if 'O' in ts: del ts['O'] - if 'OVOL' in ts: del ts['OVOL'] - - # save initial outflow(s) from reach: - uci['PARAMETERS']['ROS'] = ui['ROS'] - for i in range(nexits): - uci['PARAMETERS']['OS'+str(i+1)] = ui['OS'+str(i+1)] - - return errors, ERRMSGS - - -@njit(cache=True) -def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list): - errors = zeros(int(ui['errlen'])).astype(int64) - - steps = int(ui['steps']) # number of simulation steps - delts = ui['delt'] * 60.0 # seconds in simulation interval - uunits = ui['uunits'] - nrows = int(ui['nrows']) - nexits = int(ui['nexits']) - AUX1FG = int(ui['AUX1FG']) # True means DEP, SAREA will be computed - AUX2FG = int(ui['AUX2FG']) - AUX3FG = int(ui['AUX3FG']) - LKFG = int(ui['LKFG']) # flag, 1:lake, 0:stream - length = ui['LEN'] * 5280.0 # length of reach, in feet - if uunits == 2: - length = ui['LEN'] * 1000.0 # length of reach, in meters - DB50 = ui['DB50'] 
/ 12.0 # mean diameter of bed material - if uunits == 2: - DB50 = ui['DB50'] / 40.0 # mean diameter of bed material - DELTH = ui['DELTH'] - stcor = ui['STCOR'] - - # units conversion constants, 1 ACRE is 43560 sq ft. assumes input in acre-ft - VFACT = 43560.0 - AFACT = 43560.0 - LFACTA = 1.0 - SFACTA = 1.0 - TFACTA = 1.0 - # physical constants (English units) - GAM = 62.4 # density of water - GRAV = 32.2 # gravitational acceleration - AKAPPA = 0.4 # von karmen constant - if uunits == 2: - # si units conversion constants, 1 hectare is 10000 sq m, assumes area input in hectares, vol in Mm3 - VFACT = 1.0e6 - AFACT = 10000.0 - # physical constants (English units) - GAM = 9806. # density of water - GRAV = 9.81 # gravitational acceleration - - volumeFT = ts['volumeFT'] - depthFT = ts['depthFT'] - sareaFT = ts['sareaFT'] - - nodfv = ui['nodfv'] - ks = ui['KS'] - coks = 1 - ks - facta1 = 1.0 / (coks * delts) - - # MAIN loop Initialization - IVOL = ts['IVOL'] * VFACT # or sum civol, zeros if no inflow ??? 
- POTEV = ts['POTEV'] / 12.0 - PREC = ts['PREC'] / 12.0 - CONVF = ts['CONVF'] - convf = CONVF[0] - - # faster to preallocate arrays - like MATLAB) - o = zeros(nexits) - odz = zeros(nexits) - ovol = zeros(nexits) - oseff = zeros(nexits) - od1 = zeros(nexits) - od2 = zeros(nexits) - outdgt = zeros(nexits) - colind = zeros(nexits) - - outdgt[:] = OUTDGT[0,:] - colind[:] = COLIND[0,:] - - # numba limitation, ts can't have both 1-d and 2-d arrays in save Dict - O = zeros((steps, nexits)) - OVOL = zeros((steps, nexits)) - - ts['PRSUPY'] = PRSUPY = zeros(steps) - ts['RO'] = RO = zeros(steps) - ts['ROVOL'] = ROVOL = zeros(steps) - ts['VOL'] = VOL = zeros(steps) - ts['VOLEV'] = VOLEV = zeros(steps) - ts['IRRDEM'] = IRRDEM = zeros(steps) - if AUX1FG: - ts['DEP'] = DEP = zeros(steps) - ts['SAREA'] = SAREA = zeros(steps) - ts['USTAR'] = USTAR = zeros(steps) - ts['TAU'] = TAU = zeros(steps) - ts['AVDEP'] = AVDEP = zeros(steps) - ts['AVVEL'] = AVVEL = zeros(steps) - ts['HRAD'] = HRAD = zeros(steps) - ts['TWID'] = TWID = zeros(steps) - - zeroindex = fndrow(0.0, volumeFT) #$1126-1127 - topvolume = volumeFT[-1] - - vol = ui['VOL'] * VFACT # hydr-init, initial volume of water - if vol >= topvolume: - errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place - - # find row index that brackets the VOL - indx = fndrow(vol, volumeFT) - if nodfv: # simple interpolation, the hard way!! 
- v1 = volumeFT[indx] - v2 = volumeFT[indx+1] - rod1,od1[:] = demand(v1, rowsFT[indx, :], funct, nexits, delts, convf, colind, outdgt) - rod2,od2[:] = demand(v2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) - a1 = (v2 - vol) / (v2 - v1) - o[:] = a1 * od1[:] + (1.0 - a1) * od2[:] - ro = (a1 * rod1) + ((1.0 - a1) * rod2) - else: - ro,o[:] = demand(vol, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) #$1159-1160 - - # back to PHYDR - if AUX1FG >= 1: - dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors) # initial - - # hydr-irrig - irexit = int(ui['IREXIT']) -1 # irexit - exit number for irrigation withdrawals, 0 based ??? - #if irexit >= 1: - irminv = ui['IRMINV'] - rirwdl = 0.0 - #rirdem = 0.0 - #rirsht = 0.0 - irrdem = 0.0 - - # store initial outflow from reach: - ui['ROS'] = ro - for index in range(nexits): - ui['OS' + str(index + 1)] = o[index] - - # other initial vars - rovol = 0.0 - volev = 0.0 - IVOL0 = ts['IVOL'] # the actual inflow in simulation native units - - ####################################################################################### - # the following section (2 of 3) added by rb to HYDR, this one to prepare for dynamic state including special actions - ####################################################################################### - hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) - # these are integer placeholders faster than calling the array look each timestep - o1_ix, o2_ix, o3_ix, ivol_ix = hydr_ix['O1'], hydr_ix['O2'], hydr_ix['O3'], hydr_ix['IVOL'] - ro_ix, rovol_ix, volev_ix, vol_ix = hydr_ix['RO'], hydr_ix['ROVOL'], hydr_ix['VOLEV'], hydr_ix['VOL'] - # handle varying length outdgt - out_ix = arange(nexits) - if nexits > 0: - out_ix[0] = o1_ix - if nexits > 1: - out_ix[1] = o2_ix - if nexits > 2: - out_ix[2] = o3_ix - ####################################################################################### - - # HYDR 
(except where noted) - for step in range(steps): - convf = CONVF[step] - outdgt[:] = OUTDGT[step, :] - colind[:] = COLIND[step, :] - roseff = ro - oseff[:] = o[:] - - ####################################################################################### - # the following section (3 of 3) added by rb to accommodate dynamic code, operations models, and special actions - ####################################################################################### - # set state_ix with value of local state variables and/or needed vars - # Note: we pass IVOL0, not IVOL here since IVOL has been converted to different units - state_ix[ro_ix], state_ix[rovol_ix] = ro, rovol - di = 0 - for oi in range(nexits): - state_ix[out_ix[oi]] = outdgt[oi] - state_ix[vol_ix], state_ix[ivol_ix] = vol, IVOL0[step] - state_ix[volev_ix] = volev - # - these if statements may be irrelevant if default functions simply return - # when no objects are defined. - if (state_info['state_step_om'] == 'enabled'): - pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - if (state_info['state_step_hydr'] == 'enabled'): - state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) - if (state_info['state_step_om'] == 'enabled'): - #print("trying to execute state_step_om()") - # model_exec_list contains the model exec list in dependency order - # now these are all executed at once, but we need to make them only for domain end points - step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) # traditional 'ACTIONS' done in here - if ( (state_info['state_step_hydr'] == 'enabled') - or (state_info['state_step_om'] == 'enabled') ): - # Do write-backs for editable STATE variables - # OUTDGT is writeable - for oi in range(nexits): - outdgt[oi] = state_ix[out_ix[oi]] - # IVOL is writeable. 
- # Note: we must convert IVOL to the units expected in _hydr_ - # maybe routines should do this, and this is not needed (but pass VFACT in state) - IVOL[step] = state_ix[ivol_ix] * VFACT - # End dynamic code step() - ####################################################################################### - - # vols, sas variables and their initializations not needed. - if irexit >= 0: # irrigation exit is set, zero based number - if rirwdl > 0.0: # equivalent to OVOL for the irrigation exit - vol = irminv if irminv > vol - rirwdl else vol - rirwdl - if vol >= volumeFT[-1]: - errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place - - # DISCH with hydrologic routing - indx = fndrow(vol, volumeFT) # find row index that brackets the VOL - vv1 = volumeFT[indx] - rod1,od1[:] = demand(vv1, rowsFT[indx, :], funct, nexits, delts, convf, colind, outdgt) - vv2 = volumeFT[indx+1] - rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) - aa1 = (vv2 - vol) / (vv2 - vv1) - ro = (aa1 * rod1) + ((1.0 - aa1) * rod2) - o[:] = (aa1 * od1[:]) + ((1.0 - aa1) * od2[:]) - - # back to HYDR - if AUX1FG >= 1: # recompute surface area and depth - dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, - AUX1FG, errors) - else: - irrdem = 0.0 - #o[irexit] = 0.0 #???? 
not used anywhere, check if o[irexit] - - prsupy = PREC[step] * sarea - if uunits == 2: - prsupy = PREC[step] * sarea / 3.281 - volt = vol + IVOL[step] + prsupy - volev = 0.0 - if AUX1FG: # subtract evaporation - volpev = POTEV[step] * sarea - if uunits == 2: - volpev = POTEV[step] * sarea / 3.281 - if volev >= volt: - volev = volt - volt = 0.0 - else: - volev = volpev - volt -= volev - - # ROUTE/NOROUT calls - # common code - volint = volt - (ks * roseff * delts) # find intercept of eq 4 on vol axis - if volint < (volt * 1.0e-5): - volint = 0.0 - if volint <= 0.0: # case 3 -- no solution to simultaneous equations - indx = zeroindex - vol = 0.0 - ro = 0.0 - o[:] = 0.0 - rovol = volt - - if roseff > 0.0: # numba limitation, cant combine into one line - ovol[:] = (rovol/roseff) * oseff[:] - else: - ovol[:] = rovol / nexits - - else: # case 1 or 2 - oint = volint * facta1 # == ointsp, so ointsp variable dropped - if nodfv: - # ROUTE - rodz,odz[:] = demand(0.0, rowsFT[zeroindex,:], funct, nexits, delts, convf, colind, outdgt) - if oint > rodz: - # SOLVE - case 1-- outflow demands can be met in full - # premov will be used to check whether we are in a trap, arbitrary value - premov = -20 - move = 10 - - vv1 = volumeFT[indx] - rod1,od1[:] = demand(vv1, rowsFT[indx, :], funct, nexits, delts, convf,colind, outdgt) - vv2 = volumeFT[indx+1] - rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) - - while move != 0: - facta2 = rod1 - rod2 - factb2 = vv2 - vv1 - factc2 = vv2 * rod1 - vv1 * rod2 - det = facta1 * factb2 - facta2 - if det <= 0.0: - det = 0.0001 - errors[0] += 1 # ERRMSG0: SOLVE is indeterminate - - vol = max(0.0, (oint * factb2 - factc2 ) / det) - if vol > vv2: - if indx >= nrows-2: - if vol > topvolume: - errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place - move = 0 - else: - move = 1 - indx += 1 - vv1 = vv2 - od1[:] = od2[:] - rod1 = rod2 - vv2 = volumeFT[indx+1] - rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], 
funct, nexits, delts, convf, colind, outdgt) - elif vol < vv1: - indx -= 1 - move = -1 - vv2 = vv1 - od2[:] = od1[:] - rod2 = rod1 - vv1 = volumeFT[indx] - rod1,od1[:] = demand(vv1, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) - else: - move = 0 - - # check whether algorithm is in a trap, yo-yoing back and forth - if move + premov == 0: - errors[2] += 1 # ERRMSG2: oscillating trap - move = 0 - premov = move - - ro = oint - facta1 * vol - if vol < 1.0e-5: - ro = oint - vol = 0.0 - if ro < 1.0e-10: - ro = 0.0 - if ro <= 0.0: - o[:] = 0.0 - else: - diff = vol - vv1 - factr = 0.0 if diff < 0.01 else diff / (vv2 - vv1) - o[:] = od1[:] + (od2[:] - od1[:]) * factr - else: - # case 2 -- outflow demands cannot be met in full - ro = 0.0 - for i in range(nexits): - tro = ro + odz[i] - if tro <= oint: - o[i] = odz[i] - ro = tro - else: - o[i] = oint - ro - ro = oint - vol = 0.0 - indx = zeroindex - else: - # NOROUT - rod1,od1[:] = demand(vol, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) - if oint >= rod1: #case 1 -outflow demands are met in full - ro = rod1 - vol = volint - coks * ro * delts - if vol < 1.0e-5: - vol = 0.0 - o[:] = od1[:] - else: # case 2 -outflow demands cannot be met in full - ro = 0.0 - for i in range(nexits): - tro = ro + odz[i] - if tro <= oint: - o[i] = odz[i] - ro = tro - else: - o[i] = oint - ro - ro = oint - vol = 0.0 - indx = zeroindex - - # common ROUTE/NOROUT code - # an irrigation demand was made before routing - if (irexit >= 0) and (irrdem > 0.0): # an irrigation demand was made before routing - oseff[irexit] = irrdem - o[irexit] = irrdem - roseff += irrdem - ro += irrdem - IRRDEM[step] = irrdem - - # estimate the volumes of outflow - ovol[:] = (ks * oseff[:] + coks * o[:]) * delts - rovol = (ks * roseff + coks * ro) * delts - - # HYDR - if nexits > 1: - O[step,:] = o[:] * SFACTA * LFACTA - OVOL[step,:] = ovol[:] / VFACT - PRSUPY[step] = prsupy / VFACT - RO[step] = ro * SFACTA * LFACTA - ROVOL[step] = rovol / VFACT 
- VOLEV[step] = volev / VFACT - VOL[step] = vol / VFACT - - if AUX1FG: # compute final depth, surface area - if vol >= topvolume: - errors[1] += 1 # ERRMSG1: extrapolation of rchtab - indx = fndrow(vol, volumeFT) - dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors) - DEP[step] = dep - SAREA[step] = sarea / AFACT - - if vol > 0.0 and sarea > 0.0: - twid = sarea / length - avdep = vol / sarea - elif AUX1FG == 2: - twid = sarea / length - avdep = 0.0 - else: - twid = 0.0 - avdep = 0.0 - - if AUX2FG: - avvel = (length * ro / vol) if vol > 0.0 else 0.0 - if AUX3FG: - if avdep > 0.0: - # SHEAR; ustar (bed shear velocity), tau (bed shear stress) - if LKFG: # flag, 1:lake, 0:stream - ustar = avvel / (17.66 + (log10(avdep / (96.5 * DB50))) * 2.3 / AKAPPA) - tau = GAM/GRAV * ustar**2 #3796 - else: - hrad = (avdep*twid)/(2.0*avdep + twid) # hydraulic radius, manual eq 41 - slope = DELTH / length - ustar = sqrt(GRAV * slope * hrad) - tau = (GAM * slope) * hrad - else: - ustar = 0.0 - tau = 0.0 - hrad = 0.0 - USTAR[step] = ustar * LFACTA - TAU[step] = tau * TFACTA - - AVDEP[step] = avdep - AVVEL[step] = avvel - HRAD[step] = hrad - TWID[step] = twid - # END MAIN LOOP - - # NUMBA limitation for ts, and saving to HDF5 file is in individual columns - if nexits > 1: - for i in range(nexits): - ts[Olabels[i]] = O[:,i] - ts[OVOLlabels[i]] = OVOL[:,i] - return errors - - -@njit(cache=True) -def fndrow(v, volFT): - ''' finds highest index in FTable volume column whose volume < v''' - for indx,vol in enumerate(volFT): - if v < vol: - return indx-1 - return len(volFT) - 2 - - -@njit(cache=True) -def demand(vol, rowFT, funct, nexits, delts, convf, colind, outdgt): - od = zeros(nexits) - for i in range(nexits): - col = colind[i] - icol = int(col) - if icol != 0: - diff = col - float(icol) - if diff >= 1.0e-6: - _od1 = rowFT[icol-1] - odfv = _od1 + diff * (_od1 - rowFT[icol]) * convf - else: - odfv = rowFT[icol-1] * convf - 
else: - odfv = 0.0 - odgt = outdgt[i] - - if odfv == 0.0 and odgt == 0.0: - od[i] = 0.0 - elif odfv != 0.0 and odgt == 0.0: - od[i] = odfv - elif odfv == 0.0 and odgt != 0.0: - od[i] = odgt - else: - if funct[i] == 1: od[i] = min(odfv,odgt) - elif funct[i] == 2: od[i] = max(odfv,odgt) - elif funct[i] == 3: od[i] = odfv + odgt - elif funct[i] == 4: od[i] = max(odfv, (vol - odgt) / delts) - return od.sum(), od - - -@njit(cache=True) -def auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors): - '''Compute depth, stage, surface area, average depth, topwidth and hydraulic radius''' - if vol > 0.0: - sa1 = sareaFT[indx] - a = sareaFT[indx+1] - sa1 - b = 2.0 * sa1 - vol1 = volumeFT[indx] - c = -((vol - vol1) / (volumeFT[indx+1] - vol1)) * (b+a) - - rdep2 = 0.5 # initial guess for the Newton's method - for i in range(MAXLOOPS): - rdep1 = rdep2 - rdep2 = rdep1 - (a*rdep1**2 + b*rdep1 + c)/(2.0 * a * rdep1 + b) - if abs(rdep2-rdep1) < TOLERANCE: - break - else: - errors[3] += 1 # convergence failure error message - if rdep2 > 1.0 or rdep2 < 0.0: - errors[4] += 1 # converged outside valid range error message - - dep1 = depthFT[indx] - dep = dep1 + rdep2 * (depthFT[indx+1] - dep1) # manual eq (36) - sarea = sa1 + a * rdep2 - - avdep = vol / sarea # average depth calculation, manual eq (39) - twid = sarea / length # top-width calculation, manual eq (40) - hrad = (avdep * twid) / (2.0 * avdep + twid) # hydraulic radius, manual eq (41) - elif AUX1FG == 2: - dep = depthFT[indx] # removed in HSPF 12.4 - sarea = sareaFT[indx] - avdep = 0.0 - twid = sarea / length - hrad = 0.0 - else: - dep = 0.0 - sarea = 0.0 - avdep = 0.0 - twid = 0.0 - hrad = 0.0 - - stage = dep + stcor # stage calculation and output, manual eq (37) - - return dep, stage, sarea, avdep, twid, hrad - -def expand_HYDR_masslinks(flags, uci, dat, recs): - if flags['HYDR']: - # IVOL - rec = {} - rec['MFACTOR'] = dat.MFACTOR - rec['SGRPN'] = 'HYDR' - if dat.SGRPN == "ROFLOW": - rec['SMEMN'] = 
'ROVOL' - else: - rec['SMEMN'] = 'OVOL' - rec['SMEMSB1'] = dat.SMEMSB1 - rec['SMEMSB2'] = dat.SMEMSB2 - rec['TMEMN'] = 'IVOL' - rec['TMEMSB1'] = dat.TMEMSB1 - rec['TMEMSB2'] = dat.TMEMSB2 - rec['SVOL'] = dat.SVOL - recs.append(rec) - return recs +''' Copyright (c) 2020 by RESPEC, INC. +Author: Robert Heaphy, Ph.D. +License: LGPL2 +Conversion of no category version of HSPF HRCHHYD.FOR into Python''' + + +''' Development Notes: + Categories not implimented in this version + Irregation only partially implimented in this version + Only English units currently supported + FTABLE can come from WDM or UCI file based on FTBDSN 1 or 0 +''' + + +from numpy import zeros, any, full, nan, array, int64, arange +from pandas import DataFrame +from math import sqrt, log10 +from numba import njit +from numba.typed import List +from HSP2.utilities import initm, make_numba_dict + +# the following imports added by rb to handle dynamic code and special actions +from HSP2.state import * +from HSP2.SPECL import specl +from HSP2.om import * +from HSP2.om_model_object import * +from HSP2.om_sim_timer import * +from HSP2.om_special_action import * +#from HSP2.om_equation import * +from HSP2.om_model_linkage import * +#from HSP2.om_data_matrix import * +#from HSP2.om_model_broadcast import * + + +ERRMSGS =('HYDR: SOLVE equations are indeterminate', #ERRMSG0 + 'HYDR: extrapolation of rchtab will take place', #ERRMSG1 + 'HYDR: SOLVE trapped with an oscillating condition', #ERRMSG2 + 'HYDR: Solve did not converge', #ERRMSG3 + 'HYDR: Solve converged to point outside valid range') #ERRMSG4 + +TOLERANCE = 0.001 # newton method max loops +MAXLOOPS = 100 # newton method exit tolerance + + +def hydr(io_manager, siminfo, uci, ts, ftables, state): + ''' find the state of the reach/reservoir at the end of the time interval + and the outflows during the interval + + CALL: hydr(store, general, ui, ts, state) + store is the Pandas/PyTable open store + general is a dictionary with simulation level infor 
(OP_SEQUENCE for example) + ui is a dictionary with RID specific HSPF UCI like data + ts is a dictionary with RID specific timeseries + state is a dictionary that contains all dynamic code dictionaries such as: + - specactions is a dictionary with all special actions + ''' + + steps = siminfo['steps'] # number of simulation points + uunits = siminfo['units'] + nexits = int(uci['PARAMETERS']['NEXITS']) + + # units conversion constants, 1 ACRE is 43560 sq ft. assumes input in acre-ft + VFACT = 43560.0 + AFACT = 43560.0 + if uunits == 2: + # si units conversion constants, 1 hectare is 10000 sq m, assumes area input in hectares, vol in Mm3 + VFACT = 1.0e6 + AFACT = 10000.0 + + u = uci['PARAMETERS'] + funct = array([u[name] for name in u.keys() if name.startswith('FUNCT')]).astype(int)[0:nexits] + ODGTF = array([u[name] for name in u.keys() if name.startswith('ODGTF')]).astype(int)[0:nexits] + ODFVF = array([u[name] for name in u.keys() if name.startswith('ODFVF')]).astype(int)[0:nexits] + + u = uci['STATES'] + colin = array([u[name] for name in u.keys() if name.startswith('COLIN')]).astype(float)[0:nexits] + outdg = array([u[name] for name in u.keys() if name.startswith('OUTDG')]).astype(float)[0:nexits] + + # COLIND timeseries might come in as COLIND, COLIND0, etc. otherwise UCI default + names = list(sorted([n for n in ts if n.startswith('COLIND')], reverse=True)) + df = DataFrame() + for i,c in enumerate(ODFVF): + df[i] = ts[names.pop()][0:steps] if c < 0 else full(steps, c) + COLIND = df.to_numpy() + + # OUTDGT timeseries might come in as OUTDGT, OUTDGT0, etc. 
otherwise UCI default + names = list(sorted([n for n in ts if n.startswith('OUTDG')], reverse=True)) + df = DataFrame() + for i,c in enumerate(ODGTF): + df[i] = ts[names.pop()][0:steps] if c > 0 else zeros(steps) + OUTDGT = df.to_numpy() + + # generic SAVE table doesn't know nexits for output flows and rates + if nexits > 1: + u = uci['SAVE'] + for key in ('O', 'OVOL'): + for i in range(nexits): + u[f'{key}{i+1}'] = u[key] + del u[key] + + # optional - defined, but can't used accidently + for name in ('SOLRAD','CLOUD','DEWTEMP','GATMP','WIND'): + if name not in ts: + ts[name] = full(steps, nan) + + # optional timeseries + for name in ('IVOL','POTEV','PREC'): + if name not in ts: + ts[name] = zeros(steps) + ts['CONVF'] = initm(siminfo, uci, 'VCONFG', 'MONTHLY_CONVF', 1.0) + + # extract key columns of specified FTable for faster access (1d vs. 2d) + rchtab = ftables[f"{uci['PARAMETERS']['FTBUCI']}"] + #rchtab = store[f"FTABLES/{uci['PARAMETERS']['FTBUCI']}"] + ts['volumeFT'] = rchtab['Volume'].to_numpy() * VFACT + ts['depthFT'] = rchtab['Depth'].to_numpy() + ts['sareaFT'] = rchtab['Area'].to_numpy() * AFACT + rchtab = rchtab.to_numpy() + + ui = make_numba_dict(uci) # Note: all values coverted to float automatically + ui['steps'] = steps + ui['delt'] = siminfo['delt'] + ui['nexits'] = nexits + ui['errlen'] = len(ERRMSGS) + ui['nrows'] = rchtab.shape[0] + ui['nodfv'] = any(ODFVF) + ui['uunits'] = uunits + + # Numba can't do 'O' + str(i) stuff yet, so do it here. 
Also need new style lists + Olabels = List() + OVOLlabels = List() + for i in range(nexits): + Olabels.append(f'O{i+1}') + OVOLlabels.append(f'OVOL{i+1}') + + ####################################################################################### + # the following section (1 of 3) added to HYDR by rb to handle dynamic code and special actions + ####################################################################################### + # state_info is some generic things about the simulation + # must be numba safe, so we don't just pass the whole state which is not + state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) + state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] + state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] + hsp2_local_py = state['hsp2_local_py'] + # It appears necessary to load this here, instead of from main.py, otherwise, + # _hydr_() does not recognize the function state_step_hydr()? 
+ if (hsp2_local_py != False): + from hsp2_local_py import state_step_hydr + else: + from HSP2.state_fn_defaults import state_step_hydr + # initialize the hydr paths in case they don't already reside here + hydr_init_ix(state, state['domain']) + # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts + state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix'] + state_paths = state['state_paths'] + model_exec_list = state['model_exec_list'] # order of special actions and other dynamic ops + op_tokens = state['op_tokens'] + ####################################################################################### + + # Do the simulation with _hydr_ (ie run reaches simulation code) + errors = _hydr_(ui, ts, COLIND, OUTDGT, rchtab, funct, Olabels, OVOLlabels, + state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list) + + if 'O' in ts: del ts['O'] + if 'OVOL' in ts: del ts['OVOL'] + + # save initial outflow(s) from reach: + uci['PARAMETERS']['ROS'] = ui['ROS'] + for i in range(nexits): + uci['PARAMETERS']['OS'+str(i+1)] = ui['OS'+str(i+1)] + + return errors, ERRMSGS + + +@njit +def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list): + errors = zeros(int(ui['errlen'])).astype(int64) + + steps = int(ui['steps']) # number of simulation steps + delts = ui['delt'] * 60.0 # seconds in simulation interval + uunits = ui['uunits'] + nrows = int(ui['nrows']) + nexits = int(ui['nexits']) + AUX1FG = int(ui['AUX1FG']) # True means DEP, SAREA will be computed + AUX2FG = int(ui['AUX2FG']) + AUX3FG = int(ui['AUX3FG']) + LKFG = int(ui['LKFG']) # flag, 1:lake, 0:stream + length = ui['LEN'] * 5280.0 # length of reach, in feet + if uunits == 2: + length = ui['LEN'] * 1000.0 # length of reach, in meters + DB50 = ui['DB50'] / 12.0 # mean diameter of bed material + if uunits == 2: + DB50 = 
ui['DB50'] / 40.0 # mean diameter of bed material + DELTH = ui['DELTH'] + stcor = ui['STCOR'] + + # units conversion constants, 1 ACRE is 43560 sq ft. assumes input in acre-ft + VFACT = 43560.0 + AFACT = 43560.0 + LFACTA = 1.0 + SFACTA = 1.0 + TFACTA = 1.0 + # physical constants (English units) + GAM = 62.4 # density of water + GRAV = 32.2 # gravitational acceleration + AKAPPA = 0.4 # von karmen constant + if uunits == 2: + # si units conversion constants, 1 hectare is 10000 sq m, assumes area input in hectares, vol in Mm3 + VFACT = 1.0e6 + AFACT = 10000.0 + # physical constants (English units) + GAM = 9806. # density of water + GRAV = 9.81 # gravitational acceleration + + volumeFT = ts['volumeFT'] + depthFT = ts['depthFT'] + sareaFT = ts['sareaFT'] + + nodfv = ui['nodfv'] + ks = ui['KS'] + coks = 1 - ks + facta1 = 1.0 / (coks * delts) + + # MAIN loop Initialization + IVOL = ts['IVOL'] * VFACT # or sum civol, zeros if no inflow ??? + POTEV = ts['POTEV'] / 12.0 + PREC = ts['PREC'] / 12.0 + CONVF = ts['CONVF'] + convf = CONVF[0] + + # faster to preallocate arrays - like MATLAB) + o = zeros(nexits) + odz = zeros(nexits) + ovol = zeros(nexits) + oseff = zeros(nexits) + od1 = zeros(nexits) + od2 = zeros(nexits) + outdgt = zeros(nexits) + colind = zeros(nexits) + + outdgt[:] = OUTDGT[0,:] + colind[:] = COLIND[0,:] + + # numba limitation, ts can't have both 1-d and 2-d arrays in save Dict + O = zeros((steps, nexits)) + OVOL = zeros((steps, nexits)) + + ts['PRSUPY'] = PRSUPY = zeros(steps) + ts['RO'] = RO = zeros(steps) + ts['ROVOL'] = ROVOL = zeros(steps) + ts['VOL'] = VOL = zeros(steps) + ts['VOLEV'] = VOLEV = zeros(steps) + ts['IRRDEM'] = IRRDEM = zeros(steps) + if AUX1FG: + ts['DEP'] = DEP = zeros(steps) + ts['SAREA'] = SAREA = zeros(steps) + ts['USTAR'] = USTAR = zeros(steps) + ts['TAU'] = TAU = zeros(steps) + ts['AVDEP'] = AVDEP = zeros(steps) + ts['AVVEL'] = AVVEL = zeros(steps) + ts['HRAD'] = HRAD = zeros(steps) + ts['TWID'] = TWID = zeros(steps) + + zeroindex = 
fndrow(0.0, volumeFT) #$1126-1127 + topvolume = volumeFT[-1] + + vol = ui['VOL'] * VFACT # hydr-init, initial volume of water + if vol >= topvolume: + errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place + + # find row index that brackets the VOL + indx = fndrow(vol, volumeFT) + if nodfv: # simple interpolation, the hard way!! + v1 = volumeFT[indx] + v2 = volumeFT[indx+1] + rod1,od1[:] = demand(v1, rowsFT[indx, :], funct, nexits, delts, convf, colind, outdgt) + rod2,od2[:] = demand(v2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) + a1 = (v2 - vol) / (v2 - v1) + o[:] = a1 * od1[:] + (1.0 - a1) * od2[:] + ro = (a1 * rod1) + ((1.0 - a1) * rod2) + else: + ro,o[:] = demand(vol, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) #$1159-1160 + + # back to PHYDR + if AUX1FG >= 1: + dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors) # initial + + # hydr-irrig + irexit = int(ui['IREXIT']) -1 # irexit - exit number for irrigation withdrawals, 0 based ??? 
+ #if irexit >= 1: + irminv = ui['IRMINV'] + rirwdl = 0.0 + #rirdem = 0.0 + #rirsht = 0.0 + irrdem = 0.0 + + # store initial outflow from reach: + ui['ROS'] = ro + for index in range(nexits): + ui['OS' + str(index + 1)] = o[index] + + # other initial vars + rovol = 0.0 + volev = 0.0 + IVOL0 = ts['IVOL'] # the actual inflow in simulation native units + + ####################################################################################### + # the following section (2 of 3) added by rb to HYDR, this one to prepare for dynamic state including special actions + ####################################################################################### + hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) + # these are integer placeholders faster than calling the array look each timestep + o1_ix, o2_ix, o3_ix, ivol_ix = hydr_ix['O1'], hydr_ix['O2'], hydr_ix['O3'], hydr_ix['IVOL'] + ro_ix, rovol_ix, volev_ix, vol_ix = hydr_ix['RO'], hydr_ix['ROVOL'], hydr_ix['VOLEV'], hydr_ix['VOL'] + # handle varying length outdgt + out_ix = arange(nexits) + if nexits > 0: + out_ix[0] = o1_ix + if nexits > 1: + out_ix[1] = o2_ix + if nexits > 2: + out_ix[2] = o3_ix + ####################################################################################### + + # HYDR (except where noted) + for step in range(steps): + convf = CONVF[step] + outdgt[:] = OUTDGT[step, :] + colind[:] = COLIND[step, :] + roseff = ro + oseff[:] = o[:] + + ####################################################################################### + # the following section (3 of 3) added by rb to accommodate dynamic code, operations models, and special actions + ####################################################################################### + # set state_ix with value of local state variables and/or needed vars + # Note: we pass IVOL0, not IVOL here since IVOL has been converted to different units + state_ix[ro_ix], state_ix[rovol_ix] = ro, rovol + di = 0 + for oi in range(nexits): + 
state_ix[out_ix[oi]] = outdgt[oi] + state_ix[vol_ix], state_ix[ivol_ix] = vol, IVOL0[step] + state_ix[volev_ix] = volev + # - these if statements may be irrelevant if default functions simply return + # when no objects are defined. + if (state_info['state_step_om'] == 'enabled'): + pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + if (state_info['state_step_hydr'] == 'enabled'): + state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) + if (state_info['state_step_om'] == 'enabled'): + #print("trying to execute state_step_om()") + # model_exec_list contains the model exec list in dependency order + # now these are all executed at once, but we need to make them only for domain end points + step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) # traditional 'ACTIONS' done in here + if ( (state_info['state_step_hydr'] == 'enabled') + or (state_info['state_step_om'] == 'enabled') ): + # Do write-backs for editable STATE variables + # OUTDGT is writeable + for oi in range(nexits): + outdgt[oi] = state_ix[out_ix[oi]] + # IVOL is writeable. + # Note: we must convert IVOL to the units expected in _hydr_ + # maybe routines should do this, and this is not needed (but pass VFACT in state) + IVOL[step] = state_ix[ivol_ix] * VFACT + # End dynamic code step() + ####################################################################################### + + # vols, sas variables and their initializations not needed. 
+ if irexit >= 0: # irrigation exit is set, zero based number + if rirwdl > 0.0: # equivalent to OVOL for the irrigation exit + vol = irminv if irminv > vol - rirwdl else vol - rirwdl + if vol >= volumeFT[-1]: + errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place + + # DISCH with hydrologic routing + indx = fndrow(vol, volumeFT) # find row index that brackets the VOL + vv1 = volumeFT[indx] + rod1,od1[:] = demand(vv1, rowsFT[indx, :], funct, nexits, delts, convf, colind, outdgt) + vv2 = volumeFT[indx+1] + rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) + aa1 = (vv2 - vol) / (vv2 - vv1) + ro = (aa1 * rod1) + ((1.0 - aa1) * rod2) + o[:] = (aa1 * od1[:]) + ((1.0 - aa1) * od2[:]) + + # back to HYDR + if AUX1FG >= 1: # recompute surface area and depth + dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, + AUX1FG, errors) + else: + irrdem = 0.0 + #o[irexit] = 0.0 #???? not used anywhere, check if o[irexit] + + prsupy = PREC[step] * sarea + if uunits == 2: + prsupy = PREC[step] * sarea / 3.281 + volt = vol + IVOL[step] + prsupy + volev = 0.0 + if AUX1FG: # subtract evaporation + volpev = POTEV[step] * sarea + if uunits == 2: + volpev = POTEV[step] * sarea / 3.281 + if volev >= volt: + volev = volt + volt = 0.0 + else: + volev = volpev + volt -= volev + + # ROUTE/NOROUT calls + # common code + volint = volt - (ks * roseff * delts) # find intercept of eq 4 on vol axis + if volint < (volt * 1.0e-5): + volint = 0.0 + if volint <= 0.0: # case 3 -- no solution to simultaneous equations + indx = zeroindex + vol = 0.0 + ro = 0.0 + o[:] = 0.0 + rovol = volt + + if roseff > 0.0: # numba limitation, cant combine into one line + ovol[:] = (rovol/roseff) * oseff[:] + else: + ovol[:] = rovol / nexits + + else: # case 1 or 2 + oint = volint * facta1 # == ointsp, so ointsp variable dropped + if nodfv: + # ROUTE + rodz,odz[:] = demand(0.0, rowsFT[zeroindex,:], funct, nexits, delts, convf, 
colind, outdgt) + if oint > rodz: + # SOLVE - case 1-- outflow demands can be met in full + # premov will be used to check whether we are in a trap, arbitrary value + premov = -20 + move = 10 + + vv1 = volumeFT[indx] + rod1,od1[:] = demand(vv1, rowsFT[indx, :], funct, nexits, delts, convf,colind, outdgt) + vv2 = volumeFT[indx+1] + rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) + + while move != 0: + facta2 = rod1 - rod2 + factb2 = vv2 - vv1 + factc2 = vv2 * rod1 - vv1 * rod2 + det = facta1 * factb2 - facta2 + if det <= 0.0: + det = 0.0001 + errors[0] += 1 # ERRMSG0: SOLVE is indeterminate + + vol = max(0.0, (oint * factb2 - factc2 ) / det) + if vol > vv2: + if indx >= nrows-2: + if vol > topvolume: + errors[1] += 1 # ERRMSG1: extrapolation of rchtab will take place + move = 0 + else: + move = 1 + indx += 1 + vv1 = vv2 + od1[:] = od2[:] + rod1 = rod2 + vv2 = volumeFT[indx+1] + rod2,od2[:] = demand(vv2, rowsFT[indx+1,:], funct, nexits, delts, convf, colind, outdgt) + elif vol < vv1: + indx -= 1 + move = -1 + vv2 = vv1 + od2[:] = od1[:] + rod2 = rod1 + vv1 = volumeFT[indx] + rod1,od1[:] = demand(vv1, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) + else: + move = 0 + + # check whether algorithm is in a trap, yo-yoing back and forth + if move + premov == 0: + errors[2] += 1 # ERRMSG2: oscillating trap + move = 0 + premov = move + + ro = oint - facta1 * vol + if vol < 1.0e-5: + ro = oint + vol = 0.0 + if ro < 1.0e-10: + ro = 0.0 + if ro <= 0.0: + o[:] = 0.0 + else: + diff = vol - vv1 + factr = 0.0 if diff < 0.01 else diff / (vv2 - vv1) + o[:] = od1[:] + (od2[:] - od1[:]) * factr + else: + # case 2 -- outflow demands cannot be met in full + ro = 0.0 + for i in range(nexits): + tro = ro + odz[i] + if tro <= oint: + o[i] = odz[i] + ro = tro + else: + o[i] = oint - ro + ro = oint + vol = 0.0 + indx = zeroindex + else: + # NOROUT + rod1,od1[:] = demand(vol, rowsFT[indx,:], funct, nexits, delts, convf, colind, outdgt) + 
if oint >= rod1: #case 1 -outflow demands are met in full + ro = rod1 + vol = volint - coks * ro * delts + if vol < 1.0e-5: + vol = 0.0 + o[:] = od1[:] + else: # case 2 -outflow demands cannot be met in full + ro = 0.0 + for i in range(nexits): + tro = ro + odz[i] + if tro <= oint: + o[i] = odz[i] + ro = tro + else: + o[i] = oint - ro + ro = oint + vol = 0.0 + indx = zeroindex + + # common ROUTE/NOROUT code + # an irrigation demand was made before routing + if (irexit >= 0) and (irrdem > 0.0): # an irrigation demand was made before routing + oseff[irexit] = irrdem + o[irexit] = irrdem + roseff += irrdem + ro += irrdem + IRRDEM[step] = irrdem + + # estimate the volumes of outflow + ovol[:] = (ks * oseff[:] + coks * o[:]) * delts + rovol = (ks * roseff + coks * ro) * delts + + # HYDR + if nexits > 1: + O[step,:] = o[:] * SFACTA * LFACTA + OVOL[step,:] = ovol[:] / VFACT + PRSUPY[step] = prsupy / VFACT + RO[step] = ro * SFACTA * LFACTA + ROVOL[step] = rovol / VFACT + VOLEV[step] = volev / VFACT + VOL[step] = vol / VFACT + + if AUX1FG: # compute final depth, surface area + if vol >= topvolume: + errors[1] += 1 # ERRMSG1: extrapolation of rchtab + indx = fndrow(vol, volumeFT) + dep, stage, sarea, avdep, twid, hrad = auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors) + DEP[step] = dep + SAREA[step] = sarea / AFACT + + if vol > 0.0 and sarea > 0.0: + twid = sarea / length + avdep = vol / sarea + elif AUX1FG == 2: + twid = sarea / length + avdep = 0.0 + else: + twid = 0.0 + avdep = 0.0 + + if AUX2FG: + avvel = (length * ro / vol) if vol > 0.0 else 0.0 + if AUX3FG: + if avdep > 0.0: + # SHEAR; ustar (bed shear velocity), tau (bed shear stress) + if LKFG: # flag, 1:lake, 0:stream + ustar = avvel / (17.66 + (log10(avdep / (96.5 * DB50))) * 2.3 / AKAPPA) + tau = GAM/GRAV * ustar**2 #3796 + else: + hrad = (avdep*twid)/(2.0*avdep + twid) # hydraulic radius, manual eq 41 + slope = DELTH / length + ustar = sqrt(GRAV * slope * hrad) + tau = (GAM * slope) * 
hrad + else: + ustar = 0.0 + tau = 0.0 + hrad = 0.0 + USTAR[step] = ustar * LFACTA + TAU[step] = tau * TFACTA + + AVDEP[step] = avdep + AVVEL[step] = avvel + HRAD[step] = hrad + TWID[step] = twid + # END MAIN LOOP + + # NUMBA limitation for ts, and saving to HDF5 file is in individual columns + if nexits > 1: + for i in range(nexits): + ts[Olabels[i]] = O[:,i] + ts[OVOLlabels[i]] = OVOL[:,i] + return errors + + +@njit(cache=True) +def fndrow(v, volFT): + ''' finds highest index in FTable volume column whose volume < v''' + for indx,vol in enumerate(volFT): + if v < vol: + return indx-1 + return len(volFT) - 2 + + +@njit(cache=True) +def demand(vol, rowFT, funct, nexits, delts, convf, colind, outdgt): + od = zeros(nexits) + for i in range(nexits): + col = colind[i] + icol = int(col) + if icol != 0: + diff = col - float(icol) + if diff >= 1.0e-6: + _od1 = rowFT[icol-1] + odfv = _od1 + diff * (_od1 - rowFT[icol]) * convf + else: + odfv = rowFT[icol-1] * convf + else: + odfv = 0.0 + odgt = outdgt[i] + + if odfv == 0.0 and odgt == 0.0: + od[i] = 0.0 + elif odfv != 0.0 and odgt == 0.0: + od[i] = odfv + elif odfv == 0.0 and odgt != 0.0: + od[i] = odgt + else: + if funct[i] == 1: od[i] = min(odfv,odgt) + elif funct[i] == 2: od[i] = max(odfv,odgt) + elif funct[i] == 3: od[i] = odfv + odgt + elif funct[i] == 4: od[i] = max(odfv, (vol - odgt) / delts) + return od.sum(), od + + +@njit(cache=True) +def auxil(volumeFT, depthFT, sareaFT, indx, vol, length, stcor, AUX1FG, errors): + '''Compute depth, stage, surface area, average depth, topwidth and hydraulic radius''' + if vol > 0.0: + sa1 = sareaFT[indx] + a = sareaFT[indx+1] - sa1 + b = 2.0 * sa1 + vol1 = volumeFT[indx] + c = -((vol - vol1) / (volumeFT[indx+1] - vol1)) * (b+a) + + rdep2 = 0.5 # initial guess for the Newton's method + for i in range(MAXLOOPS): + rdep1 = rdep2 + rdep2 = rdep1 - (a*rdep1**2 + b*rdep1 + c)/(2.0 * a * rdep1 + b) + if abs(rdep2-rdep1) < TOLERANCE: + break + else: + errors[3] += 1 # convergence failure 
error message + if rdep2 > 1.0 or rdep2 < 0.0: + errors[4] += 1 # converged outside valid range error message + + dep1 = depthFT[indx] + dep = dep1 + rdep2 * (depthFT[indx+1] - dep1) # manual eq (36) + sarea = sa1 + a * rdep2 + + avdep = vol / sarea # average depth calculation, manual eq (39) + twid = sarea / length # top-width calculation, manual eq (40) + hrad = (avdep * twid) / (2.0 * avdep + twid) # hydraulic radius, manual eq (41) + elif AUX1FG == 2: + dep = depthFT[indx] # removed in HSPF 12.4 + sarea = sareaFT[indx] + avdep = 0.0 + twid = sarea / length + hrad = 0.0 + else: + dep = 0.0 + sarea = 0.0 + avdep = 0.0 + twid = 0.0 + hrad = 0.0 + + stage = dep + stcor # stage calculation and output, manual eq (37) + + return dep, stage, sarea, avdep, twid, hrad + +def expand_HYDR_masslinks(flags, uci, dat, recs): + if flags['HYDR']: + # IVOL + rec = {} + rec['MFACTOR'] = dat.MFACTOR + rec['SGRPN'] = 'HYDR' + if dat.SGRPN == "ROFLOW": + rec['SMEMN'] = 'ROVOL' + else: + rec['SMEMN'] = 'OVOL' + rec['SMEMSB1'] = dat.SMEMSB1 + rec['SMEMSB2'] = dat.SMEMSB2 + rec['TMEMN'] = 'IVOL' + rec['TMEMSB1'] = dat.TMEMSB1 + rec['TMEMSB2'] = dat.TMEMSB2 + rec['SVOL'] = dat.SVOL + recs.append(rec) + return recs \ No newline at end of file diff --git a/HSP2/SEDTRN.py b/HSP2/SEDTRN.py index 082aa38b..46cc1624 100644 --- a/HSP2/SEDTRN.py +++ b/HSP2/SEDTRN.py @@ -1,1008 +1,1012 @@ -''' Copyright (c) 2020 by RESPEC, INC. -Authors: Robert Heaphy, Ph.D. 
and Paul Duda -License: LGPL2 -''' - -from numpy import array, zeros, where, int64, arange -from math import log10, exp -from numba import njit -from HSP2.ADCALC import advect -from HSP2.utilities import make_numba_dict - -# the following imports added to handle special actions -from HSP2.om import * -from HSP2.om_model_linkage import * - -ERRMSGS =('SEDTRN: Warning -- bed storage of sediment size fraction sand is empty', #ERRMSG0 - 'SEDTRN: Warning -- bed storage of sediment size fraction silt is empty', #ERRMSG1 - 'SEDTRN: Warning -- bed storage of sediment size fraction clay is empty', #ERRMSG2 - 'SEDTRN: Warning -- bed depth appears excessive', #ERRMSG3 - 'SEDTRN: Fatal error ocurred in colby method- variable outside valid range- switching to toffaleti method', #ERRMSG4 - 'SEDTRN: Simulation of sediment requires all 3 "auxiliary flags" (AUX1FG, etc) in section HYDR must be turned on', #ERRMSG5 - 'SEDTRN: When specifying the initial composition of the bed, the fraction of sand, silt, and clay must sum to a value close to 1.0.') #ERRMSG6 - -def sedtrn(io_manager, siminfo, uci, ts, state): - ''' Simulate behavior of inorganic sediment''' - - # simlen = siminfo['steps'] - # delt = siminfo['delt'] - delt60 = siminfo['delt'] / 60 - delts = siminfo['delt'] * 60 - uunits = siminfo['units'] - - advectData = uci['advectData'] - (nexits, vol, VOL, SROVOL, EROVOL, SOVOL, EOVOL) = advectData - - ts['VOL'] = VOL - ts['SROVOL'] = SROVOL - ts['EROVOL'] = EROVOL - for i in range(nexits): - ts['SOVOL' + str(i + 1)] = SOVOL[:, i] - ts['EOVOL' + str(i + 1)] = EOVOL[:, i] - - ui = make_numba_dict(uci) - ui['simlen'] = siminfo['steps'] - ui['uunits'] = siminfo['units'] - ui['vol'] = vol - ui['delts'] = siminfo['delt'] * 60 - ui['delt60'] = siminfo['delt'] / 60 - ui['errlen'] = len(ERRMSGS) - - ui_silt = uci['SILT'] - if uunits == 1: - ui['silt_d'] = ui_silt['D'] * 0.0833 - ui['silt_w'] = ui_silt['W'] * delts * 0.0254 # convert settling velocity from m/sec to m/ivl - else: - 
ui['silt_d'] = ui_silt['D'] * 0.001 - ui['silt_w'] = ui_silt['W'] * delts * 0.001 # convert settling velocity from m/sec to m/ivl - ui['silt_rho'] = ui_silt['RHO'] - ui['silt_taucd'] = ui_silt['TAUCD'] - ui['silt_taucs'] = ui_silt['TAUCS'] - ui['silt_m'] = ui_silt['M'] * delt60 / 24.0 * 4.880 # convert erodibility coeff from /day to /ivl - - ui_clay = uci['CLAY'] - if uunits == 1: - ui['clay_d'] = ui_clay['D'] * 0.0833 - ui['clay_w'] = ui_clay['W'] * delts * 0.0254 # convert settling velocity from m/sec to m/ivl - else: - ui['clay_d'] = ui_clay['D'] * 0.001 - ui['clay_w'] = ui_clay['W'] * delts * 0.001 # convert settling velocity from m/sec to m/ivl - ui['clay_rho'] = ui_clay['RHO'] - ui['clay_taucd'] = ui_clay['TAUCD'] - ui['clay_taucs'] = ui_clay['TAUCS'] - ui['clay_m'] = ui_clay['M'] * delt60 / 24.0 * 4.880 # convert erodibility coeff from /day to /ivl - - ####################################################################################### - # the following section (1 of 3) added to SEDTRN by pbd to handle special actions - ####################################################################################### - # state_info is some generic things about the simulation - # must be numba safe, so we don't just pass the whole state which is not - state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type) - state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity'] - state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om'] - # hsp2_local_py = state['hsp2_local_py'] - # # It appears necessary to load this here, instead of from main.py, otherwise, - # # _hydr_() does not recognize the function state_step_hydr()? 
- # if (hsp2_local_py != False): - # from hsp2_local_py import state_step_hydr - # else: - # from HSP2.state_fn_defaults import state_step_hydr - # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts - state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix'] - state_paths = state['state_paths'] - # initialize the sedtrn paths in case they don't already reside here - sedtrn_init_ix(state_ix, state_paths, state['domain']) - op_tokens = state['op_tokens'] - ####################################################################################### - - ############################################################################ - errors = _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens) # run SEDTRN simulation code - ############################################################################ - - if nexits > 1: - u = uci['SAVE'] - key1 = 'OSED1' - key2 = 'OSED2' - key3 = 'OSED3' - key4 = 'OSED4' - for i in range(nexits): - u[f'{key1}{i + 1}'] = u[key1] - u[f'{key2}{i + 1}'] = u[key2] - u[f'{key3}{i + 1}'] = u[key3] - u[f'{key4}{i + 1}'] = u[key4] - del u[key1] - del u[key2] - del u[key3] - del u[key4] - - return errors, ERRMSGS - -@njit(cache=True) -def _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens): - ''' Simulate behavior of inorganic sediment''' - errorsV = zeros(int(ui['errlen'])).astype(int64) - - simlen = int(ui['simlen']) - uunits = int(ui['uunits']) - delts = ui['delts'] - delt60 = ui['delt60'] - - AFACT = 43560.0 - if uunits == 2: - # si units conversion - AFACT = 1000000.0 - vol = ui['vol'] * AFACT - - svol = vol - nexits = int(ui['NEXITS']) - - # table SANDFG - sandfg = ui['SANDFG'] # 1: Toffaleti method, 2:Colby method, 3:old HSPF power function - - if ui['AUX3FG'] == 0: - errorsV[5] += 1 # error - sediment transport requires aux3fg to be on - - # table SED-GENPARM - bedwid = ui['BEDWID'] - bedwrn = ui['BEDWRN'] - por = ui['POR'] - - # table 
SED-HYDPARM - if uunits == 1: - len_ = ui['LEN'] * 5280 - db50 = ui['DB50'] * 0.0833 - else: - len_ = ui['LEN'] * 1000 - db50 = ui['DB50'] * 0.001 - delth = ui['DELTH'] - - # evaluate some quantities used in colby and/or toffaleti sand transport simulation methods - if uunits == 1: - db50e = db50 - db50m = db50 * 304.8 - else: - db50e = db50 * 3.28 - db50m = db50 * 1000.0 - slope = delth / len_ - - # SAND PARAMETERS; table SAND-PM - if uunits == 1: - sand_d = ui['D'] * 0.0833 - sand_w = ui['W'] * delts * 0.0254 # convert settling velocity from m/sec to m/ivl - else: - sand_d = ui['D'] * 0.001 - sand_w = ui['W'] * delts * 0.001 # convert settling velocity from m/sec to m/ivl - sand_rho = ui['RHO'] - sand_ksand = ui['KSAND'] - sand_expsnd = ui['EXPSND'] - - # SILT PARAMETERS; table SILT-CLAY-PM --- note: first occurance is silt - silt_d = ui['silt_d'] - silt_w = ui['silt_w'] - silt_rho = ui['silt_rho'] - silt_taucd = ui['silt_taucd'] - silt_taucs = ui['silt_taucs'] - silt_m = ui['silt_m'] - - # CLAY PARAMETERS; table SILT-CLAY-PM --- note: second occurance is clay - clay_d = ui['clay_d'] - clay_w = ui['clay_w'] - clay_rho = ui['clay_rho'] - clay_taucd = ui['clay_taucd'] - clay_taucs = ui['clay_taucs'] - clay_m = ui['clay_m'] - - # bed sediment conditions; table BED-INIT - beddep = ui['BEDDEP'] - sand_bedfr = ui['SANDFR'] - silt_bedfr = ui['SILTFR'] - clay_bedfr = ui['CLAYFR'] - total_bedfr = sand_bedfr + silt_bedfr + clay_bedfr - if abs(total_bedfr - 1.0) > 0.01: - errorsV[6] += 1 # error message: sum of bed sediment fractions is not close enough to 1.0 - - # suspended sediment concentrations; table ssed-init - sand_ssed1 = ui['SSED1'] - silt_ssed2 = ui['SSED2'] - clay_ssed3 = ui['SSED3'] - total_ssed4 = sand_ssed1 + silt_ssed2 + clay_ssed3 - - # get input time series- inflow of sediment is in units of mg.ft3/l.ivl (english) or mg.m3/l.ivl (metric) - TAU = ts['TAU'] - AVDEP = ts['AVDEP'] - AVVEL = ts['AVVEL'] - RO = ts['RO'] - HRAD = ts['HRAD'] - TWID = ts['TWID'] - 
- if not 'ISED1' in ts: - ts['ISED1'] = zeros(simlen) - if not 'ISED2' in ts: - ts['ISED2'] = zeros(simlen) - if not 'ISED3' in ts: - ts['ISED3'] = zeros(simlen) - - ISED1 = ts['ISED1'] # if present, else ISED is identically zero; sand - ISED2 = ts['ISED2'] # if present, else ISED is identically zero; silt - ISED3 = ts['ISED3'] # if present, else ISED is identically zero; clay - ISED4 = ISED1 + ISED2 + ISED3 - - htfg = int(ui['HTFG']) - if htfg == 1: - TW = ts['TW'] - if htfg == 0 and sandfg != 3: - TW = ts['TW'] - TW = where(TW < -100.0, 20.0, TW) - - # preallocate storage for computed time series - # WASH = ts['WASH'] = zeros(simlen) # washload concentration, state variable - # SAND = ts['SAND'] = zeros(simlen) # sandload oncentration, state variable - # BDSAND = ts['BDSAND'] = zeros(simlen) # bed storage of sand, state variable - # SDCF1_11 = ts['WASH'] = zeros(simlen) # deposition of washload on bed - # SDCF1_21 = ts['WASH'] = zeros(simlen) # total outflow of washload from RCHRES - # SDCF1_12 = ts['WASH'] = zeros(simlen) # exchange of sand between bed and suspended storage - # SDCF1_22 = ts['WASH'] = zeros(simlen) # total outflow of sandload from rchres - # SDCF2_1 = ts['SDCF2_1'] = zeros((simlen, nexits)) # washload outflow by gate - # SDCF2_2 = ts['SDCF2_2'] = zeros((simlen, nexits)) # sandload outflow by gate - # ossand = zeros(nexits) - SSED1 = ts['SSED1'] = zeros(simlen) # suspended sand concentration - SSED2 = ts['SSED2'] = zeros(simlen) # suspended silt concentration - SSED3 = ts['SSED3'] = zeros(simlen) # suspended clay concentration - SSED4 = ts['SSED4'] = zeros(simlen) # suspended sediment concentration - RSED1 = ts['RSED1'] = zeros(simlen) # sediment storages - suspended sand - RSED2 = ts['RSED2'] = zeros(simlen) # sediment storages - suspended silt - RSED3 = ts['RSED3'] = zeros(simlen) # sediment storages - suspended clay - RSED4 = ts['RSED4'] = zeros(simlen) # sediment storages - bed sand - RSED5 = ts['RSED5'] = zeros(simlen) # sediment storages - 
bed silt - RSED6 = ts['RSED6'] = zeros(simlen) # sediment storages - bed clay - RSED7 = ts['RSED7'] = zeros(simlen) # sediment storages - total sand - RSED8 = ts['RSED8'] = zeros(simlen) # sediment storages - total silt - RSED9 = ts['RSED9'] = zeros(simlen) # sediment storages - total clcay - RSED10 = ts['RSED10'] = zeros(simlen) # sediment storages - total sand silt clay - TSED1 = ts['TSED1'] = zeros(simlen) # Total sediment storages by fraction - TSED2 = ts['TSED2'] = zeros(simlen) # Total sediment storages by fraction - TSED3 = ts['TSED3'] = zeros(simlen) # Total sediment storages by fraction - BEDDEP= ts['BEDDEP']= zeros(simlen) # Bed depth - DEPSCR1 = ts['DEPSCR1'] = zeros(simlen) # Deposition (positive) or scour (negative) - sand - DEPSCR2 = ts['DEPSCR2'] = zeros(simlen) # Deposition (positive) or scour (negative) - silt - DEPSCR3 = ts['DEPSCR3'] = zeros(simlen) # Deposition (positive) or scour (negative) - clay - DEPSCR4 = ts['DEPSCR4'] = zeros(simlen) # Deposition (positive) or scour (negative) - total - ROSED1 = ts['ROSED1'] = zeros(simlen) # Total outflows of sediment from the rchres - sand - ROSED2 = ts['ROSED2'] = zeros(simlen) # Total outflows of sediment from the rchres - silt - ROSED3 = ts['ROSED3'] = zeros(simlen) # Total outflows of sediment from the rchres - clay - ROSED4 = ts['ROSED4'] = zeros(simlen) # Total outflows of sediment from the rchres - total - OSED1 = zeros((simlen, nexits)) - OSED2 = zeros((simlen, nexits)) - OSED3 = zeros((simlen, nexits)) - OSED4 = zeros((simlen, nexits)) - - fact = 1.0 / total_bedfr # normalize fractions to sum to one - sand_bedfr *= fact - silt_bedfr *= fact - clay_bedfr *= fact - rhomn = sand_bedfr * sand_rho + silt_bedfr * silt_rho + clay_bedfr * clay_rho - - volsed = len_ * bedwid * beddep * (1.0 - por) # total volume of sediment particles- ft3 or m3 - rwtsed = volsed * rhomn # total weight relative to water- rhomn is in parts/part (same as kg/l) - rwtsed = rwtsed * 1.0E06 # converts from kg/l to mg/l - - # 
find the weight of each fraction- units are (mg/l)*ft3 or (mg/l)*m3 - sand_wt_rsed4 = sand_bedfr * rwtsed - silt_wt_rsed5 = silt_bedfr * rwtsed - clay_wt_rsed6 = clay_bedfr * rwtsed - - # find the total quantity (bed and suspended) of each sediment size fraction - sand_rsed1 = sand_ssed1 * vol - sand_rssed1 = sand_t_rsed7 = sand_rsed1 + sand_wt_rsed4 - - silt_rsed2 = silt_ssed2 * vol - silt_rssed2 = silt_t_rsed8 = silt_rsed2 + silt_wt_rsed5 - - clay_rsed3 = clay_ssed3 * vol - clay_rssed3 = clay_t_rsed9 = clay_rsed3 + clay_wt_rsed6 - - tsed1 = sand_rsed1 + silt_rsed2 + clay_rsed3 - tsed2 = sand_wt_rsed4 + silt_wt_rsed5 + clay_wt_rsed6 - tsed3 = total_rsed10 = sand_t_rsed7 + silt_t_rsed8 + clay_t_rsed9 - - wsande = sand_w * 3.28 / delts # convert fall velocity from m/ivl to ft/sec - - VOL = ts['VOL'] - SROVOL = ts['SROVOL'] - EROVOL = ts['EROVOL'] - SOVOL = zeros((simlen, nexits)) - EOVOL = zeros((simlen, nexits)) - for i in range(nexits): - SOVOL[:, i] = ts['SOVOL' + str(i + 1)] - EOVOL[:, i] = ts['EOVOL' + str(i + 1)] - - #################### END PSED - - ####################################################################################### - # the following section (2 of 3) added by pbd to SEDTRN, this one to prepare for special actions - ####################################################################################### - sedtrn_ix = sedtrn_get_ix(state_ix, state_paths, state_info['domain']) - # these are integer placeholders faster than calling the array look each timestep - rsed4_ix, rsed5_ix, rsed6_ix = sedtrn_ix['RSED4'], sedtrn_ix['RSED5'], sedtrn_ix['RSED6'] - ####################################################################################### - - for loop in range(simlen): - - ####################################################################################### - # the following section (3 of 3) added by pbd to accommodate special actions - ####################################################################################### - # set state_ix 
with value of local state variables and/or needed vars - state_ix[rsed4_ix] = sand_wt_rsed4 - state_ix[rsed5_ix] = silt_wt_rsed5 - state_ix[rsed6_ix] = clay_wt_rsed6 - if (state_info['state_step_om'] == 'enabled'): - step_model(op_tokens[0], op_tokens, state_ix, dict_ix, ts_ix, loop) # traditional 'ACTIONS' done in here - # Do write-backs for editable STATE variables - sand_wt_rsed4 = state_ix[rsed4_ix] - silt_wt_rsed5 = state_ix[rsed5_ix] - clay_wt_rsed6 = state_ix[rsed6_ix] - ####################################################################################### - - # perform any necessary unit conversions - if uunits == 2: # uci is in metric units - avvele = AVVEL[loop] * 3.28 - avdepm = AVDEP[loop] - avdepe = AVDEP[loop] * 3.28 - rom = RO[loop] - hrade = HRAD[loop] * 3.28 - twide = TWID[loop] * 3.28 - ised1 = ISED1[loop] / 2.83E-08 - ised2 = ISED2[loop] / 2.83E-08 - ised3 = ISED3[loop] / 2.83E-08 - else: # uci is in english units - avvele = AVVEL[loop] - avdepm = AVDEP[loop] * 0.3048 - avdepe = AVDEP[loop] - rom = RO[loop] * 0.0283 - hrade = HRAD[loop] - twide = TWID[loop] - ised1 = ISED1[loop] / 3.121E-08 - ised2 = ISED2[loop] / 3.121E-08 - ised3 = ISED3[loop] / 3.121E-08 - tau = TAU[loop] - tw = TW[loop] - tw = (tw - 32.0) * 0.5555 - - # Following is routine #&COHESV() to simulate behavior of cohesive sediments (silt and clay) - # compute bed fractions based on relative storages - totbed = silt_wt_rsed5 + clay_wt_rsed6 - frcsed1 = silt_wt_rsed5 / totbed if totbed > 0.0 else 0.5 - frcsed2 = clay_wt_rsed6 / totbed if totbed > 0.0 else 0.5 - - vol = VOL[loop] * AFACT - srovol = SROVOL[loop] - erovol = EROVOL[loop] - sovol = SOVOL[loop, :] - eovol = EOVOL[loop, :] - silt_ssed2, rosed2, osed2 = advect(ised2, silt_ssed2, nexits, svol, vol, srovol, erovol, sovol, eovol) - silt_rsed2 = silt_ssed2 * vol # calculate exchange between bed and suspended sediment - # vols = svol - - # consider deposition and scour - if avdepe > 0.17: - depscr2, silt_rsed2, silt_wt_rsed5 = 
bdexch(avdepm, silt_w, tau, silt_taucd, silt_taucs, silt_m, vol, frcsed1, silt_rsed2, silt_wt_rsed5) - else: - depscr2 = 0.0 - silt_ssed2 = silt_rsed2 / vol if vol > 0.0 else -1.0e30 - - clay_ssed3, rosed3, osed3 = advect(ised3, clay_ssed3, nexits, svol, vol, srovol, erovol, sovol, eovol) - clay_rsed3 = clay_ssed3 * vol # calculate exchange between bed and suspended sediment - - # consider deposition and scour - if avdepe > 0.17: - depscr3, clay_rsed3, clay_wt_rsed6 = bdexch(avdepm, clay_w, tau, clay_taucd, clay_taucs, clay_m, vol, frcsed2, clay_rsed3, clay_wt_rsed6) - else: - depscr3 = 0.0 - clay_ssed3 = clay_rsed3 / vol if vol > 0.0 else -1.0e30 - # end COHESV() - - # compute fine sediment load - fsl = silt_ssed2 + clay_ssed3 - ksand = sand_ksand - expsnd = sand_expsnd - - # simulate sandload. done after washload because washload affects sand transport if the colby method is used - # Following code is #$SANDLD() - sands = sand_ssed1 # save starting concentration value - if vol > 0.0: # rchres contains water - if rom > 0.0 and avdepe > 0.17: # there is outflow from the rchres- perform advection - # calculate potential value of sand - if sandfg == 1: # case 1 toffaleti equation - gsi = toffaleti(avvele, db50e, hrade, slope, tw, wsande) - psand = (gsi * twide * 10.5) / rom # convert potential sand transport rate to a concentration in mg/l - elif sandfg == 2: # case 2 colby equation - gsi, ferror, d50err, hrerr, velerr = colby(avvele, db50m, hrade, fsl, tw) - if ferror == 1: - pass # ERRMSG: fatal error ocurred in colby method- one or more variables went outside valid range- warn and switch to toffaleti method - gsi = toffaleti(avvele, db50e, hrade, slope, tw, wsande) # switch to toffaleti method - psand = (gsi * twide * 10.5) / rom # convert potential sand transport rate to conc in mg/l - elif sandfg == 3: # case 3 input power function - psand = ksand * avvele**expsnd - - prosnd = (sands * srovol) + (psand * erovol) # calculate potential outflow of sand during ivl - 
pscour = (vol * psand) - (svol * sands) + prosnd - ised1 # qty.vol/l.ivl # calculate potential bed scour from, or to deposition - if pscour < sand_wt_rsed4: # potential scour is satisfied by bed storage; - # new conc. of sandload is potential conc. - scour = pscour - sand_ssed1 = psand - sand_rsed1 = sand_ssed1 * vol - sand_wt_rsed4 -= scour - else: # potential scour cannot be satisfied; all of the available bed storage is scoured - scour = sand_wt_rsed4 - sand_wt_rsed4 = 0.0 - sand_ssed1 = (ised1 + scour + sands * (svol - srovol)) / (vol + erovol) # calculate new conc. of suspended sandload - sand_rsed1 = sand_ssed1 * vol # calculate new storage of suspended sandload - rosed1 = (srovol * sands) + (erovol * sand_ssed1) # calculate total amount of sand leaving rchres during ivl - osed1 = sovol * sands + eovol * sand_ssed1 # calculate amount of sand leaving through each exit gate in qty.vol/l.ivl - else: # no outflow (still water) or water depth less than two inches - sand_ssed1 = 0.0 - sand_rsed1 = 0.0 - scour = -ised1 - (sands * svol) - sand_wt_rsed4 -= scour - rosed1 = 0.0 - osed1 = zeros(nexits) - else: # rchres is dry; set sand equal to an undefined number - sand_ssed1 = -1.0e30 - sand_rsed1 = 0.0 - scour = -ised1 - (sands * svol) # calculate total amount of sand settling out during interval; this is equal to sand inflow + sand initially present - sand_wt_rsed4 -= scour # update bed storage - rosed1 = 0.0 - osed1 = zeros(nexits) - depscr1 = -scour # calculate depth of bed scour or deposition; positive for deposition - # end SANDLD() - - # set small concentrations to zero - if abs(sand_ssed1) < 1.0e-15: # small conc., set to zero - if depscr1 > 0.0: # deposition has occurred, add small storage to deposition - depscr1 += sand_rsed1 - sand_wt_rsed4 += sand_rsed1 - else: # add small storage to outflow - rosed1 += sand_rsed1 - depscr1 = 0.0 - if nexits > 1: - for n in range(0,nexits): - if osed1[n] > 0.0: - osed1[n] += sand_rsed1 - break - sand_rsed1 = 0.0 - 
sand_ssed1 = 0.0 - - if abs(silt_ssed2) < 1.0e-15: # small conc., set to zero - if depscr2 > 0.0: # deposition has occurred, add small storage to deposition - depscr2 += silt_rsed2 - silt_wt_rsed5 += silt_rsed2 - else: # add small storage to outflow - rosed2 += silt_rsed2 - depscr2 = 0.0 - if nexits > 1: - for n in range(0, nexits): - if osed2[n] > 0.0: - osed2[n] += silt_rsed2 - break - silt_rsed2 = 0.0 - silt_ssed2 = 0.0 - - if abs(clay_ssed3) < 1.0e-15: # small conc., set to zero - if depscr3 > 0.0: # deposition has occurred, add small storage to deposition - depscr3 += clay_rsed3 - clay_wt_rsed6 += clay_rsed3 - else: # add small storage to outflow - rosed3 += clay_rsed3 - depscr3 = 0.0 - if nexits > 1: - for n in range(0, nexits): - if osed3[n] > 0.0: - osed3[n] += clay_rsed3 - break - clay_rsed3 = 0.0 - clay_ssed3 = 0.0 - - osed4 = zeros(nexits) - # calculate total quantity of material in suspension and in the bed; check bed conditions - osed4 += osed1 - sand_rssed1 = sand_t_rsed7 = sand_rsed1 + sand_wt_rsed4 # total storage in mg.vol/l - if sand_wt_rsed4 == 0.0: # warn that bed is empty - # errmsg - errorsV[0] += 1 # The bed storage of sediment size fraction sand is empty. - - osed4 += osed2 - silt_rssed2 = silt_t_rsed8 = silt_rsed2 + silt_wt_rsed5 # total storage in mg.vol/l - if silt_wt_rsed5 == 0.0: # warn that bed is empty - # errmsg - errorsV[1] += 1 # The bed storage of sediment size fraction silt is empty. - - osed4 += osed3 - clay_rssed3 = clay_t_rsed9 = clay_rsed3 + clay_wt_rsed6 # total storage in mg.vol/l - if clay_wt_rsed6 == 0.0: # warn that bed is empty - # errmsg - errorsV[2] += 1 # The bed storage of sediment size fraction clay is empty. 
- - # find the volume occupied by each fraction of bed sediment- ft3 or m3 - volsed = (sand_wt_rsed4 / (sand_rho * 1.0e06) - + silt_wt_rsed5 / (silt_rho * 1.0e06) - + clay_wt_rsed6 / (clay_rho * 1.0e06)) - - total_ssed4 = sand_ssed1 + silt_ssed2 + clay_ssed3 - tsed1 = sand_rsed1 + silt_rsed2 + clay_rsed3 - tsed2 = sand_wt_rsed4 + silt_wt_rsed5 + clay_wt_rsed6 - tsed3 = total_rsed10 = sand_t_rsed7 + silt_t_rsed8 + clay_t_rsed9 - depscr4 = depscr1 + depscr2 + depscr3 - rosed4 = rosed1 + rosed2 + rosed3 - - # find total depth of sediment - volsed = volsed / (1.0 - por) # allow for porosit - beddep = volsed / (len_ * bedwid) # calculate thickness of bed- ft or m - if beddep > bedwrn: - # Errormsg: warn that bed depth appears excessive - errorsV[3] += 1 - - svol = vol # svol is volume at start of time step, update for next time thru - - SSED1[loop] = sand_ssed1 - SSED2[loop] = silt_ssed2 - SSED3[loop] = clay_ssed3 - SSED4[loop] = total_ssed4 - BEDDEP[loop]= beddep - if uunits == 1: - RSED1[loop] = sand_rsed1 * 3.121E-08 - RSED2[loop] = silt_rsed2 * 3.121E-08 - RSED3[loop] = clay_rsed3 * 3.121E-08 - RSED4[loop] = sand_wt_rsed4 * 3.121E-08 - RSED5[loop] = silt_wt_rsed5 * 3.121E-08 - RSED6[loop] = clay_wt_rsed6 * 3.121E-08 - RSED7[loop] = sand_t_rsed7 * 3.121E-08 - RSED8[loop] = silt_t_rsed8 * 3.121E-08 - RSED9[loop] = clay_t_rsed9 * 3.121E-08 - RSED10[loop] = total_rsed10 * 3.121E-08 - TSED1[loop] = tsed1 * 3.121E-08 - TSED2[loop] = tsed2 * 3.121E-08 - TSED3[loop] = tsed3 * 3.121E-08 - DEPSCR1[loop] = depscr1 * 3.121E-08 - DEPSCR2[loop] = depscr2 * 3.121E-08 - DEPSCR3[loop] = depscr3 * 3.121E-08 - DEPSCR4[loop] = depscr4 * 3.121E-08 - ROSED1[loop] = rosed1 * 3.121E-08 - ROSED2[loop] = rosed2 * 3.121E-08 - ROSED3[loop] = rosed3 * 3.121E-08 - ROSED4[loop] = rosed4 * 3.121E-08 - OSED1[loop] = osed1 * 3.121E-08 - OSED2[loop] = osed2 * 3.121E-08 - OSED3[loop] = osed3 * 3.121E-08 - OSED4[loop] = osed4 * 3.121E-08 - else: - RSED1[loop] = sand_rsed1 * 1E-06 - RSED2[loop] = 
silt_rsed2 * 1E-06 - RSED3[loop] = clay_rsed3 * 1E-06 - RSED4[loop] = sand_wt_rsed4 * 1E-06 - RSED5[loop] = silt_wt_rsed5 * 1E-06 - RSED6[loop] = clay_wt_rsed6 * 1E-06 - RSED7[loop] = sand_t_rsed7 * 1E-06 - RSED8[loop] = silt_t_rsed8 * 1E-06 - RSED9[loop] = clay_t_rsed9 * 1E-06 - RSED10[loop] = total_rsed10 * 1E-06 - TSED1[loop] = tsed1 * 1E-06 - TSED2[loop] = tsed2 * 1E-06 - TSED3[loop] = tsed3 * 1E-06 - DEPSCR1[loop] = depscr1 * 1E-06 - DEPSCR2[loop] = depscr2 * 1E-06 - DEPSCR3[loop] = depscr3 * 1E-06 - DEPSCR4[loop] = depscr4 * 1E-06 - ROSED1[loop] = rosed1 * 1E-06 # 2.83E-08 - ROSED2[loop] = rosed2 * 1E-06 - ROSED3[loop] = rosed3 * 1E-06 - ROSED4[loop] = rosed4 * 1E-06 - OSED1[loop] = osed1 * 1E-06 - OSED2[loop] = osed2 * 1E-06 - OSED3[loop] = osed3 * 1E-06 - OSED4[loop] = osed4 * 1E-06 - - if nexits > 1: - for i in range(nexits): - ts['OSED1' + str(i+1)] = OSED1[:, i] - ts['OSED2' + str(i + 1)] = OSED2[:, i] - ts['OSED3' + str(i + 1)] = OSED3[:, i] - ts['OSED4' + str(i + 1)] = OSED4[:, i] - - return errorsV - - -@njit(cache=True) -def bdexch (avdepm, w, tau, taucd, taucs, m, vol, frcsed, susp, bed): - ''' simulate deposition and scour of a cohesive sediment fraction- silt or clay''' - if w > 0.0 and tau < taucd and susp > 1.0e-30: # deposition will occur - expnt = -w / avdepm * (1.0 - tau / taucd) - depmas = susp * (1.0 - exp(expnt)) - susp -= depmas - bed += depmas - else: - depmas = 0.0 # no deposition- concentrations unchanged - - if tau > taucs and m > 0.0: # scour can occur- units are: m- kg/m2.ivl avdepm- m scr- mg/l - scr = frcsed * m / avdepm * 1000.0 * (tau/taucs - 1.0) - scrmas = min(bed, scr * vol) # check availability of material ??? - - # update storages - susp += scrmas - bed -= scrmas - else: # no scour - scrmas = 0.0 - return depmas - scrmas, susp, bed # net deposition or scour, susp, bed - - -''' Sediment Transport in Alluvial Channels, 1963-65 by Bruce Colby. 
-This report explains the following empirical algorithm.''' - -@njit(cache=True) -def colby(v, db50, fhrad, fsl, tempr): -# Colby's method to calculate the capacity of the flow to transport sand. -# -# The colby method has the following units and applicable ranges of variables. -# average velocity.............v.......fps.........1-10 fps -# hydraulic radius.............fhrad...ft..........1-100 ft -# median bed material size.....db50....mm..........0.1-0.8 mm -# temperature..................tmpr....deg f.......32-100 deg. -# fine sediment concentration..fsl.....mg/liter....0-200000 ppm -# total sediment load..........gsi.....ton/day.ft.. - G = zeros((5,9,7)) # defined by Figure 26 - G[1, 1, 1], G[2, 1, 1], G[3, 1, 1], G[4, 1, 1] = 1.0, 0.30, 0.06, 0.00 - G[1, 2, 1], G[2, 2, 1], G[3, 2, 1], G[4, 2, 1] = 3.00, 3.30, 2.50, 2.00 - G[1, 3, 1], G[2, 3, 1], G[3, 3, 1], G[4, 3, 1] = 5.40, 9.0, 10.0, 20.0 - G[1, 4, 1], G[2, 4, 1], G[3, 4, 1], G[4, 4, 1] = 11.0, 26.0, 50.0, 150.0 - G[1, 5, 1], G[2, 5, 1], G[3, 5, 1], G[4, 5, 1] = 17.0, 49.0, 130.0, 500.0 - G[1, 6, 1], G[2, 6, 1], G[3, 6, 1], G[4, 6, 1] = 29.0, 101.0, 400.0, 1350.0 - G[1, 7, 1], G[2, 7, 1], G[3, 7, 1], G[4, 7, 1] = 44.0, 160.0, 700.0, 2500.0 - G[1, 8, 1], G[2, 8, 1], G[3, 8, 1], G[4, 8, 1] = 60.0, 220.0, 1000.0, 4400.0 - G[1, 1, 2], G[2, 1, 2], G[3, 1, 2], G[4, 1, 2] = 0.38, 0.06, 0.0, 0.0 - G[1, 2, 2], G[2, 2, 2], G[3, 2, 2], G[4, 2, 2] = 1.60, 1.20, 0.65, 0.10 - G[1, 3, 2], G[2, 3, 2], G[3, 3, 2], G[4, 3, 2] = 3.70, 5.0, 4.0, 3.0 - G[1, 4, 2], G[2, 4, 2], G[3, 4, 2], G[4, 4, 2] = 10.0, 18.0, 30.0, 52.0 - G[1, 5, 2], G[2, 5, 2], G[3, 5, 2], G[4, 5, 2] = 17.0, 40.0, 80.0, 160.0 - G[1, 6, 2], G[2, 6, 2], G[3, 6, 2], G[4, 6, 2] = 36.0, 95.0, 230.0, 650.0 - G[1, 7, 2], G[2, 7, 2], G[3, 7, 2], G[4, 7, 2] = 60.0, 150.0, 415.0, 1200.0 - G[1, 8, 2], G[2, 8, 2], G[3, 8, 2], G[4, 8, 2] = 81.0, 215.0, 620.0, 1500.0 - G[1, 1, 3], G[2, 1, 3], G[3, 1, 3], G[4, 1, 3] = 0.14, 0.0, 0.0, 0.0 - G[1, 2, 3], G[2, 2, 3], G[3, 2, 
3], G[4, 2, 3] = 1.0, 0.60, 0.15, 0.0 - G[1, 3, 3], G[2, 3, 3], G[3, 3, 3], G[4, 3, 3] = 3.30, 3.00, 1.70, 0.50 - G[1, 4, 3], G[2, 4, 3], G[3, 4, 3], G[4, 4, 3] = 11.0, 15.0, 17.0, 14.0 - G[1, 5, 3], G[2, 5, 3], G[3, 5, 3], G[4, 5, 3] = 20.0, 35.0, 49.0, 70.0 - G[1, 6, 3], G[2, 6, 3], G[3, 6, 3], G[4, 6, 3] = 44.0, 85.0, 150.0, 250.0 - G[1, 7, 3], G[2, 7, 3], G[3, 7, 3], G[4, 7, 3] = 71.0, 145.0, 290.0, 500.0 - G[1, 8, 3], G[2, 8, 3], G[3, 8, 3], G[4, 8, 3] = 100.0, 202.0, 400.0, 700.0 - G[1, 1, 4], G[2, 1, 4], G[3, 1, 4], G[4, 1, 4] = 0.0, 0.0, 0.0, 0.0 - G[1, 2, 4], G[2, 2, 4], G[3, 2, 4], G[4, 2, 4] = 0.70, 0.30, 0.06, 0.0 - G[1, 3, 4], G[2, 3, 4], G[3, 3, 4], G[4, 3, 4] = 2.9, 2.3, 1.0, 0.06 - G[1, 4, 4], G[2, 4, 4], G[3, 4, 4], G[4, 4, 4] = 11.5, 13.0, 12.0, 7.0 - G[1, 5, 4], G[2, 5, 4], G[3, 5, 4], G[4, 5, 4] = 22.0, 31.0, 40.0, 50.0 - G[1, 6, 4], G[2, 6, 4], G[3, 6, 4], G[4, 6, 4] = 47.0, 84.0, 135.0, 210.0 - G[1, 7, 4], G[2, 7, 4], G[3, 7, 4], G[4, 7, 4] = 75.0, 140.0, 240.0, 410.0 - G[1, 8, 4], G[2, 8, 4], G[3, 8, 4], G[4, 8, 4] = 106.0, 190.0, 350.0, 630.0 - G[1, 1, 5], G[2, 1, 5], G[3, 1, 5], G[4, 1, 5] = 0.0, 0.0, 0.0, 0.0 - G[1, 2, 5], G[2, 2, 5], G[3, 2, 5], G[4, 2, 5] = 0.44, 0.06, 0.0, 0.0 - G[1, 3, 5], G[2, 3, 5], G[3, 3, 5], G[4, 3, 5] = 2.8, 1.8, 0.6, 0.0 - G[1, 4, 5], G[2, 4, 5], G[3, 4, 5], G[4, 4, 5] = 12.0, 12.5, 10.0, 4.5 - G[1, 5, 5], G[2, 5, 5], G[3, 5, 5], G[4, 5, 5] = 24.0, 30.0, 35.0, 37.0 - G[1, 6, 5], G[2, 6, 5], G[3, 6, 5], G[4, 6, 5] = 52.0, 78.0, 120.0, 190.0 - G[1, 7, 5], G[2, 7, 5], G[3, 7, 5], G[4, 7, 5] = 83.0, 180.0, 215.0, 380.0 - G[1, 8, 5], G[2, 8, 5], G[3, 8, 5], G[4, 8, 5] = 120.0, 190.0, 305.0, 550.0 - G[1, 1, 6], G[2, 1, 6], G[3, 1, 6], G[4, 1, 6] = 0.0, 0.0, 0.0, 0.0 - G[1, 2, 6], G[2, 2, 6], G[3, 2, 6], G[4, 2, 6] = 0.3, 0.0, 0.0, 0.0 - G[1, 3, 6], G[2, 3, 6], G[3, 3, 6], G[4, 3, 6] = 2.9, 1.4, 0.3, 0.0 - G[1, 4, 6], G[2, 4, 6], G[3, 4, 6], G[4, 4, 6] = 14.0, 11.0, 7.7, 3.0 - G[1, 5, 6], G[2, 5, 6], G[3, 5, 6], G[4, 
5, 6] = 27.0, 29.0, 30.0, 30.0 - G[1, 6, 6], G[2, 6, 6], G[3, 6, 6], G[4, 6, 6] = 57.0, 75.0, 110.0, 170.0 - G[1, 7, 6], G[2, 7, 6], G[3, 7, 6], G[4, 7, 6] = 90.0, 140.0, 200.0, 330.0 - G[1, 8, 6], G[2, 8, 6], G[3, 8, 6], G[4, 8, 6] = 135.0, 190.0, 290.0, 520.0 - - F = zeros((6,11)) # defined by Figure 24 - F[1, 1], F[2, 1], F[3, 1], F[4, 1], F[5, 1] = 1.0, 1.1, 1.6, 2.6, 4.2 - F[1, 2], F[2, 2], F[3, 2], F[4, 2], F[5, 2] = 1.0, 1.1, 1.65, 2.75, 4.9 - F[1, 3], F[2, 3], F[3, 3], F[4, 3], F[5, 3] = 1.0, 1.1, 1.7, 3.0, 5.5 - F[1, 4], F[2, 4], F[3, 4], F[4, 4], F[5, 4] = 1.0, 1.12, 1.9, 3.6, 7.0 - F[1, 5], F[2, 5], F[3, 5], F[4, 5], F[5, 5] = 1.0, 1.17, 2.05, 4.3, 8.7 - F[1, 6], F[2, 6], F[3, 6], F[4, 6], F[5, 6] = 1.0, 1.2, 2.3, 5.5, 11.2 - F[1, 7], F[2, 7], F[3, 7], F[4, 7], F[5, 7] = 1.0, 1.22, 2.75, 8.0, 22.0 - F[1, 8], F[2, 8], F[3, 8], F[4, 8], F[5, 8] = 1.0, 1.25, 3.0, 9.6, 29.0 - F[1, 9], F[2, 9], F[3, 9], F[4, 9], F[5, 9] = 1.0, 1.3, 3.5, 12.0, 43.0 - F[1, 10], F[2, 10], F[3, 10], F[4, 10], F[5, 10] = 1.0, 1.4, 4.9, 22.0, 120.0 - - # T = array([[-999, -999, -999, -999, -999, -999, -999, -999], - # [-999, 1.2, 1.15, 1.10, 0.96, 0.90, 0.85, 0.82], - # [-999, 1.35, 1.25, 1.12, 0.92, 0.86, 0.80, 0.75], - # [-999, 1.60, 1.40, 1.20, 0.89, 0.80, 0.72, 0.66], - # [-999, 2.00, 1.65, 1.30, 0.85, 0.72, 0.63, 0.55]]).T # Temperature adjustment, Figure 24 - - T = zeros((8,5)) - T[0, 0], T[0, 1], T[0, 2], T[0, 3], T[0, 4] = -999, -999, -999, -999, -999 - T[1, 0], T[1, 1], T[1, 2], T[1, 3], T[1, 4] = -999, 1.2, 1.35, 1.60, 2.00 - T[2, 0], T[2, 1], T[2, 2], T[2, 3], T[2, 4] = -999, 1.15, 1.25, 1.40, 1.65 - T[3, 0], T[3, 1], T[3, 2], T[3, 3], T[3, 4] = -999, 1.10, 1.12, 1.20, 1.30 - T[4, 0], T[4, 1], T[4, 2], T[4, 3], T[4, 4] = -999, 0.96, 0.92, 0.89, 0.85 - T[5, 0], T[5, 1], T[5, 2], T[5, 3], T[5, 4] = -999, 0.90, 0.86, 0.80, 0.72 - T[6, 0], T[6, 1], T[6, 2], T[6, 3], T[6, 4] = -999, 0.85, 0.80, 0.72, 0.63 - T[7, 0], T[7, 1], T[7, 2], T[7, 3], T[7, 4] = -999, 0.82, 0.75, 0.66, 
0.55 - - DF = array([-999, 0.10, 0.20, 0.30, 0.60, 1.00, 2.00, 6.00, 10.00, 20.00, 1.E2]) # Depths for Figure 24 - CF = array([-999, 0.00, 1.E4, 5.E4, 1.E5, 1.5E5]) # Concentrations of sediment for Figure 24 - P = array([-999, 0.60, 0.90, 1.0, 1.0, 0.83, 0.60, 0.40, 0.25, 0.15, 0.09, 0.05]) # Percentage Effect for Figure 24 - DP = array([-999, 0.10, 0.15, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90, 1.00]) # Median diameters for Figure 24 - DG = array([-999, 0.10, 1.00, 10.0, 100.0]) # Depth values for Figure 26 - VG = array([-999, 1.0, 1.5, 2.0, 3.0, 4.0, 6.0, 8.0, 10.0]) # Velocity values for Figure 26 - D50G = array([-999, 0.10, 0.20, 0.30, 0.40, 0.60, 0.80]) # Median values for figure 26 - TEMP = array([-999, 32.0, 40.0, 50.0, 70.0, 80.0, 90.0, 100.0]) # Temperatures for lookup in Figure 26 - - ferror = 0 - d50err = 0 - hrerr = 0 - velerr = 0 - - id501 = 0 - id502 = 0 - id1 = 0 - iv1 = 0 - it1 = 0 - if not 0.80 >= db50 >= 0.10: # D50G limits - ferror = 1 - d50err = 1 - return 0.0, ferror, d50err, hrerr, velerr - for id501, db50x in enumerate(D50G): - if db50x > db50: - break - id501 -= 1 - id502 = id501 + 1 - zz1 = log10(D50G[id501]) - zz2 = log10(D50G[id502]) - zzratio = (log10(db50) - zz1) / (zz2 - zz1) - - if not 100.0 >= fhrad >= 0.10: # DG limits - ferror = 1 - hrerr = 1 - return 0.0, ferror, d50err, hrerr, velerr - for id1,dgx in enumerate(DG): - if fhrad > dgx: - break - id1 = id1 + 1 - id2 = id1 + 1 - xx1 = log10(DG[id1]) - xx2 = log10(DG[id2]) - xxratio = (log10(fhrad) - xx1) / ((xx2 - xx1)) - - if not 10.0 >= v >= 1.0: # VG limits - ferror = 1 - velerr = 1 - return 0.0, ferror, d50err, hrerr, velerr - for iv1, vx in enumerate(VG): - if vx > v: - break - iv1 -= 1 - iv2 = iv1 + 1 - yy1 = log10(VG[iv1]) - yy2 = log10(VG[iv2]) - yyratio = (log10(v) - yy1) / (yy2 - yy1) - - tmpr = min(100.0, max(32.0, tempr * 1.8 + 32.0)) - - x = zeros((3,3)) - xa= zeros(3) - xg= zeros(3) - for i,i1 in [(1, id1), (2, id2)]: # DO 200 I= 1,2; I1 = II(I) - for j, j1 in 
[(1, iv1), (2, iv2)]: # DO 190 J= 1,2; J1 = JJ(J) - for k, k1 in [(1, id501), (2, id502)]: # DO 180 K= 1,2; K1 = KK(K) - if G[i1,j1,k1] > 0.0: - x[j,k] = log10(G[i1,j1,k1]) - else: - for j3 in range(j1,8): # DO 140 J3= J1,7 - if G[i1,j3,k1] > 0.0: - break - x[j,k] = log10(G[i1,j3,k1]) + (log10(VG[j1] / VG[j3])) * (log10(G[i1,j3+1,k1] / G[i1,j3,k1])) / (log10(VG[j3+1] / VG[j3])) - - xa[1] = x[1,1] + (x[1,2] - x[1,1]) * zzratio - xa[2] = x[2,1] + (x[2,2] - x[2,1]) * zzratio - xn3 = xa[2] - xa[1] - xg[i] = xa[1] + xn3 * yyratio - - xn4 = xg[2] - xg[1] - gtuc = 10.0**(xg[1] + (xn4 * xxratio)) # uncorrected gt in lb/sec/ft - - # Adjustment coefficient for temperature - if abs(tmpr - 60.0) <= 1.0e-5: - cft = 1.0 - else: - for it1, tempx in enumerate(TEMP): - if tempx > tmpr: - break - it2 = it1 - it1 -= 1 - - xt11 = log10(T[it1][id1]) - xt21 = log10(T[it2][id1]) - xt12 = log10(T[it1][id2]) - xt22 = log10(T[it2][id2]) - - xnt = log10(tmpr / TEMP[it1]) / log10(TEMP[it2] / TEMP[it1]) - xct1 = xt11 + xnt * (xt21 - xt11) - xct2 = xt12 + xnt * (xt22 - xt12) - cft = 10.0**(xct1 + (xct2 - xct1) * xxratio) - - # fine sediment load correction; (i.e. 
cohesive sediment or wash) load in mg/liter - if fsl <= 10.0: - cff = 1.0 - else: - for id1, dfx in enumerate(DF): - if dfx > fhrad: - break - id2 = id1 + 1 - - if1 = 0 - if fsl > 1.0E+4: - if1 = 4 - if2 = 5 - ERRMSG = '***** SUBROUTINE COLBY -- FSL WENT > 1.E+4' - else: - for if1, cfx in enumerate(CF): - if cfx > fsl: - break - if2 = if1 + 1 - - xf11 = log10(F[if1,id1]) - xf22 = log10(F[if2,id2]) - xf12 = log10(F[if1,id2]) - xf21 = log10(F[if2,id1]) - - xnt = (fsl - CF[if1]) / (CF[if2] - CF[if1]) - xct1 = xf11 + xnt * (xf21 - xf11) - xct2 = xf12 + xnt * (xf22 - xf12) - xnt = log10(fhrad / DF[id1]) / log10(DF[id2] / DF[id1]) - cff = 10.0**(xct1 + xnt * (xct2 - xct1)) - tcf = cft * cff - 1.0 - - # Percent effect correction for median diameter''' - if 0.30 >= db50 >= 0.20: - cfd = 1.0 - else: - for ip1, db50x in enumerate(DP): - if db50x > db50: - break - ip2 = ip1 + 1 - - p1 = log10(P[ip1]) - p2 = log10(P[ip2]) - xnt = log10(db50 / DP[ip1]) / log10(DP[ip2] / DP[ip1]) - - cfd = 10.0**(p1 + xnt * (p2 -p1)) - - return gtuc * (cfd * tcf + 1.0), ferror, d50err, hrerr, velerr - - -@njit(cache=True) -def toffaleti(v, fdiam, fhrad, slope, tempr, vset): - ''' Toffaleti's method to calculate the capacity of the flow to transport sand.''' - - tmpr = tempr * 1.80 + 32.0 # degrees c to degrees f - - # For water temperatures greater than 32f and less than 100f the kinematic viscosity is - vis = 4.106e-4 * tmpr**-0.864 - - # Assuming the d50 grain size is approximately equal to the Geometric mean grain size - # and sigma-g = 1.5, the d65 grain size can be determined as 1.17*d50. - d65 = 1.17 * fdiam - cnv = 0.1198 + 0.00048 * tmpr - cz = 260.67 - 0.667 * tmpr - tt = 1.10 * (0.051 + 0.00009 * tmpr) - zi = vset * v / (cz * fhrad * slope) - if zi < cnv: - zi = 1.5 * cnv - - # The manning-strickler equation is used here to Determine the hydraulic radius - # component due to Grain roughness (r'). Taken from the 1975 asce - # "sedimentation engineering",pg. 128. 
- rprime = ((v**1.5) * (d65**0.25) / (slope**0.75)) * 0.00349 - ustar = (rprime * slope * 32.2)**0.5 - - afunc = (vis * 1.0e5)**0.333 / (10.0 * ustar) - if afunc <= 0.500: ac = (afunc / 4.89)**-1.45 - elif afunc <= 0.660: ac = (afunc / 0.0036)**0.67 - elif afunc <= 0.720: ac = (afunc / 0.29)**4.17 - elif afunc <= 1.25: ac = 48.0 - elif afunc > 1.25: ac = (afunc / 0.304)**2.74 - - k4func = afunc * slope * d65 * 1.0e5 - if k4func <= 0.24: k4 = 1.0 - elif k4func <= 0.35: k4 = (k4func**1.10) * 4.81 - elif k4func > 0.35: k4 = (k4func** (-1.05)) * 0.49 - - ack4 = ac * k4 - if ack4 - 16.0 < 0.0: - ack4 = 16.0 - k4 = 16.0 / ac - oczu = 1.0 + cnv - 1.5 * zi - oczm = 1.0 + cnv - zi - oczl= 1.0 + cnv - 0.756 * zi - zinv = cnv - 0.758 * zi - zm = -zinv - zn = 1.0 + zinv - zo = -0.736 * zi - zp = 0.244 * zi - zq = 0.5 * zi - - # Cli has been multiplied by 1.0e30 to keep it from Exceeding the computer overflow limit - cli = (5.6e+22 * oczl * (v**2.333) / fhrad**(zm) / ((tt * ac * k4 * fdiam)**1.667) - / (1.0 + cnv) / ((fhrad / 11.24)**(zn) - (2.0 * fdiam)**oczl)) - p1 = (2.0 * fdiam / fhrad)**(zo / 2.0) - c2d = cli * p1 * p1 / 1.0e+30 - - # Check to see if the calculated value is reasonable (< 100.0), and adjust it if it is not. 
- if c2d > 100.0: - cli = cli * 100.0 / c2d - cmi = 43.2 * cli * (1.0 + cnv) * v * (fhrad**zm) # Cmi has been multiplied by 1.0e30 to keep it from computer overflow - - # upper layer transport capacity - fd11 = fhrad / 11.24 - fd25 = fhrad / 2.5 - gsu = (cmi * (fd11**zp) * (fd25**zq) * (fhrad**oczu - (fd25**oczu)))/(oczu * 1.0e+30) - - gsm = (cmi * (fd11**zp) * (fd25**(oczm) - (fd11**oczm))) / (oczm * 1.0e+30) # middle layer transport capacity - gsl = (cmi * ((fd11**(zn)) - ((2.0 * fdiam)**(oczl)))) / (oczl * 1.0e+30) # lower layer transport capacity - gsb = (cmi * ((2.0 * fdiam)**(zn))) / 1.0e+30 # bed layer transport capacity - - return max(0.0, gsu + gsm + gsl + gsb) # Total transport capacity of the rchres (tons/day/ft) - - -def expand_SEDTRN_masslinks(flags, uci, dat, recs): - if flags['SEDTRN']: - # ISED1 - rec = {} - rec['MFACTOR'] = dat.MFACTOR - rec['SGRPN'] = 'SEDTRN' - if dat.SGRPN == "ROFLOW": - rec['SMEMN'] = 'ROSED' - rec['SMEMSB1'] = '1' - rec['SMEMSB2'] = '' - else: - rec['SMEMN'] = 'OSED' - rec['SMEMSB1'] = '1' - rec['SMEMSB2'] = dat.SMEMSB1 - rec['TMEMN'] = 'ISED1' - rec['TMEMSB1'] = dat.TMEMSB1 - rec['TMEMSB2'] = dat.TMEMSB2 - rec['SVOL'] = dat.SVOL - recs.append(rec) - # ISED2 - rec = {} - rec['MFACTOR'] = dat.MFACTOR - rec['SGRPN'] = 'SEDTRN' - if dat.SGRPN == "ROFLOW": - rec['SMEMN'] = 'ROSED' - rec['SMEMSB1'] = '2' - rec['SMEMSB2'] = '' - else: - rec['SMEMN'] = 'OSED' - rec['SMEMSB1'] = '2' - rec['SMEMSB2'] = dat.SMEMSB1 - rec['TMEMN'] = 'ISED2' - rec['TMEMSB1'] = dat.TMEMSB1 - rec['TMEMSB2'] = dat.TMEMSB2 - rec['SVOL'] = dat.SVOL - recs.append(rec) - # ISED3 - rec = {} - rec['MFACTOR'] = dat.MFACTOR - rec['SGRPN'] = 'SEDTRN' - if dat.SGRPN == "ROFLOW": - rec['SMEMN'] = 'ROSED' - rec['SMEMSB1'] = '3' - rec['SMEMSB2'] = '' - else: - rec['SMEMN'] = 'OSED' - rec['SMEMSB1'] = '3' - rec['SMEMSB2'] = dat.SMEMSB1 - rec['TMEMN'] = 'ISED3' - rec['TMEMSB1'] = dat.TMEMSB1 - rec['TMEMSB2'] = dat.TMEMSB2 - rec['SVOL'] = dat.SVOL - recs.append(rec) +''' 
Copyright (c) 2020 by RESPEC, INC.
Authors: Robert Heaphy, Ph.D. and Paul Duda
License: LGPL2
'''

from numpy import array, zeros, where, int64, arange
from math import log10, exp
from numba import njit
from HSP2.ADCALC import advect
from HSP2.utilities import make_numba_dict

# the following imports added to handle special actions
from HSP2.om import *
from HSP2.om_model_linkage import *

# Indexed message strings; _sedtrn_() returns a parallel integer array (errorsV)
# counting how many times each condition occurred during the run.
ERRMSGS =('SEDTRN: Warning -- bed storage of sediment size fraction sand is empty', #ERRMSG0
          'SEDTRN: Warning -- bed storage of sediment size fraction silt is empty', #ERRMSG1
          'SEDTRN: Warning -- bed storage of sediment size fraction clay is empty', #ERRMSG2
          'SEDTRN: Warning -- bed depth appears excessive', #ERRMSG3
          'SEDTRN: Fatal error ocurred in colby method- variable outside valid range- switching to toffaleti method', #ERRMSG4
          'SEDTRN: Simulation of sediment requires all 3 "auxiliary flags" (AUX1FG, etc) in section HYDR must be turned on', #ERRMSG5
          'SEDTRN: When specifying the initial composition of the bed, the fraction of sand, silt, and clay must sum to a value close to 1.0.') #ERRMSG6

def sedtrn(io_manager, siminfo, uci, ts, state):
    '''Simulate behavior of inorganic sediment (pure-Python driver).

    Unpacks UCI tables and ADCALC advection data into a numba-safe dict,
    wires up the shared-state ("special actions" / om) machinery, then
    delegates the per-timestep work to the compiled _sedtrn_() kernel.

    Parameters:
        io_manager -- I/O services object (not used directly in this body)
        siminfo    -- dict with at least 'steps', 'delt', 'units'
        uci        -- parsed UCI input; reads 'advectData', 'SILT', 'CLAY',
                      'SAVE' and scalar parameter tables via make_numba_dict
        ts         -- dict of time series; input series read and output
                      series written in place
        state      -- model state dict used for special-action integration

    Returns:
        (errors, ERRMSGS) -- per-message occurrence counts and the
        corresponding message strings.
    '''

    # simlen = siminfo['steps']
    # delt = siminfo['delt']
    delt60 = siminfo['delt'] / 60     # simulation interval in hours
    delts = siminfo['delt'] * 60      # simulation interval in seconds
    uunits = siminfo['units']         # 1 = English, 2 = metric (per branches below)

    advectData = uci['advectData']
    (nexits, vol, VOL, SROVOL, EROVOL, SOVOL, EOVOL) = advectData

    # stash advection series in ts so the njit kernel can read them back
    ts['VOL'] = VOL
    ts['SROVOL'] = SROVOL
    ts['EROVOL'] = EROVOL
    for i in range(nexits):
        ts['SOVOL' + str(i + 1)] = SOVOL[:, i]
        ts['EOVOL' + str(i + 1)] = EOVOL[:, i]

    # flatten uci into a numba-typed dict and add derived scalars
    ui = make_numba_dict(uci)
    ui['simlen'] = siminfo['steps']
    ui['uunits'] = siminfo['units']
    ui['vol'] = vol
    ui['delts'] = siminfo['delt'] * 60
    ui['delt60'] = siminfo['delt'] / 60
    ui['errlen'] = len(ERRMSGS)

    # silt parameters (SILT-CLAY-PM, first occurrence), unit-converted up front
    ui_silt = uci['SILT']
    if uunits == 1:
        ui['silt_d'] = ui_silt['D'] * 0.0833
        ui['silt_w'] = ui_silt['W'] * delts * 0.0254  # convert settling velocity from m/sec to m/ivl
    else:
        ui['silt_d'] = ui_silt['D'] * 0.001
        ui['silt_w'] = ui_silt['W'] * delts * 0.001  # convert settling velocity from m/sec to m/ivl
    ui['silt_rho'] = ui_silt['RHO']
    ui['silt_taucd'] = ui_silt['TAUCD']
    ui['silt_taucs'] = ui_silt['TAUCS']
    ui['silt_m'] = ui_silt['M'] * delt60 / 24.0 * 4.880  # convert erodibility coeff from /day to /ivl

    # clay parameters (SILT-CLAY-PM, second occurrence), same conversions as silt
    ui_clay = uci['CLAY']
    if uunits == 1:
        ui['clay_d'] = ui_clay['D'] * 0.0833
        ui['clay_w'] = ui_clay['W'] * delts * 0.0254  # convert settling velocity from m/sec to m/ivl
    else:
        ui['clay_d'] = ui_clay['D'] * 0.001
        ui['clay_w'] = ui_clay['W'] * delts * 0.001  # convert settling velocity from m/sec to m/ivl
    ui['clay_rho'] = ui_clay['RHO']
    ui['clay_taucd'] = ui_clay['TAUCD']
    ui['clay_taucs'] = ui_clay['TAUCS']
    ui['clay_m'] = ui_clay['M'] * delt60 / 24.0 * 4.880  # convert erodibility coeff from /day to /ivl

    #######################################################################################
    # the following section (1 of 3) added to SEDTRN by pbd to handle special actions
    #######################################################################################
    # state_info is some generic things about the simulation
    # must be numba safe, so we don't just pass the whole state which is not
    state_info = Dict.empty(key_type=types.unicode_type, value_type=types.unicode_type)
    state_info['operation'], state_info['segment'], state_info['activity'] = state['operation'], state['segment'], state['activity']
    state_info['domain'], state_info['state_step_hydr'], state_info['state_step_om'] = state['domain'], state['state_step_hydr'], state['state_step_om']
    # hsp2_local_py = state['hsp2_local_py']
    # # It appears necessary to load this here, instead of from main.py, otherwise,
    # # _hydr_() does not recognize the function state_step_hydr()?
    # if (hsp2_local_py != False):
    #     from hsp2_local_py import state_step_hydr
    # else:
    #     from HSP2.state_fn_defaults import state_step_hydr
    # must split dicts out of state Dict since numba cannot handle mixed-type nested Dicts
    # initialize the sedtrn paths in case they don't already reside here
    sedtrn_init_ix(state, state['domain'])
    state_ix, dict_ix, ts_ix = state['state_ix'], state['dict_ix'], state['ts_ix']
    state_paths = state['state_paths']
    op_tokens = state['op_tokens']
    # Aggregate the list of all SEDTRN end point dependencies
    ep_list = ['RSED4', 'RSED5', 'RSED6']
    model_exec_list = model_domain_dependencies(state, state_info['domain'], ep_list)
    model_exec_list = np.asarray(model_exec_list, dtype="i8")  # format for use in numba
    #######################################################################################

    ############################################################################
    errors = _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens, model_exec_list)  # run SEDTRN simulation code
    ############################################################################

    if nexits > 1:
        # rename per-exit OSED save flags (OSED1 -> OSED11, OSED12, ...) and
        # drop the aggregate keys so reporting matches the per-exit series
        u = uci['SAVE']
        key1 = 'OSED1'
        key2 = 'OSED2'
        key3 = 'OSED3'
        key4 = 'OSED4'
        for i in range(nexits):
            u[f'{key1}{i + 1}'] = u[key1]
            u[f'{key2}{i + 1}'] = u[key2]
            u[f'{key3}{i + 1}'] = u[key3]
            u[f'{key4}{i + 1}'] = u[key4]
        del u[key1]
        del u[key2]
        del u[key3]
        del u[key4]

    return errors, ERRMSGS

@njit(cache=True)
def _sedtrn_(ui, ts, state_info, state_paths, state_ix, dict_ix, ts_ix, op_tokens, model_exec_list):
    '''Simulate behavior of inorganic sediment (numba-compiled kernel).

    Returns errorsV, an int64 array of per-ERRMSG occurrence counts.
    '''
    errorsV = zeros(int(ui['errlen'])).astype(int64)

    simlen = int(ui['simlen'])
    uunits = int(ui['uunits'])
    delts = ui['delts']
    delt60 = ui['delt60']

    # volume conversion factor: acre-ft->ft3 (English) or Mm3->m3 (metric)
    AFACT = 43560.0
    if uunits == 2:
        # si units conversion
        AFACT = 1000000.0
    vol = ui['vol'] * AFACT

    svol = vol  # volume at start of interval
    nexits = int(ui['NEXITS'])

    # table SANDFG
    sandfg = ui['SANDFG']  # 1: Toffaleti method, 2: Colby method, 3: old HSPF power function

    if ui['AUX3FG'] == 0:
        errorsV[5] += 1  # error - sediment transport requires aux3fg to be on

    # table SED-GENPARM
    bedwid = ui['BEDWID']
    bedwrn = ui['BEDWRN']  # warning threshold for bed depth
    por = ui['POR']        # bed porosity

    # table SED-HYDPARM -- reach length and median grain size to ft or m
    if uunits == 1:
        len_ = ui['LEN'] * 5280
        db50 = ui['DB50'] * 0.0833
    else:
        len_ = ui['LEN'] * 1000
        db50 = ui['DB50'] * 0.001
    delth = ui['DELTH']

    # evaluate some quantities used in colby and/or toffaleti sand transport simulation methods
    if uunits == 1:
        db50e = db50          # d50 in English units (ft)
        db50m = db50 * 304.8  # d50 in mm
    else:
        db50e = db50 * 3.28
        db50m = db50 * 1000.0
    slope = delth / len_

    # SAND PARAMETERS; table SAND-PM
    if uunits == 1:
        sand_d = ui['D'] * 0.0833
        sand_w = ui['W'] * delts * 0.0254  # convert settling velocity from m/sec to m/ivl
    else:
        sand_d = ui['D'] * 0.001
        sand_w = ui['W'] * delts * 0.001  # convert settling velocity from m/sec to m/ivl
    sand_rho = ui['RHO']
    sand_ksand = ui['KSAND']
    sand_expsnd = ui['EXPSND']

    # SILT PARAMETERS; table SILT-CLAY-PM --- note: first occurance is silt
    # (already unit-converted by the driver)
    silt_d = ui['silt_d']
    silt_w = ui['silt_w']
    silt_rho = ui['silt_rho']
    silt_taucd = ui['silt_taucd']
    silt_taucs = ui['silt_taucs']
    silt_m = ui['silt_m']

    # CLAY PARAMETERS; table SILT-CLAY-PM --- note: second occurance is clay
    clay_d = ui['clay_d']
    clay_w = ui['clay_w']
    clay_rho = ui['clay_rho']
    clay_taucd = ui['clay_taucd']
    clay_taucs = ui['clay_taucs']
    clay_m = ui['clay_m']

    # bed sediment conditions; table BED-INIT
    beddep = ui['BEDDEP']
    sand_bedfr = ui['SANDFR']
    silt_bedfr = ui['SILTFR']
    clay_bedfr = ui['CLAYFR']
    total_bedfr = sand_bedfr + silt_bedfr + clay_bedfr
    if abs(total_bedfr - 1.0) > 0.01:
        errorsV[6] += 1  # error message: sum of bed sediment fractions is not close enough to 1.0

    # suspended sediment concentrations; table ssed-init
    sand_ssed1 = ui['SSED1']
    silt_ssed2 = ui['SSED2']
    clay_ssed3 = ui['SSED3']
    total_ssed4 = sand_ssed1 + silt_ssed2 + clay_ssed3

    # get input time series- inflow of sediment is in units of mg.ft3/l.ivl (english) or mg.m3/l.ivl (metric)
    TAU = ts['TAU']
    AVDEP = ts['AVDEP']
    AVVEL = ts['AVVEL']
    RO = ts['RO']
    HRAD = ts['HRAD']
    TWID = ts['TWID']

    # default missing sediment inflows to all-zero series
    if not 'ISED1' in ts:
        ts['ISED1'] = zeros(simlen)
    if not 'ISED2' in ts:
        ts['ISED2'] = zeros(simlen)
    if not 'ISED3' in ts:
        ts['ISED3'] = zeros(simlen)

    ISED1 = ts['ISED1']  # if present, else ISED is identically zero; sand
    ISED2 = ts['ISED2']  # if present, else ISED is identically zero; silt
    ISED3 = ts['ISED3']  # if present, else ISED is identically zero; clay
    ISED4 = ISED1 + ISED2 + ISED3

    # NOTE(review): if htfg == 0 and sandfg == 3, TW is never read from ts and
    # the where() below would operate on an unbound name -- verify upstream
    # guarantees ts['TW'] exists or that this combination cannot occur.
    htfg = int(ui['HTFG'])
    if htfg == 1:
        TW = ts['TW']
    if htfg == 0 and sandfg != 3:
        TW = ts['TW']
    TW = where(TW < -100.0, 20.0, TW)  # replace undefined markers with 20 deg

    # preallocate storage for computed time series
    # WASH = ts['WASH'] = zeros(simlen)    # washload concentration, state variable
    # SAND = ts['SAND'] = zeros(simlen)    # sandload oncentration, state variable
    # BDSAND = ts['BDSAND'] = zeros(simlen)  # bed storage of sand, state variable
    # SDCF1_11 = ts['WASH'] = zeros(simlen)  # deposition of washload on bed
    # SDCF1_21 = ts['WASH'] = zeros(simlen)  # total outflow of washload from RCHRES
    # SDCF1_12 = ts['WASH'] = zeros(simlen)  # exchange of sand between bed and suspended storage
    # SDCF1_22 = ts['WASH'] = zeros(simlen)  # total outflow of sandload from rchres
    # SDCF2_1 = ts['SDCF2_1'] = zeros((simlen, nexits))  # washload outflow by gate
    # SDCF2_2 = ts['SDCF2_2'] = zeros((simlen, nexits))  # sandload outflow by gate
    # ossand = zeros(nexits)
    SSED1 = ts['SSED1'] = zeros(simlen)    # suspended sand concentration
    SSED2 = ts['SSED2'] = zeros(simlen)    # suspended silt concentration
    SSED3 = ts['SSED3'] = zeros(simlen)    # suspended clay concentration
    SSED4 = ts['SSED4'] = zeros(simlen)    # suspended sediment concentration
    RSED1 = ts['RSED1'] = zeros(simlen)    # sediment storages - suspended sand
    RSED2 = ts['RSED2'] = zeros(simlen)    # sediment storages - suspended silt
    RSED3 = ts['RSED3'] = zeros(simlen)    # sediment storages - suspended clay
    RSED4 = ts['RSED4'] = zeros(simlen)    # sediment storages - bed sand
    RSED5 = ts['RSED5'] = zeros(simlen)    # sediment storages - bed silt
    RSED6 = ts['RSED6'] = zeros(simlen)    # sediment storages - bed clay
    RSED7 = ts['RSED7'] = zeros(simlen)    # sediment storages - total sand
    RSED8 = ts['RSED8'] = zeros(simlen)    # sediment storages - total silt
    RSED9 = ts['RSED9'] = zeros(simlen)    # sediment storages - total clcay
    RSED10 = ts['RSED10'] = zeros(simlen)  # sediment storages - total sand silt clay
    TSED1 = ts['TSED1'] = zeros(simlen)    # Total sediment storages by fraction
    TSED2 = ts['TSED2'] = zeros(simlen)    # Total sediment storages by fraction
    TSED3 = ts['TSED3'] = zeros(simlen)    # Total sediment storages by fraction
    BEDDEP = ts['BEDDEP'] = zeros(simlen)  # Bed depth
    DEPSCR1 = ts['DEPSCR1'] = zeros(simlen)  # Deposition (positive) or scour (negative) - sand
    DEPSCR2 = ts['DEPSCR2'] = zeros(simlen)  # Deposition (positive) or scour (negative) - silt
    DEPSCR3 = ts['DEPSCR3'] = zeros(simlen)  # Deposition (positive) or scour (negative) - clay
    DEPSCR4 = ts['DEPSCR4'] = zeros(simlen)  # Deposition (positive) or scour (negative) - total
    ROSED1 = ts['ROSED1'] = zeros(simlen)  # Total outflows of sediment from the rchres - sand
    ROSED2 = ts['ROSED2'] = zeros(simlen)  # Total outflows of sediment from the rchres - silt
    ROSED3 = ts['ROSED3'] = zeros(simlen)  # Total outflows of sediment from the rchres - clay
    ROSED4 = ts['ROSED4'] = zeros(simlen)  # Total outflows of sediment from the rchres - total
    OSED1 = zeros((simlen, nexits))  # per-exit sand outflow
    OSED2 = zeros((simlen, nexits))  # per-exit silt outflow
    OSED3 = zeros((simlen, nexits))  # per-exit clay outflow
    OSED4 = zeros((simlen, nexits))  # per-exit total outflow

    fact = 1.0 / total_bedfr  # normalize fractions to sum to one
    sand_bedfr *= fact
    silt_bedfr *= fact
    clay_bedfr *= fact
    rhomn = sand_bedfr * sand_rho + silt_bedfr * silt_rho + clay_bedfr * clay_rho

    volsed = len_ * bedwid * beddep * (1.0 - por)  # total volume of sediment particles- ft3 or m3
    rwtsed = volsed * rhomn  # total weight relative to water- rhomn is in parts/part (same as kg/l)
    rwtsed = rwtsed * 1.0E06  # converts from kg/l to mg/l

    # find the weight of each fraction- units are (mg/l)*ft3 or (mg/l)*m3
    sand_wt_rsed4 = sand_bedfr * rwtsed
    silt_wt_rsed5 = silt_bedfr * rwtsed
    clay_wt_rsed6 = clay_bedfr * rwtsed

    # find the total quantity (bed and suspended) of each sediment size fraction
    sand_rsed1 = sand_ssed1 * vol
    sand_rssed1 = sand_t_rsed7 = sand_rsed1 + sand_wt_rsed4

    silt_rsed2 = silt_ssed2 * vol
    silt_rssed2 = silt_t_rsed8 = silt_rsed2 + silt_wt_rsed5

    clay_rsed3 = clay_ssed3 * vol
    clay_rssed3 = clay_t_rsed9 = clay_rsed3 + clay_wt_rsed6

    tsed1 = sand_rsed1 + silt_rsed2 + clay_rsed3
    tsed2 = sand_wt_rsed4 + silt_wt_rsed5 + clay_wt_rsed6
    tsed3 = total_rsed10 = sand_t_rsed7 + silt_t_rsed8 + clay_t_rsed9

    wsande = sand_w * 3.28 / delts  # convert fall velocity from m/ivl to ft/sec

    VOL = ts['VOL']
    SROVOL = ts['SROVOL']
    EROVOL = ts['EROVOL']
    SOVOL = zeros((simlen, nexits))
    EOVOL = zeros((simlen, nexits))
    for i in range(nexits):
        SOVOL[:, i] = ts['SOVOL' + str(i + 1)]
        EOVOL[:, i] = ts['EOVOL' + str(i + 1)]

    #################### END PSED

    #######################################################################################
    # the following section (2 of 3) added by pbd to SEDTRN, this one to prepare for special actions
    #######################################################################################
    sedtrn_ix = sedtrn_get_ix(state_ix, state_paths, state_info['domain'])
    # these are integer placeholders faster than calling the array look each timestep
    rsed4_ix, rsed5_ix, rsed6_ix = sedtrn_ix['RSED4'], sedtrn_ix['RSED5'], sedtrn_ix['RSED6']
    #######################################################################################

    for loop in range(simlen):

        #######################################################################################
        # the following section (3 of 3) added by pbd to accommodate special actions
        #######################################################################################
        # set state_ix with value of local state variables and/or needed vars
        state_ix[rsed4_ix] = sand_wt_rsed4
        state_ix[rsed5_ix] = silt_wt_rsed5
        state_ix[rsed6_ix] = clay_wt_rsed6
        if (state_info['state_step_om'] == 'enabled'):
            step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, loop)  # traditional 'ACTIONS' done in here
        # Do write-backs for editable STATE variables
        sand_wt_rsed4 = state_ix[rsed4_ix]
        silt_wt_rsed5 = state_ix[rsed5_ix]
        clay_wt_rsed6 = state_ix[rsed6_ix]
        #######################################################################################

        # perform any necessary unit conversions
        if uunits == 2:  # uci is in metric units
            avvele = AVVEL[loop] * 3.28
            avdepm = AVDEP[loop]
            avdepe = AVDEP[loop] * 3.28
            rom = RO[loop]
            hrade = HRAD[loop] * 3.28
            twide = TWID[loop] * 3.28
            ised1 = ISED1[loop] / 2.83E-08
            ised2 = ISED2[loop] / 2.83E-08
            ised3 = ISED3[loop] / 2.83E-08
        else:  # uci is in english units
            avvele = AVVEL[loop]
            avdepm = AVDEP[loop] * 0.3048
            avdepe = AVDEP[loop]
            rom = RO[loop] * 0.0283
            hrade = HRAD[loop]
            twide = TWID[loop]
            ised1 = ISED1[loop] / 3.121E-08
            ised2 = ISED2[loop] / 3.121E-08
            ised3 = ISED3[loop] / 3.121E-08
        tau = TAU[loop]
        tw = TW[loop]
        tw = (tw - 32.0) * 0.5555  # deg F -> deg C

        # Following is routine #&COHESV() to simulate behavior of cohesive sediments (silt and clay)
        # compute bed fractions based on relative storages
        totbed = silt_wt_rsed5 + clay_wt_rsed6
        frcsed1 = silt_wt_rsed5 / totbed if totbed > 0.0 else 0.5
        frcsed2 = clay_wt_rsed6 / totbed if totbed > 0.0 else 0.5

        vol = VOL[loop] * AFACT
        srovol = SROVOL[loop]
        erovol = EROVOL[loop]
        sovol = SOVOL[loop, :]
        eovol = EOVOL[loop, :]
        silt_ssed2, rosed2, osed2 = advect(ised2, silt_ssed2, nexits, svol, vol, srovol, erovol, sovol, eovol)
        silt_rsed2 = silt_ssed2 * vol  # calculate exchange between bed and suspended sediment
        # vols = svol

        # consider deposition and scour
        if avdepe > 0.17:
            depscr2, silt_rsed2, silt_wt_rsed5 = bdexch(avdepm, silt_w, tau, silt_taucd, silt_taucs, silt_m, vol, frcsed1, silt_rsed2, silt_wt_rsed5)
        else:
            depscr2 = 0.0
        silt_ssed2 = silt_rsed2 / vol if vol > 0.0 else -1.0e30  # -1e30 marks "undefined" (dry reach)

        clay_ssed3, rosed3, osed3 = advect(ised3, clay_ssed3, nexits, svol, vol, srovol, erovol, sovol, eovol)
        clay_rsed3 = clay_ssed3 * vol  # calculate exchange between bed and suspended sediment

        # consider deposition and scour
        if avdepe > 0.17:
            depscr3, clay_rsed3, clay_wt_rsed6 = bdexch(avdepm, clay_w, tau, clay_taucd, clay_taucs, clay_m, vol, frcsed2, clay_rsed3, clay_wt_rsed6)
        else:
            depscr3 = 0.0
        clay_ssed3 = clay_rsed3 / vol if vol > 0.0 else -1.0e30
        # end COHESV()

        # compute fine sediment load
        fsl = silt_ssed2 + clay_ssed3
        ksand = sand_ksand
        expsnd = sand_expsnd

        # simulate sandload. done after washload because washload affects sand transport if the colby method is used
        # Following code is #$SANDLD()
        sands = sand_ssed1  # save starting concentration value
        if vol > 0.0:  # rchres contains water
            if rom > 0.0 and avdepe > 0.17:  # there is outflow from the rchres- perform advection
                # calculate potential value of sand
                if sandfg == 1:  # case 1 toffaleti equation
                    gsi = toffaleti(avvele, db50e, hrade, slope, tw, wsande)
                    psand = (gsi * twide * 10.5) / rom  # convert potential sand transport rate to a concentration in mg/l
                elif sandfg == 2:  # case 2 colby equation
                    gsi, ferror, d50err, hrerr, velerr = colby(avvele, db50m, hrade, fsl, tw)
                    if ferror == 1:
                        # NOTE(review): errorsV[4] (ERRMSG4) is described here but never
                        # incremented -- the Colby failure is silently replaced by Toffaleti.
                        pass  # ERRMSG: fatal error ocurred in colby method- one or more variables went outside valid range- warn and switch to toffaleti method
                        gsi = toffaleti(avvele, db50e, hrade, slope, tw, wsande)  # switch to toffaleti method
                    psand = (gsi * twide * 10.5) / rom  # convert potential sand transport rate to conc in mg/l
                elif sandfg == 3:  # case 3 input power function
                    psand = ksand * avvele**expsnd

                prosnd = (sands * srovol) + (psand * erovol)  # calculate potential outflow of sand during ivl
                pscour = (vol * psand) - (svol * sands) + prosnd - ised1  # qty.vol/l.ivl # calculate potential bed scour from, or to deposition
                if pscour < sand_wt_rsed4:  # potential scour is satisfied by bed storage;
                    # new conc. of sandload is potential conc.
                    scour = pscour
                    sand_ssed1 = psand
                    sand_rsed1 = sand_ssed1 * vol
                    sand_wt_rsed4 -= scour
                else:  # potential scour cannot be satisfied; all of the available bed storage is scoured
                    scour = sand_wt_rsed4
                    sand_wt_rsed4 = 0.0
                    sand_ssed1 = (ised1 + scour + sands * (svol - srovol)) / (vol + erovol)  # calculate new conc. of suspended sandload
                    sand_rsed1 = sand_ssed1 * vol  # calculate new storage of suspended sandload
                rosed1 = (srovol * sands) + (erovol * sand_ssed1)  # calculate total amount of sand leaving rchres during ivl
                osed1 = sovol * sands + eovol * sand_ssed1  # calculate amount of sand leaving through each exit gate in qty.vol/l.ivl
            else:  # no outflow (still water) or water depth less than two inches
                sand_ssed1 = 0.0
                sand_rsed1 = 0.0
                scour = -ised1 - (sands * svol)
                sand_wt_rsed4 -= scour
                rosed1 = 0.0
                osed1 = zeros(nexits)
        else:  # rchres is dry; set sand equal to an undefined number
            sand_ssed1 = -1.0e30
            sand_rsed1 = 0.0
            scour = -ised1 - (sands * svol)  # calculate total amount of sand settling out during interval; this is equal to sand inflow + sand initially present
            sand_wt_rsed4 -= scour  # update bed storage
            rosed1 = 0.0
            osed1 = zeros(nexits)
        depscr1 = -scour  # calculate depth of bed scour or deposition; positive for deposition
        # end SANDLD()

        # set small concentrations to zero
        if abs(sand_ssed1) < 1.0e-15:  # small conc., set to zero
            if depscr1 > 0.0:  # deposition has occurred, add small storage to deposition
                depscr1 += sand_rsed1
                sand_wt_rsed4 += sand_rsed1
            else:  # add small storage to outflow
                rosed1 += sand_rsed1
                depscr1 = 0.0
                if nexits > 1:
                    for n in range(0, nexits):
                        if osed1[n] > 0.0:
                            osed1[n] += sand_rsed1
                            break
            sand_rsed1 = 0.0
            sand_ssed1 = 0.0

        if abs(silt_ssed2) < 1.0e-15:  # small conc., set to zero
            if depscr2 > 0.0:  # deposition has occurred, add small storage to deposition
                depscr2 += silt_rsed2
                silt_wt_rsed5 += silt_rsed2
            else:  # add small storage to outflow
                rosed2 += silt_rsed2
                depscr2 = 0.0
                if nexits > 1:
                    for n in range(0, nexits):
                        if osed2[n] > 0.0:
                            osed2[n] += silt_rsed2
                            break
            silt_rsed2 = 0.0
            silt_ssed2 = 0.0

        if abs(clay_ssed3) < 1.0e-15:  # small conc., set to zero
            if depscr3 > 0.0:  # deposition has occurred, add small storage to deposition
                depscr3 += clay_rsed3
                clay_wt_rsed6 += clay_rsed3
            else:  # add small storage to outflow
                rosed3 += clay_rsed3
                depscr3 = 0.0
                if nexits > 1:
                    for n in range(0, nexits):
                        if osed3[n] > 0.0:
                            osed3[n] += clay_rsed3
                            break
            clay_rsed3 = 0.0
            clay_ssed3 = 0.0

        osed4 = zeros(nexits)
        # calculate total quantity of material in suspension and in the bed; check bed conditions
        osed4 += osed1
        sand_rssed1 = sand_t_rsed7 = sand_rsed1 + sand_wt_rsed4  # total storage in mg.vol/l
        if sand_wt_rsed4 == 0.0:  # warn that bed is empty
            # errmsg
            errorsV[0] += 1  # The bed storage of sediment size fraction sand is empty.

        osed4 += osed2
        silt_rssed2 = silt_t_rsed8 = silt_rsed2 + silt_wt_rsed5  # total storage in mg.vol/l
        if silt_wt_rsed5 == 0.0:  # warn that bed is empty
            # errmsg
            errorsV[1] += 1  # The bed storage of sediment size fraction silt is empty.

        osed4 += osed3
        clay_rssed3 = clay_t_rsed9 = clay_rsed3 + clay_wt_rsed6  # total storage in mg.vol/l
        if clay_wt_rsed6 == 0.0:  # warn that bed is empty
            # errmsg
            errorsV[2] += 1  # The bed storage of sediment size fraction clay is empty.

        # find the volume occupied by each fraction of bed sediment- ft3 or m3
        volsed = (sand_wt_rsed4 / (sand_rho * 1.0e06)
                  + silt_wt_rsed5 / (silt_rho * 1.0e06)
                  + clay_wt_rsed6 / (clay_rho * 1.0e06))

        total_ssed4 = sand_ssed1 + silt_ssed2 + clay_ssed3
        tsed1 = sand_rsed1 + silt_rsed2 + clay_rsed3
        tsed2 = sand_wt_rsed4 + silt_wt_rsed5 + clay_wt_rsed6
        tsed3 = total_rsed10 = sand_t_rsed7 + silt_t_rsed8 + clay_t_rsed9
        depscr4 = depscr1 + depscr2 + depscr3
        rosed4 = rosed1 + rosed2 + rosed3

        # find total depth of sediment
        volsed = volsed / (1.0 - por)  # allow for porosity
        beddep = volsed / (len_ * bedwid)  # calculate thickness of bed- ft or m
        if beddep > bedwrn:
            # Errormsg: warn that bed depth appears excessive
            errorsV[3] += 1

        svol = vol  # svol is volume at start of time step, update for next time thru

        SSED1[loop] = sand_ssed1
        SSED2[loop] = silt_ssed2
        SSED3[loop] = clay_ssed3
        SSED4[loop] = total_ssed4
        BEDDEP[loop] = beddep
        # unit conversion of storages/fluxes for output:
        # English: mg.ft3/l -> tons; metric: mg.m3/l -> tonnes
        if uunits == 1:
            RSED1[loop] = sand_rsed1 * 3.121E-08
            RSED2[loop] = silt_rsed2 * 3.121E-08
            RSED3[loop] = clay_rsed3 * 3.121E-08
            RSED4[loop] = sand_wt_rsed4 * 3.121E-08
            RSED5[loop] = silt_wt_rsed5 * 3.121E-08
            RSED6[loop] = clay_wt_rsed6 * 3.121E-08
            RSED7[loop] = sand_t_rsed7 * 3.121E-08
            RSED8[loop] = silt_t_rsed8 * 3.121E-08
            RSED9[loop] = clay_t_rsed9 * 3.121E-08
            RSED10[loop] = total_rsed10 * 3.121E-08
            TSED1[loop] = tsed1 * 3.121E-08
            TSED2[loop] = tsed2 * 3.121E-08
            TSED3[loop] = tsed3 * 3.121E-08
            DEPSCR1[loop] = depscr1 * 3.121E-08
            DEPSCR2[loop] = depscr2 * 3.121E-08
            DEPSCR3[loop] = depscr3 * 3.121E-08
            DEPSCR4[loop] = depscr4 * 3.121E-08
            ROSED1[loop] = rosed1 * 3.121E-08
            ROSED2[loop] = rosed2 * 3.121E-08
            ROSED3[loop] = rosed3 * 3.121E-08
            ROSED4[loop] = rosed4 * 3.121E-08
            OSED1[loop] = osed1 * 3.121E-08
            OSED2[loop] = osed2 * 3.121E-08
            OSED3[loop] = osed3 * 3.121E-08
            OSED4[loop] = osed4 * 3.121E-08
        else:
            RSED1[loop] = sand_rsed1 * 1E-06
            RSED2[loop] = silt_rsed2 * 1E-06
            RSED3[loop] = clay_rsed3 * 1E-06
            RSED4[loop] = sand_wt_rsed4 * 1E-06
            RSED5[loop] = silt_wt_rsed5 * 1E-06
            RSED6[loop] = clay_wt_rsed6 * 1E-06
            RSED7[loop] = sand_t_rsed7 * 1E-06
            RSED8[loop] = silt_t_rsed8 * 1E-06
            RSED9[loop] = clay_t_rsed9 * 1E-06
            RSED10[loop] = total_rsed10 * 1E-06
            TSED1[loop] = tsed1 * 1E-06
            TSED2[loop] = tsed2 * 1E-06
            TSED3[loop] = tsed3 * 1E-06
            DEPSCR1[loop] = depscr1 * 1E-06
            DEPSCR2[loop] = depscr2 * 1E-06
            DEPSCR3[loop] = depscr3 * 1E-06
            DEPSCR4[loop] = depscr4 * 1E-06
            ROSED1[loop] = rosed1 * 1E-06  # 2.83E-08
            ROSED2[loop] = rosed2 * 1E-06
            ROSED3[loop] = rosed3 * 1E-06
            ROSED4[loop] = rosed4 * 1E-06
            OSED1[loop] = osed1 * 1E-06
            OSED2[loop] = osed2 * 1E-06
            OSED3[loop] = osed3 * 1E-06
            OSED4[loop] = osed4 * 1E-06

    if nexits > 1:
        # expose per-exit outflow series under numbered keys
        for i in range(nexits):
            ts['OSED1' + str(i+1)] = OSED1[:, i]
            ts['OSED2' + str(i + 1)] = OSED2[:, i]
            ts['OSED3' + str(i + 1)] = OSED3[:, i]
            ts['OSED4' + str(i + 1)] = OSED4[:, i]

    return errorsV


@njit(cache=True)
def bdexch(avdepm, w, tau, taucd, taucs, m, vol, frcsed, susp, bed):
    ''' simulate deposition and scour of a cohesive sediment fraction- silt or clay

    Returns (depmas - scrmas, susp, bed): the net deposition (positive) or
    scour (negative) mass, and the updated suspended and bed storages.
    '''
    if w > 0.0 and tau < taucd and susp > 1.0e-30:  # deposition will occur
        expnt = -w / avdepm * (1.0 - tau / taucd)
        depmas = susp * (1.0 - exp(expnt))
        susp -= depmas
        bed += depmas
    else:
        depmas = 0.0  # no deposition- concentrations unchanged

    if tau > taucs and m > 0.0:  # scour can occur- units are: m- kg/m2.ivl avdepm- m scr- mg/l
        scr = frcsed * m / avdepm * 1000.0 * (tau/taucs - 1.0)
        scrmas = min(bed, scr * vol)  # check availability of material ???

        # update storages
        susp += scrmas
        bed -= scrmas
    else:  # no scour
        scrmas = 0.0
    return depmas - scrmas, susp, bed  # net deposition or scour, susp, bed


''' Sediment Transport in Alluvial Channels, 1963-65 by Bruce Colby.
This report explains the following empirical algorithm.'''

@njit(cache=True)
def colby(v, db50, fhrad, fsl, tempr):
    '''Colby's method to calculate the capacity of the flow to transport sand.

    Interpolates the tabulated Colby curves (Figures 24 and 26) in log space.
    Returns (gsi, ferror, d50err, hrerr, velerr); ferror == 1 with the
    corresponding range flag set means an input was outside the valid range
    and gsi is 0.0 (caller switches to the Toffaleti method).
    '''
# Colby's method to calculate the capacity of the flow to transport sand.
#
# The colby method has the following units and applicable ranges of variables.
#   average velocity.............v.......fps.........1-10 fps
#   hydraulic radius.............fhrad...ft..........1-100 ft
#   median bed material size.....db50....mm..........0.1-0.8 mm
#   temperature..................tmpr....deg f.......32-100 deg.
#   fine sediment concentration..fsl.....mg/liter....0-200000 ppm
#   total sediment load..........gsi.....ton/day.ft..
    G = zeros((5,9,7))  # defined by Figure 26
    G[1, 1, 1], G[2, 1, 1], G[3, 1, 1], G[4, 1, 1] = 1.0, 0.30, 0.06, 0.00
    G[1, 2, 1], G[2, 2, 1], G[3, 2, 1], G[4, 2, 1] = 3.00, 3.30, 2.50, 2.00
    G[1, 3, 1], G[2, 3, 1], G[3, 3, 1], G[4, 3, 1] = 5.40, 9.0, 10.0, 20.0
    G[1, 4, 1], G[2, 4, 1], G[3, 4, 1], G[4, 4, 1] = 11.0, 26.0, 50.0, 150.0
    G[1, 5, 1], G[2, 5, 1], G[3, 5, 1], G[4, 5, 1] = 17.0, 49.0, 130.0, 500.0
    G[1, 6, 1], G[2, 6, 1], G[3, 6, 1], G[4, 6, 1] = 29.0, 101.0, 400.0, 1350.0
    G[1, 7, 1], G[2, 7, 1], G[3, 7, 1], G[4, 7, 1] = 44.0, 160.0, 700.0, 2500.0
    G[1, 8, 1], G[2, 8, 1], G[3, 8, 1], G[4, 8, 1] = 60.0, 220.0, 1000.0, 4400.0
    G[1, 1, 2], G[2, 1, 2], G[3, 1, 2], G[4, 1, 2] = 0.38, 0.06, 0.0, 0.0
    G[1, 2, 2], G[2, 2, 2], G[3, 2, 2], G[4, 2, 2] = 1.60, 1.20, 0.65, 0.10
    G[1, 3, 2], G[2, 3, 2], G[3, 3, 2], G[4, 3, 2] = 3.70, 5.0, 4.0, 3.0
    G[1, 4, 2], G[2, 4, 2], G[3, 4, 2], G[4, 4, 2] = 10.0, 18.0, 30.0, 52.0
    G[1, 5, 2], G[2, 5, 2], G[3, 5, 2], G[4, 5, 2] = 17.0, 40.0, 80.0, 160.0
    G[1, 6, 2], G[2, 6, 2], G[3, 6, 2], G[4, 6, 2] = 36.0, 95.0, 230.0, 650.0
    G[1, 7, 2], G[2, 7, 2], G[3, 7, 2], G[4, 7, 2] = 60.0, 150.0, 415.0, 1200.0
    G[1, 8, 2], G[2, 8, 2], G[3, 8, 2], G[4, 8, 2] = 81.0, 215.0, 620.0, 1500.0
    G[1, 1, 3], G[2, 1, 3], G[3, 1, 3], G[4, 1, 3] = 0.14, 0.0, 0.0, 0.0
    G[1, 2, 3], G[2, 2, 3], G[3, 2, 3], G[4, 2, 3] = 1.0, 0.60, 0.15, 0.0
    G[1, 3, 3], G[2, 3, 3], G[3, 3, 3], G[4, 3, 3] = 3.30, 3.00, 1.70, 0.50
    G[1, 4, 3], G[2, 4, 3], G[3, 4, 3], G[4, 4, 3] = 11.0, 15.0, 17.0, 14.0
    G[1, 5, 3], G[2, 5, 3], G[3, 5, 3], G[4, 5, 3] = 20.0, 35.0, 49.0, 70.0
    G[1, 6, 3], G[2, 6, 3], G[3, 6, 3], G[4, 6, 3] = 44.0, 85.0, 150.0, 250.0
    G[1, 7, 3], G[2, 7, 3], G[3, 7, 3], G[4, 7, 3] = 71.0, 145.0, 290.0, 500.0
    G[1, 8, 3], G[2, 8, 3], G[3, 8, 3], G[4, 8, 3] = 100.0, 202.0, 400.0, 700.0
    G[1, 1, 4], G[2, 1, 4], G[3, 1, 4], G[4, 1, 4] = 0.0, 0.0, 0.0, 0.0
    G[1, 2, 4], G[2, 2, 4], G[3, 2, 4], G[4, 2, 4] = 0.70, 0.30, 0.06, 0.0
    G[1, 3, 4], G[2, 3, 4], G[3, 3, 4], G[4, 3, 4] = 2.9, 2.3, 1.0, 0.06
    G[1, 4, 4], G[2, 4, 4], G[3, 4, 4], G[4, 4, 4] = 11.5, 13.0, 12.0, 7.0
    G[1, 5, 4], G[2, 5, 4], G[3, 5, 4], G[4, 5, 4] = 22.0, 31.0, 40.0, 50.0
    G[1, 6, 4], G[2, 6, 4], G[3, 6, 4], G[4, 6, 4] = 47.0, 84.0, 135.0, 210.0
    G[1, 7, 4], G[2, 7, 4], G[3, 7, 4], G[4, 7, 4] = 75.0, 140.0, 240.0, 410.0
    G[1, 8, 4], G[2, 8, 4], G[3, 8, 4], G[4, 8, 4] = 106.0, 190.0, 350.0, 630.0
    G[1, 1, 5], G[2, 1, 5], G[3, 1, 5], G[4, 1, 5] = 0.0, 0.0, 0.0, 0.0
    G[1, 2, 5], G[2, 2, 5], G[3, 2, 5], G[4, 2, 5] = 0.44, 0.06, 0.0, 0.0
    G[1, 3, 5], G[2, 3, 5], G[3, 3, 5], G[4, 3, 5] = 2.8, 1.8, 0.6, 0.0
    G[1, 4, 5], G[2, 4, 5], G[3, 4, 5], G[4, 4, 5] = 12.0, 12.5, 10.0, 4.5
    G[1, 5, 5], G[2, 5, 5], G[3, 5, 5], G[4, 5, 5] = 24.0, 30.0, 35.0, 37.0
    G[1, 6, 5], G[2, 6, 5], G[3, 6, 5], G[4, 6, 5] = 52.0, 78.0, 120.0, 190.0
    G[1, 7, 5], G[2, 7, 5], G[3, 7, 5], G[4, 7, 5] = 83.0, 180.0, 215.0, 380.0
    G[1, 8, 5], G[2, 8, 5], G[3, 8, 5], G[4, 8, 5] = 120.0, 190.0, 305.0, 550.0
    G[1, 1, 6], G[2, 1, 6], G[3, 1, 6], G[4, 1, 6] = 0.0, 0.0, 0.0, 0.0
    G[1, 2, 6], G[2, 2, 6], G[3, 2, 6], G[4, 2, 6] = 0.3, 0.0, 0.0, 0.0
    G[1, 3, 6], G[2, 3, 6], G[3, 3, 6], G[4, 3, 6] = 2.9, 1.4, 0.3, 0.0
    G[1, 4, 6], G[2, 4, 6], G[3, 4, 6], G[4, 4, 6] = 14.0, 11.0, 7.7, 3.0
    G[1, 5, 6], G[2, 5, 6], G[3, 5, 6], G[4, 5, 6] = 27.0, 29.0, 30.0, 30.0
    G[1, 6, 6], G[2, 6, 6], G[3, 6, 6], G[4, 6, 6] = 57.0, 75.0, 110.0, 170.0
    G[1, 7, 6], G[2, 7, 6], G[3, 7, 6], G[4, 7, 6] = 90.0, 140.0, 200.0, 330.0
    G[1, 8, 6], G[2, 8, 6], G[3, 8, 6], G[4, 8, 6] = 135.0, 190.0, 290.0, 520.0

    F = zeros((6,11))  # defined by Figure 24
    F[1, 1], F[2, 1], F[3, 1], F[4, 1], F[5, 1] = 1.0, 1.1, 1.6, 2.6, 4.2
    F[1, 2], F[2, 2], F[3, 2], F[4, 2], F[5, 2] = 1.0, 1.1, 1.65, 2.75, 4.9
    F[1, 3], F[2, 3], F[3, 3], F[4, 3], F[5, 3] = 1.0, 1.1, 1.7, 3.0, 5.5
    F[1, 4], F[2, 4], F[3, 4], F[4, 4], F[5, 4] = 1.0, 1.12, 1.9, 3.6, 7.0
    F[1, 5], F[2, 5], F[3, 5], F[4, 5], F[5, 5] = 1.0, 1.17, 2.05, 4.3, 8.7
    F[1, 6], F[2, 6], F[3, 6], F[4, 6], F[5, 6] = 1.0, 1.2, 2.3, 5.5, 11.2
    F[1, 7], F[2, 7], F[3, 7], F[4, 7], F[5, 7] = 1.0, 1.22, 2.75, 8.0, 22.0
    F[1, 8], F[2, 8], F[3, 8], F[4, 8], F[5, 8] = 1.0, 1.25, 3.0, 9.6, 29.0
    F[1, 9], F[2, 9], F[3, 9], F[4, 9], F[5, 9] = 1.0, 1.3, 3.5, 12.0, 43.0
    F[1, 10], F[2, 10], F[3, 10], F[4, 10], F[5, 10] = 1.0, 1.4, 4.9, 22.0, 120.0

    # T = array([[-999, -999, -999, -999, -999, -999, -999, -999],
    #            [-999, 1.2, 1.15, 1.10, 0.96, 0.90, 0.85, 0.82],
    #            [-999, 1.35, 1.25, 1.12, 0.92, 0.86, 0.80, 0.75],
    #            [-999, 1.60, 1.40, 1.20, 0.89, 0.80, 0.72, 0.66],
    #            [-999, 2.00, 1.65, 1.30, 0.85, 0.72, 0.63, 0.55]]).T  # Temperature adjustment, Figure 24

    T = zeros((8,5))
    T[0, 0], T[0, 1], T[0, 2], T[0, 3], T[0, 4] = -999, -999, -999, -999, -999
    T[1, 0], T[1, 1], T[1, 2], T[1, 3], T[1, 4] = -999, 1.2, 1.35, 1.60, 2.00
    T[2, 0], T[2, 1], T[2, 2], T[2, 3], T[2, 4] = -999, 1.15, 1.25, 1.40, 1.65
    T[3, 0], T[3, 1], T[3, 2], T[3, 3], T[3, 4] = -999, 1.10, 1.12, 1.20, 1.30
    T[4, 0], T[4, 1], T[4, 2], T[4, 3], T[4, 4] = -999, 0.96, 0.92, 0.89, 0.85
    T[5, 0], T[5, 1], T[5, 2], T[5, 3], T[5, 4] = -999, 0.90, 0.86, 0.80, 0.72
    T[6, 0], T[6, 1], T[6, 2], T[6, 3], T[6, 4] = -999, 0.85, 0.80, 0.72, 0.63
    T[7, 0], T[7, 1], T[7, 2], T[7, 3], T[7, 4] = -999, 0.82, 0.75, 0.66, 0.55

    # axis vectors for the table lookups; index 0 holds a -999 sentinel so the
    # tables can be addressed with Fortran-style 1-based indices
    DF = array([-999, 0.10, 0.20, 0.30, 0.60, 1.00, 2.00, 6.00, 10.00, 20.00, 1.E2])  # Depths for Figure 24
    CF = array([-999, 0.00, 1.E4, 5.E4, 1.E5, 1.5E5])  # Concentrations of sediment for Figure 24
    P = array([-999, 0.60, 0.90, 1.0, 1.0, 0.83, 0.60, 0.40, 0.25, 0.15, 0.09, 0.05])  # Percentage Effect for Figure 24
    DP = array([-999, 0.10, 0.15, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90, 1.00])  # Median diameters for Figure 24
    DG = array([-999, 0.10, 1.00, 10.0, 100.0])  # Depth values for Figure 26
    VG = array([-999, 1.0, 1.5, 2.0, 3.0, 4.0, 6.0, 8.0, 10.0])  # Velocity values for Figure 26
    D50G = array([-999, 0.10, 0.20, 0.30, 0.40, 0.60, 0.80])  # Median values for figure 26
    TEMP = array([-999, 32.0, 40.0, 50.0, 70.0, 80.0, 90.0, 100.0])  # Temperatures for lookup in Figure 26

    ferror = 0
    d50err = 0
    hrerr = 0
    velerr = 0

    id501 = 0
    id502 = 0
    id1 = 0
    iv1 = 0
    it1 = 0
    # bracket db50 between D50G[id501] and D50G[id502] for log interpolation
    if not 0.80 >= db50 >= 0.10:  # D50G limits
        ferror = 1
        d50err = 1
        return 0.0, ferror, d50err, hrerr, velerr
    for id501, db50x in enumerate(D50G):
        if db50x > db50:
            break
    id501 -= 1
    id502 = id501 + 1
    zz1 = log10(D50G[id501])
    zz2 = log10(D50G[id502])
    zzratio = (log10(db50) - zz1) / (zz2 - zz1)

    # bracket fhrad between DG[id1] and DG[id2]
    if not 100.0 >= fhrad >= 0.10:  # DG limits
        ferror = 1
        hrerr = 1
        return 0.0, ferror, d50err, hrerr, velerr
    # NOTE(review): this search differs from the D50G/VG searches above -- the
    # break condition is inverted (fhrad > dgx fires on the -999 sentinel at
    # index 0) and the index is incremented rather than decremented, so the
    # bracket is always DG[1]..DG[2]. Verify against the HSPF Fortran source.
    for id1, dgx in enumerate(DG):
        if fhrad > dgx:
            break
    id1 = id1 + 1
    id2 = id1 + 1
    xx1 = log10(DG[id1])
    xx2 = log10(DG[id2])
    xxratio = (log10(fhrad) - xx1) / ((xx2 - xx1))

    # bracket v between VG[iv1] and VG[iv2]
    if not 10.0 >= v >= 1.0:  # VG limits
        ferror = 1
        velerr = 1
        return 0.0, ferror, d50err, hrerr, velerr
    for iv1, vx in enumerate(VG):
        if vx > v:
            break
    iv1 -= 1
    iv2 = iv1 + 1
    yy1 = log10(VG[iv1])
    yy2 = log10(VG[iv2])
    yyratio = (log10(v) - yy1) / (yy2 - yy1)

    tmpr = min(100.0, max(32.0, tempr * 1.8 + 32.0))  # deg C -> deg F, clamped to table range

    # trilinear interpolation of log10(G) over depth (i), velocity (j), d50 (k)
    x = zeros((3,3))
    xa = zeros(3)
    xg = zeros(3)
    for i, i1 in [(1, id1), (2, id2)]:              # DO 200 I= 1,2; I1 = II(I)
        for j, j1 in [(1, iv1), (2, iv2)]:          # DO 190 J= 1,2; J1 = JJ(J)
            for k, k1 in [(1, id501), (2, id502)]:  # DO 180 K= 1,2; K1 = KK(K)
                if G[i1,j1,k1] > 0.0:
                    x[j,k] = log10(G[i1,j1,k1])
                else:
                    # zero table entry: extrapolate in log space from the first
                    # nonzero entry at a higher velocity index
                    for j3 in range(j1,8):  # DO 140 J3= J1,7
                        if G[i1,j3,k1] > 0.0:
                            break
                    x[j,k] = log10(G[i1,j3,k1]) + (log10(VG[j1] / VG[j3])) * (log10(G[i1,j3+1,k1] / G[i1,j3,k1])) / (log10(VG[j3+1] / VG[j3]))

        xa[1] = x[1,1] + (x[1,2] - x[1,1]) * zzratio
        xa[2] = x[2,1] + (x[2,2] - x[2,1]) * zzratio
        xn3 = xa[2] - xa[1]
        xg[i] = xa[1] + xn3 * yyratio

    xn4 = xg[2] - xg[1]
    gtuc = 10.0**(xg[1] + (xn4 * xxratio))  # uncorrected gt in lb/sec/ft

    # Adjustment coefficient for temperature
    if abs(tmpr - 60.0) <= 1.0e-5:
        cft = 1.0
    else:
        for it1, tempx in enumerate(TEMP):
            if tempx > tmpr:
                break
        it2 = it1
        it1 -= 1

        xt11 = log10(T[it1][id1])
        xt21 = log10(T[it2][id1])
        xt12 = log10(T[it1][id2])
        xt22 = log10(T[it2][id2])

        xnt = log10(tmpr / TEMP[it1]) / log10(TEMP[it2] / TEMP[it1])
        xct1 = xt11 + xnt * (xt21 - xt11)
        xct2 = xt12 + xnt * (xt22 - xt12)
        cft = 10.0**(xct1 + (xct2 - xct1) * xxratio)

    # fine sediment load correction; (i.e. cohesive sediment or wash) load in mg/liter
    if fsl <= 10.0:
        cff = 1.0
    else:
        for id1, dfx in enumerate(DF):
            if dfx > fhrad:
                break
        id2 = id1 + 1

        if1 = 0
        if fsl > 1.0E+4:
            if1 = 4
            if2 = 5
            # NOTE(review): ERRMSG is assigned but never used or reported.
            ERRMSG = '***** SUBROUTINE COLBY -- FSL WENT > 1.E+4'
        else:
            for if1, cfx in enumerate(CF):
                if cfx > fsl:
                    break
            if2 = if1 + 1

        xf11 = log10(F[if1,id1])
        xf22 = log10(F[if2,id2])
        xf12 = log10(F[if1,id2])
        xf21 = log10(F[if2,id1])

        xnt = (fsl - CF[if1]) / (CF[if2] - CF[if1])
        xct1 = xf11 + xnt * (xf21 - xf11)
        xct2 = xf12 + xnt * (xf22 - xf12)
        xnt = log10(fhrad / DF[id1]) / log10(DF[id2] / DF[id1])
        cff = 10.0**(xct1 + xnt * (xct2 - xct1))
    tcf = cft * cff - 1.0

    # Percent effect correction for median diameter
    if 0.30 >= db50 >= 0.20:
        cfd = 1.0
    else:
        for ip1, db50x in enumerate(DP):
            if db50x > db50:
                break
        ip2 = ip1 + 1

        p1 = log10(P[ip1])
        p2 = log10(P[ip2])
        xnt = log10(db50 / DP[ip1]) / log10(DP[ip2] / DP[ip1])

        cfd = 10.0**(p1 + xnt * (p2 - p1))

    return gtuc * (cfd * tcf + 1.0), ferror, d50err, hrerr, velerr


@njit(cache=True)
def toffaleti(v, fdiam, fhrad, slope, tempr, vset):
    ''' Toffaleti's method to calculate the capacity of the flow to transport sand.'''

    tmpr = tempr * 1.80 + 32.0  # degrees c to degrees f

    # For water temperatures greater than 32f and less than 100f the kinematic viscosity is
    vis = 4.106e-4 * tmpr**-0.864

    # Assuming the d50 grain size is approximately equal to the Geometric mean grain size
    # and sigma-g = 1.5, the d65 grain size can be determined as 1.17*d50.
    d65 = 1.17 * fdiam
    cnv = 0.1198 + 0.00048 * tmpr
    cz = 260.67 - 0.667 * tmpr
    tt = 1.10 * (0.051 + 0.00009 * tmpr)
    zi = vset * v / (cz * fhrad * slope)
    if zi < cnv:
        zi = 1.5 * cnv

    # The manning-strickler equation is used here to Determine the hydraulic radius
    # component due to Grain roughness (r'). Taken from the 1975 asce
    # "sedimentation engineering",pg. 128.
+ rprime = ((v**1.5) * (d65**0.25) / (slope**0.75)) * 0.00349 + ustar = (rprime * slope * 32.2)**0.5 + + afunc = (vis * 1.0e5)**0.333 / (10.0 * ustar) + if afunc <= 0.500: ac = (afunc / 4.89)**-1.45 + elif afunc <= 0.660: ac = (afunc / 0.0036)**0.67 + elif afunc <= 0.720: ac = (afunc / 0.29)**4.17 + elif afunc <= 1.25: ac = 48.0 + elif afunc > 1.25: ac = (afunc / 0.304)**2.74 + + k4func = afunc * slope * d65 * 1.0e5 + if k4func <= 0.24: k4 = 1.0 + elif k4func <= 0.35: k4 = (k4func**1.10) * 4.81 + elif k4func > 0.35: k4 = (k4func** (-1.05)) * 0.49 + + ack4 = ac * k4 + if ack4 - 16.0 < 0.0: + ack4 = 16.0 + k4 = 16.0 / ac + oczu = 1.0 + cnv - 1.5 * zi + oczm = 1.0 + cnv - zi + oczl= 1.0 + cnv - 0.756 * zi + zinv = cnv - 0.758 * zi + zm = -zinv + zn = 1.0 + zinv + zo = -0.736 * zi + zp = 0.244 * zi + zq = 0.5 * zi + + # Cli has been multiplied by 1.0e30 to keep it from Exceeding the computer overflow limit + cli = (5.6e+22 * oczl * (v**2.333) / fhrad**(zm) / ((tt * ac * k4 * fdiam)**1.667) + / (1.0 + cnv) / ((fhrad / 11.24)**(zn) - (2.0 * fdiam)**oczl)) + p1 = (2.0 * fdiam / fhrad)**(zo / 2.0) + c2d = cli * p1 * p1 / 1.0e+30 + + # Check to see if the calculated value is reasonable (< 100.0), and adjust it if it is not. 
+ if c2d > 100.0: + cli = cli * 100.0 / c2d + cmi = 43.2 * cli * (1.0 + cnv) * v * (fhrad**zm) # Cmi has been multiplied by 1.0e30 to keep it from computer overflow + + # upper layer transport capacity + fd11 = fhrad / 11.24 + fd25 = fhrad / 2.5 + gsu = (cmi * (fd11**zp) * (fd25**zq) * (fhrad**oczu - (fd25**oczu)))/(oczu * 1.0e+30) + + gsm = (cmi * (fd11**zp) * (fd25**(oczm) - (fd11**oczm))) / (oczm * 1.0e+30) # middle layer transport capacity + gsl = (cmi * ((fd11**(zn)) - ((2.0 * fdiam)**(oczl)))) / (oczl * 1.0e+30) # lower layer transport capacity + gsb = (cmi * ((2.0 * fdiam)**(zn))) / 1.0e+30 # bed layer transport capacity + + return max(0.0, gsu + gsm + gsl + gsb) # Total transport capacity of the rchres (tons/day/ft) + + +def expand_SEDTRN_masslinks(flags, uci, dat, recs): + if flags['SEDTRN']: + # ISED1 + rec = {} + rec['MFACTOR'] = dat.MFACTOR + rec['SGRPN'] = 'SEDTRN' + if dat.SGRPN == "ROFLOW": + rec['SMEMN'] = 'ROSED' + rec['SMEMSB1'] = '1' + rec['SMEMSB2'] = '' + else: + rec['SMEMN'] = 'OSED' + rec['SMEMSB1'] = '1' + rec['SMEMSB2'] = dat.SMEMSB1 + rec['TMEMN'] = 'ISED1' + rec['TMEMSB1'] = dat.TMEMSB1 + rec['TMEMSB2'] = dat.TMEMSB2 + rec['SVOL'] = dat.SVOL + recs.append(rec) + # ISED2 + rec = {} + rec['MFACTOR'] = dat.MFACTOR + rec['SGRPN'] = 'SEDTRN' + if dat.SGRPN == "ROFLOW": + rec['SMEMN'] = 'ROSED' + rec['SMEMSB1'] = '2' + rec['SMEMSB2'] = '' + else: + rec['SMEMN'] = 'OSED' + rec['SMEMSB1'] = '2' + rec['SMEMSB2'] = dat.SMEMSB1 + rec['TMEMN'] = 'ISED2' + rec['TMEMSB1'] = dat.TMEMSB1 + rec['TMEMSB2'] = dat.TMEMSB2 + rec['SVOL'] = dat.SVOL + recs.append(rec) + # ISED3 + rec = {} + rec['MFACTOR'] = dat.MFACTOR + rec['SGRPN'] = 'SEDTRN' + if dat.SGRPN == "ROFLOW": + rec['SMEMN'] = 'ROSED' + rec['SMEMSB1'] = '3' + rec['SMEMSB2'] = '' + else: + rec['SMEMN'] = 'OSED' + rec['SMEMSB1'] = '3' + rec['SMEMSB2'] = dat.SMEMSB1 + rec['TMEMN'] = 'ISED3' + rec['TMEMSB1'] = dat.TMEMSB1 + rec['TMEMSB2'] = dat.TMEMSB2 + rec['SVOL'] = dat.SVOL + recs.append(rec) return 
recs \ No newline at end of file diff --git a/HSP2/SPECL.py b/HSP2/SPECL.py index cde37346..d6fa62c0 100644 --- a/HSP2/SPECL.py +++ b/HSP2/SPECL.py @@ -1,65 +1,58 @@ -''' process special actions in this domain -Notes: - - code for parsing UCI SPEC-ACTIONS is in HSP2tools/readUCI.py - - code for object classes that transform parsed data into OP codes for OM and STATE support - is in this directory tree as om_special_[action type].py, - - Ex: om_special_action.py contains object support and runtime functions for classic ACTIONS -''' - -from numba import njit -from pandas import DataFrame, date_range -import h5py - -def specl_load_actions(state, io_manager, siminfo): - if 'ACTIONS' in state['specactions']: - dc = state['specactions']['ACTIONS'] - #print(dc.index) - #print("speca entry 0:0", dc[0:0]) - #print("speca entry 0:1", dc[0:1]) - #print("speca entry 1:2", dc[1:2]) - #print("speca entry 0:", dc[0:]) - #print("speca entry 1:", dc[1:]) - #print("speca entry 1:1", dc[1:1]) - for ix in dc.index: - # add the items to the state['model_data'] dict - speca = dc[ix:(ix+1)] - # need to add a name attribute - opname = 'SPEC' + 'ACTION' + str(ix) - state['model_data'][opname] = {} - state['model_data'][opname]['name'] = opname - for ik in speca.keys(): - #print("looking for speca key ", ik) - state['model_data'][opname][ik] = speca.to_dict()[ik][ix] # add subscripts? - if ik == 'VARI': - if len(speca.to_dict()['S1'][ix]) > 0: - state['model_data'][opname][ik] += speca.to_dict()['S1'][ix] - if len(speca.to_dict()['S2'][ix]) > 0: - state['model_data'][opname][ik] += speca.to_dict()['S2'][ix] - state['model_data'][opname]['object_class'] = 'SpecialAction' - #print("model_data", ix, " = ", state['model_data'][opname]) - return - -def state_load_dynamics_specl(state, io_manager, siminfo): - specl_load_actions(state, io_manager, siminfo) - # others defined below, like: - # specl_load_uvnames(state, io_manager, siminfo) - # ... 
- return - -''' -# the code specl() is deprecated in favor of execution inside OM -# see om_special_action.py for example of object support and runtime functions for classic ACTIONS -CALL: specl(ui, ts, step, state_info, state_paths, state_ix, specactions) - store is the Pandas/PyTable open store - siminfo is a dictionary with simulation level infor (OP_SEQUENCE for example) - ui is a dictionary with RID specific HSPF UCI like data - ts is a dictionary with RID specific timeseries - state is a dictionary with value of variables at ts[step - 1] - specl_actions is a dictionary with all SPEC-ACTIONS entries -''' - -@njit -def specl(ui, ts, step, state_info, state_paths, state_ix, specactions): - # ther eis no need for _specl_ because this code must already be njit - return +''' process special actions in this domain +Notes: + - code for parsing UCI SPEC-ACTIONS is in HSP2tools/readUCI.py + - code for object classes that transform parsed data into OP codes for OM and STATE support + is in this directory tree as om_special_[action type].py, + - Ex: om_special_action.py contains object support and runtime functions for classic ACTIONS +''' + +from numba import njit +from pandas import DataFrame, date_range +import h5py + +def specl_load_actions(state, io_manager, siminfo): + if 'ACTIONS' in state['specactions']: + dc = state['specactions']['ACTIONS'] + for ix in dc.index: + # add the items to the state['model_data'] dict + speca = dc[ix:(ix+1)] + # need to add a name attribute + opname = 'SPEC' + 'ACTION' + str(ix) + state['model_data'][opname] = {} + state['model_data'][opname]['name'] = opname + for ik in speca.keys(): + #print("looking for speca key ", ik) + state['model_data'][opname][ik] = speca.to_dict()[ik][ix] # add subscripts? 
+ if ik == 'VARI': + if len(speca.to_dict()['S1'][ix]) > 0: + state['model_data'][opname][ik] += speca.to_dict()['S1'][ix] + if len(speca.to_dict()['S2'][ix]) > 0: + state['model_data'][opname][ik] += speca.to_dict()['S2'][ix] + state['model_data'][opname]['object_class'] = 'SpecialAction' + #print("model_data", ix, " = ", state['model_data'][opname]) + return + +def state_load_dynamics_specl(state, io_manager, siminfo): + specl_load_actions(state, io_manager, siminfo) + # others defined below, like: + # specl_load_uvnames(state, io_manager, siminfo) + # ... + return + +''' +# the code specl() is deprecated in favor of execution inside OM +# see om_special_action.py for example of object support and runtime functions for classic ACTIONS +CALL: specl(ui, ts, step, state_info, state_paths, state_ix, specactions) + store is the Pandas/PyTable open store + siminfo is a dictionary with simulation level infor (OP_SEQUENCE for example) + ui is a dictionary with RID specific HSPF UCI like data + ts is a dictionary with RID specific timeseries + state is a dictionary with value of variables at ts[step - 1] + specl_actions is a dictionary with all SPEC-ACTIONS entries +''' + +@njit +def specl(ui, ts, step, state_info, state_paths, state_ix, specactions): + # ther eis no need for _specl_ because this code must already be njit + return \ No newline at end of file diff --git a/HSP2/main.py b/HSP2/main.py index 194e2b86..0f37a429 100644 --- a/HSP2/main.py +++ b/HSP2/main.py @@ -1,458 +1,450 @@ -''' Copyright (c) 2020 by RESPEC, INC. -Author: Robert Heaphy, Ph.D. 
-License: LGPL2 -''' - -from re import S -from numpy import float64, float32 -from pandas import DataFrame, date_range -from pandas.tseries.offsets import Minute -from datetime import datetime as dt -import os -from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries -from HSP2.configuration import activities, noop, expand_masslinks -from HSP2.state import * -from HSP2.om import * -from HSP2.SPECL import * - -from HSP2IO.io import IOManager, SupportsReadTS, Category - -def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None: - """Runs main HSP2 program. - - Parameters - ---------- - - saveall: Boolean - [optional] Default is False. - Saves all calculated data ignoring SAVE tables. - jupyterlab: Boolean - [optional] Default is True. - Flag for specific output behavior for jupyter lab. - Return - ------------ - None - - """ - - hdfname = io_manager._input.file_path - if not os.path.exists(hdfname): - raise FileNotFoundError(f'{hdfname} HDF5 File Not Found') - - msg = messages() - msg(1, f'Processing started for file {hdfname}; saveall={saveall}') - - # read user control, parameters, states, and flags uci and map to local variables - uci_obj = io_manager.read_uci() - opseq = uci_obj.opseq - ddlinks = uci_obj.ddlinks - ddmasslinks = uci_obj.ddmasslinks - ddext_sources = uci_obj.ddext_sources - ddgener = uci_obj.ddgener - uci = uci_obj.uci - siminfo = uci_obj.siminfo - ftables = uci_obj.ftables - specactions = uci_obj.specactions - monthdata = uci_obj.monthdata - - start, stop = siminfo['start'], siminfo['stop'] - - copy_instances = {} - gener_instances = {} - - ####################################################################################### - # initialize STATE dicts - ####################################################################################### - # Set up Things in state that will be used in all modular activities like SPECL - state = init_state_dicts() - 
state_siminfo_hsp2(uci_obj, siminfo) - # Add support for dynamic functions to operate on STATE - # - Load any dynamic components if present, and store variables on objects - state_load_dynamics_hsp2(state, io_manager, siminfo) - # Iterate through all segments and add crucial paths to state - # before loading dynamic components that may reference them - for _, operation, segment, delt in opseq.itertuples(): - if operation != 'GENER' and operation != 'COPY': - for activity, function in activities[operation].items(): - if activity == 'HYDR': - state_context_hsp2(state, operation, segment, activity) - print("Init HYDR state context for domain", state['domain']) - hydr_init_ix(state['state_ix'], state['state_paths'], state['domain']) - elif activity == 'SEDTRN': - state_context_hsp2(state, operation, segment, activity) - sedtrn_init_ix(state['state_ix'], state['state_paths'], state['domain']) - # - finally stash specactions in state, not domain (segment) dependent so do it once - state['specactions'] = specactions # stash the specaction dict in state - state_load_dynamics_specl(state, io_manager, siminfo) # traditional special actions - state_load_dynamics_om(state, io_manager, siminfo) # operational model for custom python - # finalize all dynamically loaded components and prepare to run the model - state_om_model_run_prep(state, io_manager, siminfo) - ####################################################################################### - - # main processing loop - msg(1, f'Simulation Start: {start}, Stop: {stop}') - tscat = {} - for _, operation, segment, delt in opseq.itertuples(): - msg(2, f'{operation} {segment} DELT(minutes): {delt}') - siminfo['delt'] = delt - siminfo['tindex'] = date_range(start, stop, freq=Minute(delt))[1:] - siminfo['steps'] = len(siminfo['tindex']) - - if operation == 'COPY': - copy_instances[segment] = activities[operation](io_manager, siminfo, ddext_sources[(operation,segment)]) - elif operation == 'GENER': - try: - ts = 
get_timeseries(io_manager, ddext_sources[(operation, segment)], siminfo) - ts = get_gener_timeseries(ts, gener_instances, ddlinks[segment], ddmasslinks) - get_flows(io_manager, ts, {}, uci, segment, ddlinks, ddmasslinks, siminfo['steps'], msg) - gener_instances[segment] = activities[operation](segment, siminfo, copy_instances, gener_instances, ddlinks, ddmasslinks, ts, ddgener) - except NotImplementedError as e: - print(f"GENER '{segment}' may not function correctly. '{e}'") - else: - - # now conditionally execute all activity modules for the op, segment - ts = get_timeseries(io_manager,ddext_sources[(operation,segment)],siminfo) - ts = get_gener_timeseries(ts, gener_instances, ddlinks[segment],ddmasslinks) - flags = uci[(operation, 'GENERAL', segment)]['ACTIVITY'] - if operation == 'RCHRES': - # Add nutrient adsorption flags: - if flags['NUTRX'] == 1: - flags['TAMFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['NH3FG'] - flags['ADNHFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADNHFG'] - flags['PO4FG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['PO4FG'] - flags['ADPOFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADPOFG'] - - get_flows(io_manager, ts, flags, uci, segment, ddlinks, ddmasslinks, siminfo['steps'], msg) - - for activity, function in activities[operation].items(): - if function == noop: #or not flags[activity]: - continue - - if (activity in flags) and (not flags[activity]): - continue - - if (activity == 'RQUAL') and (not flags['OXRX']) and (not flags['NUTRX']) and (not flags['PLANK']) and (not flags['PHCARB']): - continue - - msg(3, f'{activity}') - # Set context for dynamic executables and special actions - state_context_hsp2(state, operation, segment, activity) - - ui = uci[(operation, activity, segment)] # ui is a dictionary - if operation == 'PERLND' and activity == 'SEDMNT': - # special exception here to make CSNOFG available - ui['PARAMETERS']['CSNOFG'] = uci[(operation, 'PWATER', segment)]['PARAMETERS']['CSNOFG'] - if operation 
== 'PERLND' and activity == 'PSTEMP': - # special exception here to make AIRTFG available - ui['PARAMETERS']['AIRTFG'] = flags['ATEMP'] - if operation == 'PERLND' and activity == 'PWTGAS': - # special exception here to make CSNOFG available - ui['PARAMETERS']['CSNOFG'] = uci[(operation, 'PWATER', segment)]['PARAMETERS']['CSNOFG'] - if operation == 'RCHRES': - if not 'PARAMETERS' in ui: - ui['PARAMETERS'] = {} - ui['PARAMETERS']['NEXITS'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['NEXITS'] - if activity == 'ADCALC': - ui['PARAMETERS']['ADFG'] = flags['ADCALC'] - ui['PARAMETERS']['KS'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['KS'] - ui['PARAMETERS']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] - ui['PARAMETERS']['ROS'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['ROS'] - nexits = uci[(operation, 'HYDR', segment)]['PARAMETERS']['NEXITS'] - for index in range(nexits): - ui['PARAMETERS']['OS' + str(index + 1)] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['OS'+ str(index + 1)] - if activity == 'HTRCH': - ui['PARAMETERS']['ADFG'] = flags['ADCALC'] - ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] - # ui['STATES']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] - if activity == 'CONS': - ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] - if activity == 'SEDTRN': - ui['PARAMETERS']['ADFG'] = flags['ADCALC'] - ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] - # ui['STATES']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] - ui['PARAMETERS']['HTFG'] = flags['HTRCH'] - ui['PARAMETERS']['AUX3FG'] = 0 - if flags['HYDR']: - ui['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] - ui['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] - ui['PARAMETERS']['DB50'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DB50'] - ui['PARAMETERS']['AUX3FG'] = uci[(operation, 'HYDR', 
segment)]['PARAMETERS']['AUX3FG'] - if activity == 'GQUAL': - ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] - ui['PARAMETERS']['HTFG'] = flags['HTRCH'] - ui['PARAMETERS']['SEDFG'] = flags['SEDTRN'] - # ui['PARAMETERS']['REAMFG'] = uci[(operation, 'OXRX', segment)]['PARAMETERS']['REAMFG'] - ui['PARAMETERS']['HYDRFG'] = flags['HYDR'] - if flags['HYDR']: - ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] - ui['PARAMETERS']['AUX1FG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['AUX1FG'] - ui['PARAMETERS']['AUX2FG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['AUX2FG'] - ui['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] - ui['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] - if flags['OXRX']: - ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] - ui['PARAMETERS']['CFOREA'] = uci[(operation, 'OXRX', segment)]['PARAMETERS']['CFOREA'] - if flags['SEDTRN']: - ui['PARAMETERS']['SSED1'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED1'] - ui['PARAMETERS']['SSED2'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED2'] - ui['PARAMETERS']['SSED3'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED3'] - if flags['HTRCH']: - ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['CFSAEX'] - elif flags['PLANK']: - if 'CFSAEX' in uci[(operation, 'PLANK', segment)]['PARAMETERS']: - ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'PLANK', segment)]['PARAMETERS']['CFSAEX'] - - if activity == 'RQUAL': - # RQUAL inputs: - ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] - if flags['HYDR']: - ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] - - ui['FLAGS']['HTFG'] = flags['HTRCH'] - ui['FLAGS']['SEDFG'] = flags['SEDTRN'] - ui['FLAGS']['GQFG'] = flags['GQUAL'] - ui['FLAGS']['OXFG'] = flags['OXFG'] - ui['FLAGS']['NUTFG'] = flags['NUTRX'] - 
ui['FLAGS']['PLKFG'] = flags['PLANK'] - ui['FLAGS']['PHFG'] = flags['PHCARB'] - if flags['CONS']: - if 'PARAMETERS' in uci[(operation, 'CONS', segment)]: - if 'NCONS' in uci[(operation, 'CONS', segment)]['PARAMETERS']: - ui['PARAMETERS']['NCONS'] = uci[(operation, 'CONS', segment)]['PARAMETERS']['NCONS'] - - # OXRX module inputs: - ui_oxrx = uci[(operation, 'OXRX', segment)] - - if flags['HYDR']: - ui_oxrx['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] - ui_oxrx['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] - - if flags['HTRCH']: - ui_oxrx['PARAMETERS']['ELEV'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['ELEV'] - - if flags['SEDTRN']: - ui['PARAMETERS']['SSED1'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED1'] - ui['PARAMETERS']['SSED2'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED2'] - ui['PARAMETERS']['SSED3'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED3'] - - # PLANK module inputs: - if flags['HTRCH']: - ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['CFSAEX'] - - # NUTRX, PLANK, PHCARB module inputs: - ui_nutrx = uci[(operation, 'NUTRX', segment)] - ui_plank = uci[(operation, 'PLANK', segment)] - ui_phcarb = uci[(operation, 'PHCARB', segment)] - - ############ calls activity function like snow() ############## - if operation not in ['COPY','GENER']: - if (activity == 'HYDR'): - errors, errmessages = function(io_manager, siminfo, ui, ts, ftables, state) - elif (activity == 'SEDTRN'): - errors, errmessages = function(io_manager, siminfo, ui, ts, state) - elif (activity != 'RQUAL'): - errors, errmessages = function(io_manager, siminfo, ui, ts) - else: - errors, errmessages = function(io_manager, siminfo, ui, ui_oxrx, ui_nutrx, ui_plank, ui_phcarb, ts, monthdata) - ############################################################### - - for errorcnt, errormsg in zip(errors, errmessages): - if errorcnt > 0: - msg(4, f'Error count 
{errorcnt}: {errormsg}') - - # default to hourly output - outstep = 2 - outstep_oxrx = 2 - outstep_nutrx = 2 - outstep_plank = 2 - outstep_phcarb = 2 - if 'BINOUT' in uci[(operation, 'GENERAL', segment)]: - if activity in uci[(operation, 'GENERAL', segment)]['BINOUT']: - outstep = uci[(operation, 'GENERAL', segment)]['BINOUT'][activity] - elif activity == 'RQUAL': - outstep_oxrx = uci[(operation, 'GENERAL', segment)]['BINOUT']['OXRX'] - outstep_nutrx = uci[(operation, 'GENERAL', segment)]['BINOUT']['NUTRX'] - outstep_plank = uci[(operation, 'GENERAL', segment)]['BINOUT']['PLANK'] - outstep_phcarb = uci[(operation, 'GENERAL', segment)]['BINOUT']['PHCARB'] - - if 'SAVE' in ui: - save_timeseries(io_manager,ts,ui['SAVE'],siminfo,saveall,operation,segment,activity,jupyterlab,outstep) - - if (activity == 'RQUAL'): - if 'SAVE' in ui_oxrx: save_timeseries(io_manager,ts,ui_oxrx['SAVE'],siminfo,saveall,operation,segment,'OXRX',jupyterlab,outstep_oxrx) - if 'SAVE' in ui_nutrx and flags['NUTRX'] == 1: save_timeseries(io_manager,ts,ui_nutrx['SAVE'],siminfo,saveall,operation,segment,'NUTRX',jupyterlab,outstep_nutrx) - if 'SAVE' in ui_plank and flags['PLANK'] == 1: save_timeseries(io_manager,ts,ui_plank['SAVE'],siminfo,saveall,operation,segment,'PLANK',jupyterlab,outstep_plank) - if 'SAVE' in ui_phcarb and flags['PHCARB'] == 1: save_timeseries(io_manager,ts,ui_phcarb['SAVE'],siminfo,saveall,operation,segment,'PHCARB',jupyterlab,outstep_phcarb) - - msglist = msg(1, 'Done', final=True) - - df = DataFrame(msglist, columns=['logfile']) - io_manager.write_log(df) - - if jupyterlab: - df = versions(['jupyterlab', 'notebook']) - io_manager.write_versioning(df) - print('\n\n', df) - return - -def messages(): - '''Closure routine; msg() prints messages to screen and run log''' - start = dt.now() - mlist = [] - def msg(indent, message, final=False): - now = dt.now() - m = str(now)[:22] + ' ' * indent + message - if final: - mn,sc = divmod((now-start).seconds, 60) - ms = 
(now-start).microseconds // 100_000 - m = '; '.join((m, f'Run time is about {mn:02}:{sc:02}.{ms} (mm:ss)')) - print(m) - mlist.append(m) - return mlist - return msg - -def get_flows(io_manager:SupportsReadTS, ts, flags, uci, segment, ddlinks, ddmasslinks, steps, msg): - # get inflows to this operation - for x in ddlinks[segment]: - if x.SVOL != 'GENER': # gener already handled in get_gener_timeseries - recs = [] - if x.MLNO == '': # Data from NETWORK part of Links table - rec = {} - rec['MFACTOR'] = x.MFACTOR - rec['SGRPN'] = x.SGRPN - rec['SMEMN'] = x.SMEMN - rec['SMEMSB1'] = x.SMEMSB1 - rec['SMEMSB2'] = x.SMEMSB2 - rec['TMEMN'] = x.TMEMN - rec['TMEMSB1'] = x.TMEMSB1 - rec['TMEMSB2'] = x.TMEMSB2 - rec['SVOL'] = x.SVOL - recs.append(rec) - else: # Data from SCHEMATIC part of Links table - mldata = ddmasslinks[x.MLNO] - for dat in mldata: - if dat.SMEMN != '': - rec = {} - rec['MFACTOR'] = dat.MFACTOR - rec['SGRPN'] = dat.SGRPN - rec['SMEMN'] = dat.SMEMN - rec['SMEMSB1'] = dat.SMEMSB1 - rec['SMEMSB2'] = dat.SMEMSB2 - rec['TMEMN'] = dat.TMEMN - rec['TMEMSB1'] = dat.TMEMSB1 - rec['TMEMSB2'] = dat.TMEMSB2 - rec['SVOL'] = dat.SVOL - recs.append(rec) - else: - # this is the kind that needs to be expanded - if dat.SGRPN == "ROFLOW" or dat.SGRPN == "OFLOW": - recs = expand_masslinks(flags,uci,dat,recs) - - for rec in recs: - mfactor = rec['MFACTOR'] - sgrpn = rec['SGRPN'] - smemn = rec['SMEMN'] - smemsb1 = rec['SMEMSB1'] - smemsb2 = rec['SMEMSB2'] - tmemn = rec['TMEMN'] - tmemsb1 = rec['TMEMSB1'] - tmemsb2 = rec['TMEMSB2'] - - if x.AFACTR != '': - afactr = x.AFACTR - factor = afactr * mfactor - else: - factor = mfactor - - # KLUDGE until remaining HSP2 modules are available. 
- if tmemn not in {'IVOL', 'ICON', 'IHEAT', 'ISED', 'ISED1', 'ISED2', 'ISED3', - 'IDQAL', 'ISQAL1', 'ISQAL2', 'ISQAL3', - 'OXIF', 'NUIF1', 'NUIF2', 'PKIF', 'PHIF', - 'ONE', 'TWO'}: - continue - if (sgrpn == 'OFLOW' and smemn == 'OVOL') or (sgrpn == 'ROFLOW' and smemn == 'ROVOL'): - sgrpn = 'HYDR' - if (sgrpn == 'OFLOW' and smemn == 'OHEAT') or (sgrpn == 'ROFLOW' and smemn == 'ROHEAT'): - sgrpn = 'HTRCH' - if (sgrpn == 'OFLOW' and smemn == 'OSED') or (sgrpn == 'ROFLOW' and smemn == 'ROSED'): - sgrpn = 'SEDTRN' - if (sgrpn == 'OFLOW' and smemn == 'ODQAL') or (sgrpn == 'ROFLOW' and smemn == 'RODQAL'): - sgrpn = 'GQUAL' - if (sgrpn == 'OFLOW' and smemn == 'OSQAL') or (sgrpn == 'ROFLOW' and smemn == 'ROSQAL'): - sgrpn = 'GQUAL' - if (sgrpn == 'OFLOW' and smemn == 'OXCF2') or (sgrpn == 'ROFLOW' and smemn == 'OXCF1'): - sgrpn = 'OXRX' - if (sgrpn == 'OFLOW' and (smemn == 'NUCF9' or smemn == 'OSNH4' or smemn == 'OSPO4')) or (sgrpn == 'ROFLOW' and (smemn == 'NUCF1' or smemn == 'NUFCF2')): - sgrpn = 'NUTRX' - if (sgrpn == 'OFLOW' and smemn == 'PKCF2') or (sgrpn == 'ROFLOW' and smemn == 'PKCF1'): - sgrpn = 'PLANK' - if (sgrpn == 'OFLOW' and smemn == 'PHCF2') or (sgrpn == 'ROFLOW' and smemn == 'PHCF1'): - sgrpn = 'PHCARB' - - if tmemn == 'ISED' or tmemn == 'ISQAL': - tmemn = tmemn + tmemsb1 # need to add sand, silt, clay subscript - if (sgrpn == 'HYDR' and smemn == 'OVOL') or (sgrpn == 'HTRCH' and smemn == 'OHEAT'): - smemsb2 = '' - if sgrpn == 'GQUAL' and smemsb2 == '': - smemsb2 = '1' - - smemn, tmemn = expand_timeseries_names(sgrpn, smemn, smemsb1, smemsb2, tmemn, tmemsb1, tmemsb2) - - path = f'RESULTS/{x.SVOL}_{x.SVOLNO}/{sgrpn}' - MFname = f'{x.SVOL}{x.SVOLNO}_MFACTOR' - AFname = f'{x.SVOL}{x.SVOLNO}_AFACTR' - data = f'{smemn}{smemsb1}{smemsb2}' - - data_frame = io_manager.read_ts(Category.RESULTS,x.SVOL,x.SVOLNO, sgrpn) - try: - if data in data_frame.columns: t = data_frame[data].astype(float64).to_numpy()[0:steps] - else: t = 
data_frame[smemn].astype(float64).to_numpy()[0:steps] - - if MFname in ts and AFname in ts: - t *= ts[MFname][:steps] * ts[AFname][0:steps] - msg(4, f'MFACTOR modified by timeseries {MFname}') - msg(4, f'AFACTR modified by timeseries {AFname}') - elif MFname in ts: - t *= afactr * ts[MFname][0:steps] - msg(4, f'MFACTOR modified by timeseries {MFname}') - elif AFname in ts: - t *= mfactor * ts[AFname][0:steps] - msg(4, f'AFACTR modified by timeseries {AFname}') - else: - t *= factor - - # if poht to iheat, imprecision in hspf conversion factor requires a slight adjustment - if (smemn == 'POHT' or smemn == 'SOHT') and tmemn == 'IHEAT': - t *= 0.998553 - if (smemn == 'PODOXM' or smemn == 'SODOXM') and tmemn == 'OXIF1': - t *= 1.000565 - - # ??? ISSUE: can fetched data be at different frequency - don't know how to transform. - if tmemn in ts: - ts[tmemn] += t - else: - ts[tmemn] = t - - except KeyError: - print('ERROR in FLOWS, cant resolve ', path + ' ' + smemn) - - return - -''' - - # This table defines the expansion to INFLOW, ROFLOW, OFLOW for RCHRES networks - d = [ - ['IVOL', 'ROVOL', 'OVOL', 'HYDRFG', 'HYDR'], - ['ICON', 'ROCON', 'OCON', 'CONSFG', 'CONS'], - ['IHEAT', 'ROHEAT', 'OHEAT', 'HTFG', 'HTRCH'], - ['ISED', 'ROSED', 'OSED', 'SEDFG', 'SEDTRN'], - ['IDQAL', 'RODQAL', 'ODQAL', 'GQALFG', 'GQUAL'], - ['ISQAL', 'ROSQAL', 'OSQAL', 'GQALFG', 'GQUAL'], - ['OXIF', 'OXCF1', 'OXCF2', 'OXFG', 'OXRX'], - ['NUIF1', 'NUCF1', 'NUCF1', 'NUTFG', 'NUTRX'], - ['NUIF2', 'NUCF2', 'NUCF9', 'NUTFG', 'NUTRX'], - ['PKIF', 'PKCF1', 'PKCH2', 'PLKFG', 'PLANK'], - ['PHIF', 'PHCF1', 'PHCF2', 'PHFG', 'PHCARB']] - df = pd.DataFrame(d, columns=['INFLOW', 'ROFLOW', 'OFLOW', 'Flag', 'Name']) - df.to_hdf(h2name, '/FLOWEXPANSION', format='t', data_columns=True) - - -''' +''' Copyright (c) 2020 by RESPEC, INC. +Author: Robert Heaphy, Ph.D. 
+License: LGPL2 +''' + +from re import S +from numpy import float64, float32 +from pandas import DataFrame, date_range +from pandas.tseries.offsets import Minute +from datetime import datetime as dt +import os +from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries +from HSP2.configuration import activities, noop, expand_masslinks +from HSP2.state import * +from HSP2.om import * +from HSP2.SPECL import * + +from HSP2IO.io import IOManager, SupportsReadTS, Category + +def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None: + """Runs main HSP2 program. + + Parameters + ---------- + + saveall: Boolean - [optional] Default is False. + Saves all calculated data ignoring SAVE tables. + jupyterlab: Boolean - [optional] Default is True. + Flag for specific output behavior for jupyter lab. + Return + ------------ + None + + """ + + hdfname = io_manager._input.file_path + if not os.path.exists(hdfname): + raise FileNotFoundError(f'{hdfname} HDF5 File Not Found') + + msg = messages() + msg(1, f'Processing started for file {hdfname}; saveall={saveall}') + + # read user control, parameters, states, and flags uci and map to local variables + uci_obj = io_manager.read_uci() + opseq = uci_obj.opseq + ddlinks = uci_obj.ddlinks + ddmasslinks = uci_obj.ddmasslinks + ddext_sources = uci_obj.ddext_sources + ddgener = uci_obj.ddgener + uci = uci_obj.uci + siminfo = uci_obj.siminfo + ftables = uci_obj.ftables + specactions = uci_obj.specactions + monthdata = uci_obj.monthdata + + start, stop = siminfo['start'], siminfo['stop'] + + copy_instances = {} + gener_instances = {} + + ####################################################################################### + # initialize STATE dicts + ####################################################################################### + # Set up Things in state that will be used in all modular activities like SPECL + state = init_state_dicts() + 
state_siminfo_hsp2(uci_obj, siminfo) + # Add support for dynamic functions to operate on STATE + # - Load any dynamic components if present, and store variables on objects + state_load_dynamics_hsp2(state, io_manager, siminfo) + # Iterate through all segments and add crucial paths to state + # before loading dynamic components that may reference them + state_init_hsp2(state, opseq, activities) + # - finally stash specactions in state, not domain (segment) dependent so do it once + state['specactions'] = specactions # stash the specaction dict in state + state_initialize_om(state) + state_load_dynamics_specl(state, io_manager, siminfo) # traditional special actions + state_load_dynamics_om(state, io_manager, siminfo) # operational model for custom python + # finalize all dynamically loaded components and prepare to run the model + state_om_model_run_prep(state, io_manager, siminfo) + ####################################################################################### + + # main processing loop + msg(1, f'Simulation Start: {start}, Stop: {stop}') + tscat = {} + for _, operation, segment, delt in opseq.itertuples(): + msg(2, f'{operation} {segment} DELT(minutes): {delt}') + siminfo['delt'] = delt + siminfo['tindex'] = date_range(start, stop, freq=Minute(delt))[1:] + siminfo['steps'] = len(siminfo['tindex']) + + if operation == 'COPY': + copy_instances[segment] = activities[operation](io_manager, siminfo, ddext_sources[(operation,segment)]) + elif operation == 'GENER': + try: + ts = get_timeseries(io_manager, ddext_sources[(operation, segment)], siminfo) + ts = get_gener_timeseries(ts, gener_instances, ddlinks[segment], ddmasslinks) + get_flows(io_manager, ts, {}, uci, segment, ddlinks, ddmasslinks, siminfo['steps'], msg) + gener_instances[segment] = activities[operation](segment, siminfo, copy_instances, gener_instances, ddlinks, ddmasslinks, ts, ddgener) + except NotImplementedError as e: + print(f"GENER '{segment}' may not function correctly. 
'{e}'") + else: + + # now conditionally execute all activity modules for the op, segment + ts = get_timeseries(io_manager,ddext_sources[(operation,segment)],siminfo) + ts = get_gener_timeseries(ts, gener_instances, ddlinks[segment],ddmasslinks) + flags = uci[(operation, 'GENERAL', segment)]['ACTIVITY'] + if operation == 'RCHRES': + # Add nutrient adsorption flags: + if flags['NUTRX'] == 1: + flags['TAMFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['NH3FG'] + flags['ADNHFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADNHFG'] + flags['PO4FG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['PO4FG'] + flags['ADPOFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADPOFG'] + + get_flows(io_manager, ts, flags, uci, segment, ddlinks, ddmasslinks, siminfo['steps'], msg) + + for activity, function in activities[operation].items(): + if function == noop: #or not flags[activity]: + continue + + if (activity in flags) and (not flags[activity]): + continue + + if (activity == 'RQUAL') and (not flags['OXRX']) and (not flags['NUTRX']) and (not flags['PLANK']) and (not flags['PHCARB']): + continue + + msg(3, f'{activity}') + # Set context for dynamic executables and special actions + state_context_hsp2(state, operation, segment, activity) + + ui = uci[(operation, activity, segment)] # ui is a dictionary + if operation == 'PERLND' and activity == 'SEDMNT': + # special exception here to make CSNOFG available + ui['PARAMETERS']['CSNOFG'] = uci[(operation, 'PWATER', segment)]['PARAMETERS']['CSNOFG'] + if operation == 'PERLND' and activity == 'PSTEMP': + # special exception here to make AIRTFG available + ui['PARAMETERS']['AIRTFG'] = flags['ATEMP'] + if operation == 'PERLND' and activity == 'PWTGAS': + # special exception here to make CSNOFG available + ui['PARAMETERS']['CSNOFG'] = uci[(operation, 'PWATER', segment)]['PARAMETERS']['CSNOFG'] + if operation == 'RCHRES': + if not 'PARAMETERS' in ui: + ui['PARAMETERS'] = {} + ui['PARAMETERS']['NEXITS'] = uci[(operation, 'HYDR', 
segment)]['PARAMETERS']['NEXITS'] + if activity == 'ADCALC': + ui['PARAMETERS']['ADFG'] = flags['ADCALC'] + ui['PARAMETERS']['KS'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['KS'] + ui['PARAMETERS']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] + ui['PARAMETERS']['ROS'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['ROS'] + nexits = uci[(operation, 'HYDR', segment)]['PARAMETERS']['NEXITS'] + for index in range(nexits): + ui['PARAMETERS']['OS' + str(index + 1)] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['OS'+ str(index + 1)] + if activity == 'HTRCH': + ui['PARAMETERS']['ADFG'] = flags['ADCALC'] + ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] + # ui['STATES']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] + if activity == 'CONS': + ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] + if activity == 'SEDTRN': + ui['PARAMETERS']['ADFG'] = flags['ADCALC'] + ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] + # ui['STATES']['VOL'] = uci[(operation, 'HYDR', segment)]['STATES']['VOL'] + ui['PARAMETERS']['HTFG'] = flags['HTRCH'] + ui['PARAMETERS']['AUX3FG'] = 0 + if flags['HYDR']: + ui['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] + ui['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] + ui['PARAMETERS']['DB50'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DB50'] + ui['PARAMETERS']['AUX3FG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['AUX3FG'] + if activity == 'GQUAL': + ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] + ui['PARAMETERS']['HTFG'] = flags['HTRCH'] + ui['PARAMETERS']['SEDFG'] = flags['SEDTRN'] + # ui['PARAMETERS']['REAMFG'] = uci[(operation, 'OXRX', segment)]['PARAMETERS']['REAMFG'] + ui['PARAMETERS']['HYDRFG'] = flags['HYDR'] + if flags['HYDR']: + ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] + ui['PARAMETERS']['AUX1FG'] = 
uci[(operation, 'HYDR', segment)]['PARAMETERS']['AUX1FG'] + ui['PARAMETERS']['AUX2FG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['AUX2FG'] + ui['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] + ui['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] + if flags['OXRX']: + ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] + ui['PARAMETERS']['CFOREA'] = uci[(operation, 'OXRX', segment)]['PARAMETERS']['CFOREA'] + if flags['SEDTRN']: + ui['PARAMETERS']['SSED1'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED1'] + ui['PARAMETERS']['SSED2'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED2'] + ui['PARAMETERS']['SSED3'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED3'] + if flags['HTRCH']: + ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['CFSAEX'] + elif flags['PLANK']: + if 'CFSAEX' in uci[(operation, 'PLANK', segment)]['PARAMETERS']: + ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'PLANK', segment)]['PARAMETERS']['CFSAEX'] + + if activity == 'RQUAL': + # RQUAL inputs: + ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData'] + if flags['HYDR']: + ui['PARAMETERS']['LKFG'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LKFG'] + + ui['FLAGS']['HTFG'] = flags['HTRCH'] + ui['FLAGS']['SEDFG'] = flags['SEDTRN'] + ui['FLAGS']['GQFG'] = flags['GQUAL'] + ui['FLAGS']['OXFG'] = flags['OXFG'] + ui['FLAGS']['NUTFG'] = flags['NUTRX'] + ui['FLAGS']['PLKFG'] = flags['PLANK'] + ui['FLAGS']['PHFG'] = flags['PHCARB'] + if flags['CONS']: + if 'PARAMETERS' in uci[(operation, 'CONS', segment)]: + if 'NCONS' in uci[(operation, 'CONS', segment)]['PARAMETERS']: + ui['PARAMETERS']['NCONS'] = uci[(operation, 'CONS', segment)]['PARAMETERS']['NCONS'] + + # OXRX module inputs: + ui_oxrx = uci[(operation, 'OXRX', segment)] + + if flags['HYDR']: + ui_oxrx['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN'] + 
ui_oxrx['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH'] + + if flags['HTRCH']: + ui_oxrx['PARAMETERS']['ELEV'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['ELEV'] + + if flags['SEDTRN']: + ui['PARAMETERS']['SSED1'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED1'] + ui['PARAMETERS']['SSED2'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED2'] + ui['PARAMETERS']['SSED3'] = uci[(operation, 'SEDTRN', segment)]['STATES']['SSED3'] + + # PLANK module inputs: + if flags['HTRCH']: + ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['CFSAEX'] + + # NUTRX, PLANK, PHCARB module inputs: + ui_nutrx = uci[(operation, 'NUTRX', segment)] + ui_plank = uci[(operation, 'PLANK', segment)] + ui_phcarb = uci[(operation, 'PHCARB', segment)] + + ############ calls activity function like snow() ############## + if operation not in ['COPY','GENER']: + if (activity == 'HYDR'): + errors, errmessages = function(io_manager, siminfo, ui, ts, ftables, state) + elif (activity == 'SEDTRN'): + errors, errmessages = function(io_manager, siminfo, ui, ts, state) + elif (activity != 'RQUAL'): + errors, errmessages = function(io_manager, siminfo, ui, ts) + else: + errors, errmessages = function(io_manager, siminfo, ui, ui_oxrx, ui_nutrx, ui_plank, ui_phcarb, ts, monthdata) + ############################################################### + + for errorcnt, errormsg in zip(errors, errmessages): + if errorcnt > 0: + msg(4, f'Error count {errorcnt}: {errormsg}') + + # default to hourly output + outstep = 2 + outstep_oxrx = 2 + outstep_nutrx = 2 + outstep_plank = 2 + outstep_phcarb = 2 + if 'BINOUT' in uci[(operation, 'GENERAL', segment)]: + if activity in uci[(operation, 'GENERAL', segment)]['BINOUT']: + outstep = uci[(operation, 'GENERAL', segment)]['BINOUT'][activity] + elif activity == 'RQUAL': + outstep_oxrx = uci[(operation, 'GENERAL', segment)]['BINOUT']['OXRX'] + outstep_nutrx = uci[(operation, 'GENERAL', 
segment)]['BINOUT']['NUTRX'] + outstep_plank = uci[(operation, 'GENERAL', segment)]['BINOUT']['PLANK'] + outstep_phcarb = uci[(operation, 'GENERAL', segment)]['BINOUT']['PHCARB'] + + if 'SAVE' in ui: + save_timeseries(io_manager,ts,ui['SAVE'],siminfo,saveall,operation,segment,activity,jupyterlab,outstep) + + if (activity == 'RQUAL'): + if 'SAVE' in ui_oxrx: save_timeseries(io_manager,ts,ui_oxrx['SAVE'],siminfo,saveall,operation,segment,'OXRX',jupyterlab,outstep_oxrx) + if 'SAVE' in ui_nutrx and flags['NUTRX'] == 1: save_timeseries(io_manager,ts,ui_nutrx['SAVE'],siminfo,saveall,operation,segment,'NUTRX',jupyterlab,outstep_nutrx) + if 'SAVE' in ui_plank and flags['PLANK'] == 1: save_timeseries(io_manager,ts,ui_plank['SAVE'],siminfo,saveall,operation,segment,'PLANK',jupyterlab,outstep_plank) + if 'SAVE' in ui_phcarb and flags['PHCARB'] == 1: save_timeseries(io_manager,ts,ui_phcarb['SAVE'],siminfo,saveall,operation,segment,'PHCARB',jupyterlab,outstep_phcarb) + + msglist = msg(1, 'Done', final=True) + + df = DataFrame(msglist, columns=['logfile']) + io_manager.write_log(df) + + if jupyterlab: + df = versions(['jupyterlab', 'notebook']) + io_manager.write_versioning(df) + print('\n\n', df) + return + +def messages(): + '''Closure routine; msg() prints messages to screen and run log''' + start = dt.now() + mlist = [] + def msg(indent, message, final=False): + now = dt.now() + m = str(now)[:22] + ' ' * indent + message + if final: + mn,sc = divmod((now-start).seconds, 60) + ms = (now-start).microseconds // 100_000 + m = '; '.join((m, f'Run time is about {mn:02}:{sc:02}.{ms} (mm:ss)')) + print(m) + mlist.append(m) + return mlist + return msg + +def get_flows(io_manager:SupportsReadTS, ts, flags, uci, segment, ddlinks, ddmasslinks, steps, msg): + # get inflows to this operation + for x in ddlinks[segment]: + if x.SVOL != 'GENER': # gener already handled in get_gener_timeseries + recs = [] + if x.MLNO == '': # Data from NETWORK part of Links table + rec = {} + rec['MFACTOR'] 
= x.MFACTOR + rec['SGRPN'] = x.SGRPN + rec['SMEMN'] = x.SMEMN + rec['SMEMSB1'] = x.SMEMSB1 + rec['SMEMSB2'] = x.SMEMSB2 + rec['TMEMN'] = x.TMEMN + rec['TMEMSB1'] = x.TMEMSB1 + rec['TMEMSB2'] = x.TMEMSB2 + rec['SVOL'] = x.SVOL + recs.append(rec) + else: # Data from SCHEMATIC part of Links table + mldata = ddmasslinks[x.MLNO] + for dat in mldata: + if dat.SMEMN != '': + rec = {} + rec['MFACTOR'] = dat.MFACTOR + rec['SGRPN'] = dat.SGRPN + rec['SMEMN'] = dat.SMEMN + rec['SMEMSB1'] = dat.SMEMSB1 + rec['SMEMSB2'] = dat.SMEMSB2 + rec['TMEMN'] = dat.TMEMN + rec['TMEMSB1'] = dat.TMEMSB1 + rec['TMEMSB2'] = dat.TMEMSB2 + rec['SVOL'] = dat.SVOL + recs.append(rec) + else: + # this is the kind that needs to be expanded + if dat.SGRPN == "ROFLOW" or dat.SGRPN == "OFLOW": + recs = expand_masslinks(flags,uci,dat,recs) + + for rec in recs: + mfactor = rec['MFACTOR'] + sgrpn = rec['SGRPN'] + smemn = rec['SMEMN'] + smemsb1 = rec['SMEMSB1'] + smemsb2 = rec['SMEMSB2'] + tmemn = rec['TMEMN'] + tmemsb1 = rec['TMEMSB1'] + tmemsb2 = rec['TMEMSB2'] + + if x.AFACTR != '': + afactr = x.AFACTR + factor = afactr * mfactor + else: + factor = mfactor + + # KLUDGE until remaining HSP2 modules are available. 
+ if tmemn not in {'IVOL', 'ICON', 'IHEAT', 'ISED', 'ISED1', 'ISED2', 'ISED3', + 'IDQAL', 'ISQAL1', 'ISQAL2', 'ISQAL3', + 'OXIF', 'NUIF1', 'NUIF2', 'PKIF', 'PHIF', + 'ONE', 'TWO'}: + continue + if (sgrpn == 'OFLOW' and smemn == 'OVOL') or (sgrpn == 'ROFLOW' and smemn == 'ROVOL'): + sgrpn = 'HYDR' + if (sgrpn == 'OFLOW' and smemn == 'OHEAT') or (sgrpn == 'ROFLOW' and smemn == 'ROHEAT'): + sgrpn = 'HTRCH' + if (sgrpn == 'OFLOW' and smemn == 'OSED') or (sgrpn == 'ROFLOW' and smemn == 'ROSED'): + sgrpn = 'SEDTRN' + if (sgrpn == 'OFLOW' and smemn == 'ODQAL') or (sgrpn == 'ROFLOW' and smemn == 'RODQAL'): + sgrpn = 'GQUAL' + if (sgrpn == 'OFLOW' and smemn == 'OSQAL') or (sgrpn == 'ROFLOW' and smemn == 'ROSQAL'): + sgrpn = 'GQUAL' + if (sgrpn == 'OFLOW' and smemn == 'OXCF2') or (sgrpn == 'ROFLOW' and smemn == 'OXCF1'): + sgrpn = 'OXRX' + if (sgrpn == 'OFLOW' and (smemn == 'NUCF9' or smemn == 'OSNH4' or smemn == 'OSPO4')) or (sgrpn == 'ROFLOW' and (smemn == 'NUCF1' or smemn == 'NUFCF2')): + sgrpn = 'NUTRX' + if (sgrpn == 'OFLOW' and smemn == 'PKCF2') or (sgrpn == 'ROFLOW' and smemn == 'PKCF1'): + sgrpn = 'PLANK' + if (sgrpn == 'OFLOW' and smemn == 'PHCF2') or (sgrpn == 'ROFLOW' and smemn == 'PHCF1'): + sgrpn = 'PHCARB' + + if tmemn == 'ISED' or tmemn == 'ISQAL': + tmemn = tmemn + tmemsb1 # need to add sand, silt, clay subscript + if (sgrpn == 'HYDR' and smemn == 'OVOL') or (sgrpn == 'HTRCH' and smemn == 'OHEAT'): + smemsb2 = '' + if sgrpn == 'GQUAL' and smemsb2 == '': + smemsb2 = '1' + + smemn, tmemn = expand_timeseries_names(sgrpn, smemn, smemsb1, smemsb2, tmemn, tmemsb1, tmemsb2) + + path = f'RESULTS/{x.SVOL}_{x.SVOLNO}/{sgrpn}' + MFname = f'{x.SVOL}{x.SVOLNO}_MFACTOR' + AFname = f'{x.SVOL}{x.SVOLNO}_AFACTR' + data = f'{smemn}{smemsb1}{smemsb2}' + + data_frame = io_manager.read_ts(Category.RESULTS,x.SVOL,x.SVOLNO, sgrpn) + try: + if data in data_frame.columns: t = data_frame[data].astype(float64).to_numpy()[0:steps] + else: t = 
data_frame[smemn].astype(float64).to_numpy()[0:steps] + + if MFname in ts and AFname in ts: + t *= ts[MFname][:steps] * ts[AFname][0:steps] + msg(4, f'MFACTOR modified by timeseries {MFname}') + msg(4, f'AFACTR modified by timeseries {AFname}') + elif MFname in ts: + t *= afactr * ts[MFname][0:steps] + msg(4, f'MFACTOR modified by timeseries {MFname}') + elif AFname in ts: + t *= mfactor * ts[AFname][0:steps] + msg(4, f'AFACTR modified by timeseries {AFname}') + else: + t *= factor + + # if poht to iheat, imprecision in hspf conversion factor requires a slight adjustment + if (smemn == 'POHT' or smemn == 'SOHT') and tmemn == 'IHEAT': + t *= 0.998553 + if (smemn == 'PODOXM' or smemn == 'SODOXM') and tmemn == 'OXIF1': + t *= 1.000565 + + # ??? ISSUE: can fetched data be at different frequency - don't know how to transform. + if tmemn in ts: + ts[tmemn] += t + else: + ts[tmemn] = t + + except KeyError: + print('ERROR in FLOWS, cant resolve ', path + ' ' + smemn) + + return + +''' + + # This table defines the expansion to INFLOW, ROFLOW, OFLOW for RCHRES networks + d = [ + ['IVOL', 'ROVOL', 'OVOL', 'HYDRFG', 'HYDR'], + ['ICON', 'ROCON', 'OCON', 'CONSFG', 'CONS'], + ['IHEAT', 'ROHEAT', 'OHEAT', 'HTFG', 'HTRCH'], + ['ISED', 'ROSED', 'OSED', 'SEDFG', 'SEDTRN'], + ['IDQAL', 'RODQAL', 'ODQAL', 'GQALFG', 'GQUAL'], + ['ISQAL', 'ROSQAL', 'OSQAL', 'GQALFG', 'GQUAL'], + ['OXIF', 'OXCF1', 'OXCF2', 'OXFG', 'OXRX'], + ['NUIF1', 'NUCF1', 'NUCF1', 'NUTFG', 'NUTRX'], + ['NUIF2', 'NUCF2', 'NUCF9', 'NUTFG', 'NUTRX'], + ['PKIF', 'PKCF1', 'PKCH2', 'PLKFG', 'PLANK'], + ['PHIF', 'PHCF1', 'PHCF2', 'PHFG', 'PHCARB']] + df = pd.DataFrame(d, columns=['INFLOW', 'ROFLOW', 'OFLOW', 'Flag', 'Name']) + df.to_hdf(h2name, '/FLOWEXPANSION', format='t', data_columns=True) + + +''' diff --git a/HSP2/om.py b/HSP2/om.py index c2d72f2c..46f06612 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -1,613 +1,622 @@ -# set up libraries to import for the load_sim_dicts function -# later, this will be drawing from the 
hdf5, but for now we -# are hard-wiring a set of components for testing. -# Note: these import calls must be done down here AFTER the helper functions -# defined aove that are called by the object classes -import random # this is only used for a demo so may be deprecated -import json -import requests -from requests.auth import HTTPBasicAuth -import csv -import pandas as pd -import numpy as np -import time -from numba.typed import Dict -from numpy import zeros, int32 -from numba import int8, float32, njit, types, typed # import the types -import random # this is only used for a demo so may be deprecated -from HSP2.state import * - - -def get_exec_order(model_exec_list, var_ix): - """ - Find the integer key of a variable name in state_ix - """ - model_exec_list = dict(enumerate(model_exec_list.flatten(), 1)) - for exec_order, ix in model_exec_list.items(): - if var_ix == ix: - # we need to add this to the state - return exec_order - return False - -def init_op_tokens(op_tokens, tops, eq_ix): - """ - Iinitialize the op_tokens Dict - This contains the runtime op code for every dynamic operation to be used - """ - for j in range(len(tops)): - if isinstance(tops[j], str): - # must add this to the state array as a constant - s_ix = append_state(state_ix, float(tops[j])) - tops[j] = s_ix - - op_tokens[eq_ix] = np.asarray(tops, dtype="i8") - -def is_float_digit(n: str) -> bool: - """ - Helper Function to determine if a variable is numeric - """ - try: - float(n) - return True - except ValueError: - return False - -# Import Code Classes -from HSP2.om_model_object import * -from HSP2.om_sim_timer import * -#from HSP2.om_equation import * -from HSP2.om_model_linkage import * -from HSP2.om_special_action import * -#from HSP2.om_data_matrix import * -#from HSP2.om_model_broadcast import * -#from HSP2.om_simple_channel import * -#from HSP2.om_impoundment import * -from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries - 
-def init_om_dicts(): - """ - The base dictionaries used to store model object info - """ - op_tokens = ModelObject.make_op_tokens() # this is just to start, layer it is resized to the object needs - # Was - #op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) - model_object_cache = {} # this does not need to be a special Dict as it is not used in numba - return op_tokens, model_object_cache - - -def state_load_om_json(state, io_manager, siminfo): - # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will - # be loadable by "model_loader_recursive()" - model_data = state['model_data'] - # JSON file would be in same path as hdf5 - hdf5_path = io_manager._input.file_path - (fbase, fext) = os.path.splitext(hdf5_path) - # see if there is custom json - fjson = fbase + ".json" - print("Looking for custom om json ", fjson) - if (os.path.isfile(fjson)): - print("Found local json file", fjson) - jfile = open(fjson) - json_data = json.load(jfile) - # dict.update() combines the arg dict with the base - model_data.update(json_data) - state['model_data'] = model_data - return - -def state_load_om_python(state, io_manager, siminfo): - # Look for a [hdf5 file base].py file with specific named functions - # - function "om_init_model": This function can be defined in the [model h5 base].py file containing things to be done - # early in the model loading, like setting up model objects. This file will already have been loaded by the state module, - # and will be present in the module variable hsp2_local_py (we should rename to state_local_py?) 
- # - this file may also contain other dynamically redefined functions such as state_step_hydr() - # which can contain code that is executed every timestep inside the _hydr_() function - # and can literally supply hooks for any desired user customizable code - hdf5_path = io_manager._input.file_path - (fbase, fext) = os.path.splitext(hdf5_path) - # see if there is a code module with custom python - print("Looking for custom om loader in python code ", (fbase + ".py")) - hsp2_local_py = state['hsp2_local_py'] - # Load a function from code if it exists - if 'om_init_model' in dir(hsp2_local_py): - hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokens'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) - - -def state_load_dynamics_om(state, io_manager, siminfo): - # this function will check to see if any of the multiple paths to loading - # dynamic operational model objects has been supplied for the model. - # Grab globals from state for easy handling - op_tokens, model_object_cache = init_om_dicts() - state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] - # set globals on ModelObject, this makes them persistent throughout all subsequent object instantiation and use - ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache, ModelObject.ts_ix = ( - op_tokens, state_paths, state_ix, dict_ix, model_object_cache, ts_ix - ) - state['op_tokens'], state['model_object_cache'] = op_tokens, model_object_cache - # load dynamic coding libraries if defined by user - # note: this used to be inside this function, I think that the loaded module should be no problem - # occuring within this function call, since this function is also called from another runtime engine - # but if things fail post develop-specact-1 pull requests we may investigate here - # also, it may be that this should be 
loaded elsewhere? - # comment state_load_om_python() to disable dynamic python - state_load_om_python(state, io_manager, siminfo) - state_load_om_json(state, io_manager, siminfo) - return - -def state_om_model_run_prep(state, io_manager, siminfo): - # Create the base that everything is added to. this object does nothing except host the rest. - model_root_object = ModelObject("") - # set up the timer as the first element - timer = SimTimer('timer', model_root_object, siminfo) - - # now instantiate and link objects - # state['model_data'] has alread been prepopulated from json, .py files, hdf5, etc. - model_loader_recursive(state['model_data'], model_root_object) - print("Loaded objects & paths: insures all paths are valid, connects models as inputs") - # both state['model_object_cache'] and the model_object_cache property of the ModelObject class def - # will hold a global repo for this data this may be redundant? They DO point to the same datset? - # since this is a function that accepts state as an argument and these were both set in state_load_dynamics_om - # we can assume they are there and functioning - model_object_cache = state['model_object_cache'] - model_path_loader(model_object_cache) - # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added - model_exec_list = [] - # put all objects in token form for fast runtime execution and sort according to dependency order - print("Tokenizing models") - ModelObject.op_tokens = ModelObject.make_op_tokens(max(ModelObject.state_ix.keys()) + 1) - model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) - op_tokens = ModelObject.op_tokens - print("op_tokens has", len(op_tokens),"elements") - # model_exec_list is the ordered list of component operations - #print("model_exec_list(", len(model_exec_list),"items):", model_exec_list) - # This is used to stash the model_exec_list in the dict_ix, this might be slow, need to verify. 
- # the resulting set of objects is returned. - state['state_step_om'] = 'disabled' - state['model_object_cache'] = model_object_cache - state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") - if ModelObject.ops_data_type == 'ndarray': - state['state_ix'] = np.asarray(list(state['state_ix'].values()), dtype="float64") - state['op_tokens'] = op_tokens - if len(op_tokens) > 0: - state['state_step_om'] = 'enabled' - return - -# model class reader -# get model class to guess object type in this lib -# the parent object must be known -def model_class_loader(model_name, model_props, container = False): - # todo: check first to see if the model_name is an attribute on the container - # Use: if hasattr(container, model_name): - # if so, we set the value on the container, if not, we create a new subcomp on the container - if model_props == None: - return False - if type(model_props) is str: - if is_float_digit(model_props): - model_object = ModelConstant(model_name, container, float(model_props) ) - return model_object - else: - return False - elif type(model_props) is dict: - object_class = model_props.get('object_class') - if object_class == None: - # return as this is likely an attribute that is used for the containing class as attribute - # and is handled by the container - # todo: we may want to handle this here? Or should this be a method on the class? - # Use: if hasattr(container, model_name): - return False - model_object = False - # Note: this routine uses the ".get()" method of the dict class type - # for attributes to pass in. - # ".get()" will return NoValue if it does not exist or the value. 
- if object_class == 'Equation': - model_object = Equation(model_props.get('name'), container, model_props ) - #remove_used_keys(model_props, - elif object_class == 'SimpleChannel': - model_object = SimpleChannel(model_props.get('name'), container, model_props ) - elif object_class == 'Impoundment': - model_object = Impoundment(model_props.get('name'), container, model_props ) - elif object_class == 'Constant': - model_object = ModelConstant(model_props.get('name'), container, model_props.get('value') ) - elif ( object_class.lower() == 'datamatrix'): - # add a matrix with the data, then add a matrix accessor for each required variable - has_props = DataMatrix.check_properties(model_props) - if has_props == False: - print("Matrix object must have", DataMatrix.required_properties()) - return False - # create it - model_object = DataMatrix(model_props.get('name'), container, model_props) - elif object_class == 'ModelBroadcast': - # add a matrix with the data, then add a matrix accessor for each required variable - has_props = ModelBroadcast.check_properties(model_props) - if has_props == False: - print("ModelBroadcast object must have", ModelBroadcast.required_properties()) - return False - # create it - model_object = ModelBroadcast(model_props.get('name'), container, model_props) - elif object_class == 'MicroWatershedModel': - # add a matrix with the data, then add a matrix accessor for each required variable - has_props = MicroWatershedModel.check_properties(model_props) - if has_props == False: - print("MicroWatershedModel object must have", MicroWatershedModel.required_properties()) - return False - # create it - model_object = DataMatrix(model_props.get('name'), container, model_props) - elif object_class == 'ModelLinkage': - model_object = ModelLinkage(model_props.get('name'), container, model_props) - elif object_class == 'SpecialAction': - model_object = SpecialAction(model_props.get('name'), container, model_props) - else: - #print("Loading", 
model_props.get('name'), "with object_class", object_class,"as ModelObject") - model_object = ModelObject(model_props.get('name'), container, model_props) - # one way to insure no class attributes get parsed as sub-comps is: - # model_object.remove_used_keys() - if len(model_object.model_props_parsed) == 0: - # attach these to the object for posterity - model_object.model_props_parsed = model_props - # better yet to just NOT send those attributes as typed object_class arrays, instead just name : value - return model_object - -def model_class_translate(model_props, object_class): - # make adjustments to non-standard items - # this might better be moved to methods on the class handlers - if object_class == 'hydroImpoundment': - # special handling of matrix/storage_stage_area column - # we need to test to see if the storage table has been renamed - # make table from matrix or storage_stage_area - # then make accessors from - storage_stage_area = model_props.get('storage_stage_area') - matrix = model_props.get('matrix') - if ( (storage_stage_area == None) and (matrix != None)): - model_props['storage_stage_area'] = matrix - del model_props['matrix'] - if object_class == 'broadCastObject': - model_props['object_class'] = 'ModelBroadcast' - model_props['broadcast_channel'] = model_props['broadcast_class'] - if object_class == 'USGSChannelGeomObject_sub': - model_props['object_class'] = 'SimpleChannel' - print("Handling USGSChannelGeomObject_sub as SimpleChannel") - if object_class == 'hydroImpoundment': - model_props['object_class'] = 'Impoundment' - print("Handling hydroImpoundment as Impoundment") - if object_class == 'hydroImpSmall': - model_props['object_class'] = 'Impoundment' - print("Handling hydroImpSmall as Impoundment") - -def model_loader_recursive(model_data, container): - k_list = model_data.keys() - object_names = dict.fromkeys(k_list , 1) - if type(object_names) is not dict: - return False - for object_name in object_names: - #print("Handling", 
object_name) - if object_name in {'name', 'object_class', 'id', 'value', 'default'}: - # we should ask the class what properties are part of the class and also skips these - # therefore, we can assume that anything else must be a child object that needs to - # be handled first -- but how to do this? - continue - model_props = model_data[object_name] - if type(model_props) is not dict: - # this is a constant, the loader is built to handle this, but this causes errors with - # properties on the class that are expected so we just skip and trust that all constants - # are formally declared as type Constant - continue - if type(model_props) is dict: - if not ('object_class' in model_props): - # this is either a class attribute or an un-handleable meta-data - # if the class atttribute exists, we should pass it to container to load - #print("Skipping un-typed", object_name) - continue - #print("Translating", object_name) - # this is a kludge, but can be important - object_class = model_props['object_class'] - model_class_translate(model_props, object_class) - # now we either have a constant (key and value), or a - # fully defined object. Either one should work OK. - #print("Trying to load", object_name) - model_object = model_class_loader(object_name, model_props, container) - if model_object == False: - print("Could not load", object_name) - continue # not handled, but for now we will continue, tho later we should bail? 
- # now for container type objects, go through its properties and handle - if type(model_props) is dict: - model_loader_recursive(model_props, model_object) - -def model_path_loader(model_object_cache): - k_list = model_object_cache.keys() - model_names = dict.fromkeys(k_list , 1) - for model_name in model_names: - #print("Loading paths for", model_name) - model_object = model_object_cache[model_name] - model_object.find_paths() - - -def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): - """ - Given a root model_object, trace the inputs to load things in order - Store this order in model_exec_list - Note: All ordering is as-needed organic, except Broadcasts - - read from children is completed after all other inputs - - read from parent is completed before all other inputs - - could this be accomplished by more sophisticated handling of read - broadcasts? - - When loading a read broadcast, can we iterate through items - that are sending to that broadcast? - - Or is it better to let it as it is, - """ - #print("Handling", model_object.name, " ", model_object.state_path) - if model_object.ix in model_exec_list: - return - if model_object.ix in model_touch_list: - #print("Already touched", model_object.name, model_object.ix, model_object.state_path) - return - # record as having been called, and will ultimately return, to prevent recursions - model_touch_list.append(model_object.ix) - k_list = model_object.inputs.keys() - input_names = dict.fromkeys(k_list , 1) - if type(input_names) is not dict: - return - # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? - # is this a limitation of treating all children as inputs? - # alternative, leave broadcasts organic, but load children first? - # children first, then local sub-comps is old method? 
old method: - # - read parent broadcasts - # - get inputs (essentially, linked vars) - # - send child broadcasts (will send current step parent reads, last step local proc data) - # - execute children - # - execute local sub-comps - for input_name in input_names: - #print("Checking input", input_name) - input_path = model_object.inputs[input_name] - if input_path in model_object_cache.keys(): - input_object = model_object_cache[input_path] - model_tokenizer_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) - else: - if input_path in model_object.state_paths.keys(): - # this is a valid state reference without an object - # thus, it is likely part of internals that are manually added - # which should be fine. tho perhaps we should have an object for these too. - continue - print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") - return - # now after tokenizing all inputs this should be OK to tokenize - model_object.add_op_tokens() - if model_object.optype in ModelObject.runnables: - model_exec_list.append(model_object.ix) - - -def model_order_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): - """ - Given a root model_object, trace the inputs to load things in order - Store this order in model_exec_list - Note: All ordering is as-needed organic, except Broadcasts - - read from children is completed after all other inputs - - read from parent is completed before all other inputs - - could this be accomplished by more sophisticated handling of read - broadcasts? - - When loading a read broadcast, can we iterate through items - that are sending to that broadcast? 
- - Or is it better to let it as it is, - """ - if model_object.ix in model_exec_list: - return - if model_object.ix in model_touch_list: - #print("Already touched", model_object.name, model_object.ix, model_object.state_path) - return - # record as having been called, and will ultimately return, to prevent recursions - model_touch_list.append(model_object.ix) - k_list = model_object.inputs.keys() - input_names = dict.fromkeys(k_list , 1) - if type(input_names) is not dict: - return - # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? - # is this a limitation of treating all children as inputs? - # alternative, leave broadcasts organic, but load children first? - # children first, then local sub-comps is old method? old method: - # - read parent broadcasts - # - get inputs (essentially, linked vars) - # - send child broadcasts (will send current step parent reads, last step local proc data) - # - execute children - # - execute local sub-comps - for input_name in input_names: - #print("Checking input", input_name) - input_path = model_object.inputs[input_name] - if input_path in model_object_cache.keys(): - input_object = model_object_cache[input_path] - model_order_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) - else: - if input_path in model_object.state_paths.keys(): - # this is a valid state reference without an object - # thus, it is likely part of internals that are manually added - # which should be fine. tho perhaps we should have an object for these too. 
- continue - print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") - return - # now after loading input dependencies, add this to list - model_exec_list.append(model_object.ix) - -def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts): - # Decide on using from utilities.py: - # - save_timeseries(io_manager, ts, savedict, siminfo, saveall, operation, segment, activity, compress=True) - # Or, skip the save_timeseries wrapper and call write_ts() directly in io.py: - # write_ts(self, data_frame:pd.DataFrame, save_columns: List[str], category:Category, operation:Union[str,None]=None, segment:Union[str,None]=None, activity:Union[str,None]=None) - # see line 317 in utilities.py for use example of write_ts() - x = 0 # dummy - return - -@njit -def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, dstep = -1): - checksum = 0.0 - for step in range(steps): - pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - #print("Steps completed", step) - return checksum - -@njit -def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - for i in model_exec_list: - if op_tokens[i][0] == 12: - # register type data (like broadcast accumulators) - pass#pre_step_register(op_tokens[i], state_ix, dict_ix) - return - -@njit -def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - for i in model_exec_list: - step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) - return - - - -@njit -def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): - # op_tokens is passed in for ops like matrices that have lookups from other - # locations. All others rely only on ops - # todo: decide if all step_[class() functions should set value in state_ix instead of returning value? 
- val = 0 - if debug == 1: - print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) - if ops[0] == 1: - pass #step_equation(ops, state_ix) - elif ops[0] == 2: - # todo: this should be moved into a single function, - # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) - if (ops[1] == ops[2]): - if debug == 1: - print("DEBUG: Calling exec_tbl_values", ops) - # this insures a matrix with variables in it is up to date - # only need to do this if the matrix data and matrix config are on same object - # otherwise, the matrix data is an input and has already been evaluated - pass #exec_tbl_values(ops, state_ix, dict_ix) - if (ops[3] > 0): - # this evaluates a single value from a matrix if the matrix is configured to do so. - if debug == 1: - print("DEBUG: Calling exec_tbl_eval", ops) - pass #exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) - elif ops[0] == 3: - step_model_link(ops, state_ix, ts_ix, step) - elif ops[0] == 4: - val = 0 - elif ops[0] == 5: - step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) - elif ops[0] == 9: - val = 0 - elif ops[0] == 13: - pass #step_simple_channel(ops, state_ix, dict_ix, step) - # Op 100 is Basic ACTION in Special Actions - elif ops[0] == 100: - step_special_action(ops, state_ix, dict_ix, step) - return - -@njit -def step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, debug_step = -1): - val = 0 - for i in model_exec_list: - ops = op_tokens[i] - if (step == debug_step): - print("Exec'ing step ", step, " model ID", i) - # op_tokens is passed in for ops like matrices that have lookups from other - # locations. All others rely only on ops - # todo: decide if all step_[class() functions should set value in state_ix instead of returning value? 
- val = 0 - if ops[0] == 1: - step_equation(ops, state_ix) - elif ops[0] == 2: - # todo: this should be moved into a single function, - # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) - if (ops[1] == ops[2]): - # this insures a matrix with variables in it is up to date - # only need to do this if the matrix data and matrix config are on same object - # otherwise, the matrix data is an input and has already been evaluated - state_ix[ops[1]] = exec_tbl_values(ops, state_ix, dict_ix) - if (ops[3] > 0): - # this evaluates a single value from a matrix if the matrix is configured to do so. - state_ix[ops[1]] = exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) - elif ops[0] == 3: - step_model_link(ops, state_ix, ts_ix, step) - continue - elif ops[0] == 5: - step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) - elif ops[0] == 9: - continue - elif ops[0] == 13: - step_simple_channel(ops, state_ix, dict_ix, step) - # Op 100 is Basic ACTION in Special Actions - elif ops[0] == 100: - step_special_action(ops, state_ix, dict_ix, step) - return - -@njit -def step_model_pcode(model_exec_list, op_tokens, state_info, state_paths, state_ix, dict_ix, ts_ix, step): - ''' - This routine includes support for dynamically loaded python code which is powerful but slow - This is not yet implemented anywhere, just an idea. But in theory it would allow easy switching between - the faster runtime without dynamic code if the user did not request it. - At minimum, this could be used to more efficiently enable/disable this feature for testing by simply calling - a separate routine. - - to do so we would need to add state_paths to the variables passed to step_model which should be OK? 
- ''' - hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) # could be done more efficiently, once per model run - state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) - val = 0 - for i in model_exec_list: - step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) - return - -@njit -def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - return - -@njit -def test_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - val = 0 - for i in model_exec_list: - print(i) - print(op_tokens[i][0]) - print(op_tokens[i]) - step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) - return - -def step_object(thisobject, step): - # this calls the step for a given model object and timestep - # this is a workaround since the object method ModelObject.step() fails to find the step_one() function ? - step_one(thisobject.op_tokens, thisobject.op_tokens[thisobject.ix], thisobject.state_ix, thisobject.dict_ix, thisobject.ts_ix, step) - - -@njit -def pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): - for i in model_exec_list: - ops = op_tokens[i] - #for i in model_exec_list: - # op = op_tokens[i] - if ops[0] == 12: - # register type data (like broadcast accumulators) - pre_step_register(ops, state_ix, dict_ix) - continue - #elif ops[0] == 1: - # # register type data (like broadcast accumulators) - # continue - return - -@njit -def test_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): -# for i in model_exec_list: -# ops = op_tokens[i] - for ops in op_tokens: - #step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step) - continue - return - -@njit -def iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, debug_step = -1): - checksum = 0.0 - for step in range(steps): - pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) - step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, 
debug_step) - #print("Steps completed", step) - return checksum - -def time_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps): - start = time.time() - iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps) - end = time.time() - print(len(model_exec_list), "components iterated over", siminfo['steps'], "time steps took" , end - start, "seconds") +# set up libraries to import for the load_sim_dicts function +# later, this will be drawing from the hdf5, but for now we +# are hard-wiring a set of components for testing. +# Note: these import calls must be done down here AFTER the helper functions +# defined aove that are called by the object classes +import random # this is only used for a demo so may be deprecated +import json +import requests +from requests.auth import HTTPBasicAuth +import csv +import pandas as pd +import numpy as np +import time +from numba.typed import Dict +from numpy import zeros, int32 +from numba import int8, float32, njit, types, typed # import the types +import random # this is only used for a demo so may be deprecated +from HSP2.state import * + + +def get_exec_order(model_exec_list, var_ix): + """ + Find the integer key of a variable name in state_ix + """ + model_exec_list = dict(enumerate(model_exec_list.flatten(), 1)) + for exec_order, ix in model_exec_list.items(): + if var_ix == ix: + # we need to add this to the state + return exec_order + return False + +def init_op_tokens(op_tokens, tops, eq_ix): + """ + Iinitialize the op_tokens Dict + This contains the runtime op code for every dynamic operation to be used + """ + for j in range(len(tops)): + if isinstance(tops[j], str): + # must add this to the state array as a constant + s_ix = append_state(state_ix, float(tops[j])) + tops[j] = s_ix + + op_tokens[eq_ix] = np.asarray(tops, dtype="i8") + +def is_float_digit(n: str) -> bool: + """ + Helper Function to determine if a variable is numeric + """ + try: + float(n) + return True + except ValueError: + 
return False + +# Import Code Classes +from HSP2.om_model_object import * +from HSP2.om_sim_timer import * +from HSP2.om_equation import * +from HSP2.om_model_linkage import * +from HSP2.om_special_action import * +from HSP2.om_data_matrix import * +from HSP2.om_model_broadcast import * +#from HSP2.om_simple_channel import * +#from HSP2.om_impoundment import * +from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries + +def init_om_dicts(): + """ + The base dictionaries used to store model object info + """ + op_tokens = ModelObject.make_op_tokens() # this is just to start, layer it is resized to the object needs + # Was + #op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) + model_object_cache = {} # this does not need to be a special Dict as it is not used in numba + return op_tokens, model_object_cache + + +def state_load_om_json(state, io_manager, siminfo): + # - model objects defined in file named '[model h5 base].json -- this will populate an array of object definitions that will + # be loadable by "model_loader_recursive()" + # JSON file would be in same path as hdf5 + hdf5_path = io_manager._input.file_path + (fbase, fext) = os.path.splitext(hdf5_path) + # see if there is custom json + fjson = fbase + ".json" + print("Looking for custom om json ", fjson) + if (os.path.isfile(fjson)): + print("Found local json file", fjson) + jfile = open(fjson) + json_data = json.load(jfile) + # dict.update() combines the arg dict with the base + state['model_data'].update(json_data) + # merge in the json siminfo data + if 'siminfo' in state['model_data'].keys(): + siminfo.update(state['model_data']['siminfo']) + return + +def state_load_om_python(state, io_manager, siminfo): + # Look for a [hdf5 file base].py file with specific named functions + # - function "om_init_model": This function can be defined in the [model h5 base].py file containing things to be done + # early in the model loading, 
like setting up model objects. This file will already have been loaded by the state module, + # and will be present in the module variable hsp2_local_py (we should rename to state_local_py?) + # - this file may also contain other dynamically redefined functions such as state_step_hydr() + # which can contain code that is executed every timestep inside the _hydr_() function + # and can literally supply hooks for any desired user customizable code + hdf5_path = io_manager._input.file_path + (fbase, fext) = os.path.splitext(hdf5_path) + # see if there is a code module with custom python + print("Looking for custom om loader in python code ", (fbase + ".py")) + hsp2_local_py = state['hsp2_local_py'] + # Load a function from code if it exists + if 'om_init_model' in dir(hsp2_local_py): + hsp2_local_py.om_init_model(io_manager, siminfo, state['op_tokens'], state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'], state['model_object_cache']) + +def state_initialize_om(state): + # this function will check to see if any of the multiple paths to loading + # dynamic operational model objects has been supplied for the model. + # Grab globals from state for easy handling + op_tokens, model_object_cache = init_om_dicts() + state_paths, state_ix, dict_ix, ts_ix = state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] + # set globals on ModelObject, this makes them persistent throughout all subsequent object instantiation and use + ModelObject.op_tokens, ModelObject.state_paths, ModelObject.state_ix, ModelObject.dict_ix, ModelObject.model_object_cache, ModelObject.ts_ix = ( + op_tokens, state_paths, state_ix, dict_ix, model_object_cache, ts_ix + ) + state['op_tokens'], state['model_object_cache'] = op_tokens, model_object_cache + + +def state_load_dynamics_om(state, io_manager, siminfo): + # this function will check to see if any of the multiple paths to loading + # dynamic operational model objects has been supplied for the model. 
+ # state_initialize_om(state) must have been called already + # load dynamic coding libraries if defined by user + # note: this used to be inside this function, I think that the loaded module should be no problem + # occuring within this function call, since this function is also called from another runtime engine + # but if things fail post develop-specact-1 pull requests we may investigate here + # also, it may be that this should be loaded elsewhere? + # comment state_load_om_python() to disable dynamic python + state_load_om_python(state, io_manager, siminfo) + state_load_om_json(state, io_manager, siminfo) + return + +def state_om_model_root_object(state, siminfo): + # Create the base that everything is added to. this object does nothing except host the rest. + if 'model_root_object' not in state.keys(): + model_root_object = ModelObject("") # we give this no name so that it does not interfer with child paths like timer, year, etc (i.e. /STATE/year, ...) + state['model_root_object'] = model_root_object + # set up the timer as the first element + if '/STATE/timer' not in state['state_paths'].keys(): + timer = SimTimer('timer', model_root_object, siminfo) + # add base object for the HSP2 domains and other things already added to state so they can be influenced + for (seg_name,seg_path) in state['hsp_segments'].items(): + if (seg_path not in state['model_object_cache'].keys()): + # Create an object shell for this + new_object = ModelObject(seg_name) + + +def state_om_model_run_prep(state, io_manager, siminfo): + # insure model base is set + state_om_model_root_object(state, siminfo) + # now instantiate and link objects + # state['model_data'] has alread been prepopulated from json, .py files, hdf5, etc. 
+ model_root_object = state['model_root_object'] + model_loader_recursive(state['model_data'], model_root_object) + print("Loaded objects & paths: insures all paths are valid, connects models as inputs") + # both state['model_object_cache'] and the model_object_cache property of the ModelObject class def + # will hold a global repo for this data this may be redundant? They DO point to the same datset? + # since this is a function that accepts state as an argument and these were both set in state_load_dynamics_om + # we can assume they are there and functioning + if 'model_object_cache' in state.keys(): + model_object_cache = state['model_object_cache'] + else: + model_object_cache = ModelObject.model_object_cache + model_path_loader(model_object_cache) + # len() will be 1 if we only have a simtimer, but > 1 if we have a river being added + model_exec_list = [] + # put all objects in token form for fast runtime execution and sort according to dependency order + print("Tokenizing models") + if 'ops_data_type' in siminfo.keys(): + ModelObject.ops_data_type = siminfo['ops_data_type'] # allow override of dat astructure settings + ModelObject.op_tokens = ModelObject.make_op_tokens(max(ModelObject.state_ix.keys()) + 1) + model_tokenizer_recursive(model_root_object, model_object_cache, model_exec_list) + op_tokens = ModelObject.op_tokens + # model_exec_list is the ordered list of component operations + #print("model_exec_list(", len(model_exec_list),"items):", model_exec_list) + # This is used to stash the model_exec_list in the dict_ix, this might be slow, need to verify. + # the resulting set of objects is returned. 
+ state['state_step_om'] = 'disabled' + state['model_object_cache'] = model_object_cache + state['model_exec_list'] = np.asarray(model_exec_list, dtype="i8") + if ModelObject.ops_data_type == 'ndarray': + state_keyvals = np.asarray(zeros(max(ModelObject.state_ix.keys()) + 1), dtype="float32") + for ix, val in ModelObject.state_ix.items(): + state_keyvals[ix] = val + state['state_ix'] = state_keyvals + else: + state['state_ix'] = ModelObject.state_ix + state['op_tokens'] = op_tokens + if len(op_tokens) > 0: + state['state_step_om'] = 'enabled' + print("op_tokens is type", type(op_tokens)) + print("state_ix is type", type(state['state_ix'])) + print("op_tokens has", len(op_tokens),"elements, with ", len(model_exec_list),"executable elements") + return + +# model class reader +# get model class to guess object type in this lib +# the parent object must be known +def model_class_loader(model_name, model_props, container = False): + # todo: check first to see if the model_name is an attribute on the container + # Use: if hasattr(container, model_name): + # if so, we set the value on the container, if not, we create a new subcomp on the container + if model_props == None: + return False + if type(model_props) is str: + if is_float_digit(model_props): + model_object = ModelConstant(model_name, container, float(model_props) ) + return model_object + else: + return False + elif type(model_props) is dict: + object_class = model_props.get('object_class') + if object_class == None: + # return as this is likely an attribute that is used for the containing class as attribute + # and is handled by the container + # todo: we may want to handle this here? Or should this be a method on the class? + # Use: if hasattr(container, model_name): + return False + model_object = False + # Note: this routine uses the ".get()" method of the dict class type + # for attributes to pass in. + # ".get()" will return NoValue if it does not exist or the value. 
+ if object_class == 'Equation': + model_object = Equation(model_props.get('name'), container, model_props ) + #remove_used_keys(model_props, + elif object_class == 'SimpleChannel': + model_object = SimpleChannel(model_props.get('name'), container, model_props ) + elif object_class == 'Impoundment': + model_object = Impoundment(model_props.get('name'), container, model_props ) + elif object_class == 'Constant': + model_object = ModelConstant(model_props.get('name'), container, model_props.get('value') ) + elif ( object_class.lower() == 'datamatrix'): + # add a matrix with the data, then add a matrix accessor for each required variable + has_props = DataMatrix.check_properties(model_props) + if has_props == False: + print("Matrix object must have", DataMatrix.required_properties()) + return False + # create it + model_object = DataMatrix(model_props.get('name'), container, model_props) + elif object_class == 'ModelBroadcast': + # add a matrix with the data, then add a matrix accessor for each required variable + has_props = ModelBroadcast.check_properties(model_props) + if has_props == False: + print("ModelBroadcast object must have", ModelBroadcast.required_properties()) + return False + # create it + model_object = ModelBroadcast(model_props.get('name'), container, model_props) + elif object_class == 'MicroWatershedModel': + # add a matrix with the data, then add a matrix accessor for each required variable + has_props = MicroWatershedModel.check_properties(model_props) + if has_props == False: + print("MicroWatershedModel object must have", MicroWatershedModel.required_properties()) + return False + # create it + model_object = DataMatrix(model_props.get('name'), container, model_props) + elif object_class == 'ModelLinkage': + model_object = ModelLinkage(model_props.get('name'), container, model_props) + elif object_class == 'SpecialAction': + model_object = SpecialAction(model_props.get('name'), container, model_props) + else: + #print("Loading", 
model_props.get('name'), "with object_class", object_class,"as ModelObject") + model_object = ModelObject(model_props.get('name'), container, model_props) + # one way to insure no class attributes get parsed as sub-comps is: + # model_object.remove_used_keys() + if len(model_object.model_props_parsed) == 0: + # attach these to the object for posterity + model_object.model_props_parsed = model_props + # better yet to just NOT send those attributes as typed object_class arrays, instead just name : value + return model_object + +def model_class_translate(model_props, object_class): + # make adjustments to non-standard items + # this might better be moved to methods on the class handlers + if object_class == 'hydroImpoundment': + # special handling of matrix/storage_stage_area column + # we need to test to see if the storage table has been renamed + # make table from matrix or storage_stage_area + # then make accessors from + storage_stage_area = model_props.get('storage_stage_area') + matrix = model_props.get('matrix') + if ( (storage_stage_area == None) and (matrix != None)): + model_props['storage_stage_area'] = matrix + del model_props['matrix'] + if object_class == 'broadCastObject': + model_props['object_class'] = 'ModelBroadcast' + model_props['broadcast_channel'] = model_props['broadcast_class'] + if object_class == 'USGSChannelGeomObject_sub': + model_props['object_class'] = 'SimpleChannel' + print("Handling USGSChannelGeomObject_sub as SimpleChannel") + if object_class == 'hydroImpoundment': + model_props['object_class'] = 'Impoundment' + print("Handling hydroImpoundment as Impoundment") + if object_class == 'hydroImpSmall': + model_props['object_class'] = 'Impoundment' + print("Handling hydroImpSmall as Impoundment") + # now handle disabled classes - this is temporary to prevent having to comment and uncomment + disabled_classes = {'SimpleChannel', 'Impoundment', 'DataMatrix', 'dataMatrix'} + if model_props['object_class'] in disabled_classes: + 
print("Disabling class", model_props['object_class'], 'rendering as ModelObject') + model_props['object_class'] = 'ModelObject' + +def model_loader_recursive(model_data, container): + k_list = model_data.keys() + object_names = dict.fromkeys(k_list , 1) + if type(object_names) is not dict: + return False + for object_name in object_names: + #print("Handling", object_name) + if object_name in {'name', 'object_class', 'id', 'value', 'default'}: + # we should ask the class what properties are part of the class and also skips these + # therefore, we can assume that anything else must be a child object that needs to + # be handled first -- but how to do this? + continue + model_props = model_data[object_name] + if type(model_props) is not dict: + # this is a constant, the loader is built to handle this, but this causes errors with + # properties on the class that are expected so we just skip and trust that all constants + # are formally declared as type Constant + continue + if type(model_props) is dict: + if not ('object_class' in model_props): + # this is either a class attribute or an un-handleable meta-data + # if the class atttribute exists, we should pass it to container to load + #print("Skipping un-typed", object_name) + continue + #print("Translating", object_name) + # this is a kludge, but can be important + object_class = model_props['object_class'] + model_class_translate(model_props, object_class) + # now we either have a constant (key and value), or a + # fully defined object. Either one should work OK. + #print("Trying to load", object_name) + model_object = model_class_loader(object_name, model_props, container) + if model_object == False: + print("Could not load", object_name) + continue # not handled, but for now we will continue, tho later we should bail? 
+ # now for container type objects, go through its properties and handle + if type(model_props) is dict: + model_loader_recursive(model_props, model_object) + +def model_path_loader(model_object_cache): + k_list = model_object_cache.keys() + model_names = dict.fromkeys(k_list , 1) + for model_name in model_names: + #print("Loading paths for", model_name) + model_object = model_object_cache[model_name] + model_object.find_paths() + + +def model_tokenizer_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): + """ + Given a root model_object, trace the inputs to load things in order + Store this order in model_exec_list + Note: All ordering is as-needed organic, except Broadcasts + - read from children is completed after all other inputs + - read from parent is completed before all other inputs + - could this be accomplished by more sophisticated handling of read + broadcasts? + - When loading a read broadcast, can we iterate through items + that are sending to that broadcast? + - Or is it better to let it as it is, + """ + #print("Handling", model_object.name, " ", model_object.state_path) + if model_object.ix in model_exec_list: + return + if model_object.ix in model_touch_list: + #print("Already touched", model_object.name, model_object.ix, model_object.state_path) + return + # record as having been called, and will ultimately return, to prevent recursions + model_touch_list.append(model_object.ix) + k_list = model_object.inputs.keys() + input_names = dict.fromkeys(k_list , 1) + if type(input_names) is not dict: + return + # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? + # is this a limitation of treating all children as inputs? + # alternative, leave broadcasts organic, but load children first? + # children first, then local sub-comps is old method? 
old method: + # - read parent broadcasts + # - get inputs (essentially, linked vars) + # - send child broadcasts (will send current step parent reads, last step local proc data) + # - execute children + # - execute local sub-comps + for input_name in input_names: + #print("Checking input", input_name) + input_path = model_object.inputs[input_name] + if input_path in model_object_cache.keys(): + input_object = model_object_cache[input_path] + model_tokenizer_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) + else: + if input_path in model_object.state_paths.keys(): + # this is a valid state reference without an object + # thus, it is likely part of internals that are manually added + # which should be fine. tho perhaps we should have an object for these too. + continue + print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") + return + # now after tokenizing all inputs this should be OK to tokenize + model_object.add_op_tokens() + if model_object.optype in ModelObject.runnables: + model_exec_list.append(model_object.ix) + + +def model_order_recursive(model_object, model_object_cache, model_exec_list, model_touch_list = []): + """ + Given a root model_object, trace the inputs to load things in order + Store this order in model_exec_list + Note: All ordering is as-needed organic, except Broadcasts + - read from children is completed after all other inputs + - read from parent is completed before all other inputs + - could this be accomplished by more sophisticated handling of read + broadcasts? + - When loading a read broadcast, can we iterate through items + that are sending to that broadcast? 
+ - Or is it better to let it as it is, + """ + if model_object.ix in model_exec_list: + return + if model_object.ix in model_touch_list: + #print("Already touched", model_object.name, model_object.ix, model_object.state_path) + return + # record as having been called, and will ultimately return, to prevent recursions + model_touch_list.append(model_object.ix) + k_list = model_object.inputs.keys() + input_names = dict.fromkeys(k_list , 1) + if type(input_names) is not dict: + return + # isolate broadcasts, and sort out -- what happens if an equation references a broadcast var? + # is this a limitation of treating all children as inputs? + # alternative, leave broadcasts organic, but load children first? + # children first, then local sub-comps is old method? old method: + # - read parent broadcasts + # - get inputs (essentially, linked vars) + # - send child broadcasts (will send current step parent reads, last step local proc data) + # - execute children + # - execute local sub-comps + for input_name in input_names: + #print("Checking input", input_name) + input_path = model_object.inputs[input_name] + if input_path in model_object_cache.keys(): + input_object = model_object_cache[input_path] + model_order_recursive(input_object, model_object_cache, model_exec_list, model_touch_list) + else: + if input_path in model_object.state_paths.keys(): + # this is a valid state reference without an object + # thus, it is likely part of internals that are manually added + # which should be fine. tho perhaps we should have an object for these too. 
+ continue + print("Problem loading input", input_name, "input_path", input_path, "not in model_object_cache.keys()") + return + # now after loading input dependencies, add this to list + model_exec_list.append(model_object.ix) + +def model_domain_dependencies(state, domain, ep_list): + """ + Given an hdf5 style path to a domain, and a list of variable endpoints in that domain, + Find all model elements that influence the endpoints state + Returns them as a sorted list of index values suitable as a model_exec_list + """ + mello = [] + for ep in ep_list: + mel = [] + mtl = [] + # if the given element is NOT in model_object_cache, then nothing is acting on it, so we return empty list + if (domain + '/' + ep) in state['model_object_cache'].keys(): + endpoint = state['model_object_cache'][domain + '/' + ep] + model_order_recursive(endpoint, state['model_object_cache'], mel, mtl) + mello = mello + mel + + return mello + +def save_object_ts(io_manager, siminfo, op_tokens, ts_ix, ts): + # Decide on using from utilities.py: + # - save_timeseries(io_manager, ts, savedict, siminfo, saveall, operation, segment, activity, compress=True) + # Or, skip the save_timeseries wrapper and call write_ts() directly in io.py: + # write_ts(self, data_frame:pd.DataFrame, save_columns: List[str], category:Category, operation:Union[str,None]=None, segment:Union[str,None]=None, activity:Union[str,None]=None) + # see line 317 in utilities.py for use example of write_ts() + x = 0 # dummy + return + +@njit +def iterate_models(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, dstep = -1): + checksum = 0.0 + for step in range(steps): + pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + #print("Steps completed", step) + return checksum + +@njit +def pre_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + for i in model_exec_list: + if op_tokens[i][0] == 12: + # 
register type data (like broadcast accumulators) + pre_step_register(op_tokens[i], state_ix) + return + +@njit +def step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + for i in model_exec_list: + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return + + + +@njit +def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): + # op_tokens is passed in for ops like matrices that have lookups from other + # locations. All others rely only on ops + # todo: decide if all step_[class() functions should set value in state_ix instead of returning value? + val = 0 + if debug == 1: + print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) + if ops[0] == 1: + step_equation(ops, state_ix) + elif ops[0] == 2: + # todo: this should be moved into a single function, + # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) + if (ops[1] == ops[2]): + if debug == 1: + print("DEBUG: Calling exec_tbl_values", ops) + # this insures a matrix with variables in it is up to date + # only need to do this if the matrix data and matrix config are on same object + # otherwise, the matrix data is an input and has already been evaluated + pass #exec_tbl_values(ops, state_ix, dict_ix) + if (ops[3] > 0): + # this evaluates a single value from a matrix if the matrix is configured to do so. 
+ if debug == 1: + print("DEBUG: Calling exec_tbl_eval", ops) + pass #exec_tbl_eval(op_tokens, ops, state_ix, dict_ix) + elif ops[0] == 3: + step_model_link(ops, state_ix, ts_ix, step) + elif ops[0] == 4: + val = 0 + elif ops[0] == 5: + step_sim_timer(ops, state_ix, dict_ix, ts_ix, step) + elif ops[0] == 9: + val = 0 + elif ops[0] == 13: + pass #step_simple_channel(ops, state_ix, dict_ix, step) + # Op 100 is Basic ACTION in Special Actions + elif ops[0] == 100: + step_special_action(ops, state_ix, dict_ix, step) + return + +@njit +def step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, debug_step = -1): + for i in model_exec_list: + ops = op_tokens[i] + val = 0 + if (step == debug_step): + print("Exec'ing step ", step, " model ID", i) + # op_tokens is passed in for ops like matrices that have lookups from other + # locations. All others rely only on ops + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return + +@njit +def step_model_pcode(model_exec_list, op_tokens, state_info, state_paths, state_ix, dict_ix, ts_ix, step): + ''' + This routine includes support for dynamically loaded python code which is powerful but slow + This is not yet implemented anywhere, just an idea. But in theory it would allow easy switching between + the faster runtime without dynamic code if the user did not request it. + At minimum, this could be used to more efficiently enable/disable this feature for testing by simply calling + a separate routine. + - to do so we would need to add state_paths to the variables passed to step_model which should be OK? 
+ ''' + hydr_ix = hydr_get_ix(state_ix, state_paths, state_info['domain']) # could be done more efficiently, once per model run + state_step_hydr(state_info, state_paths, state_ix, dict_ix, ts_ix, hydr_ix, step) + val = 0 + for i in model_exec_list: + step_one(op_tokens, op_tokens[i], state_ix, dict_ix, ts_ix, step, 0) + return + +@njit +def post_step_model(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + return + +def step_object(thisobject, step): + # this calls the step for a given model object and timestep + # this is a workaround since the object method ModelObject.step() fails to find the step_one() function ? + step_one(thisobject.op_tokens, thisobject.op_tokens[thisobject.ix], thisobject.state_ix, thisobject.dict_ix, thisobject.ts_ix, step) + + +@njit +def pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step): + for i in model_exec_list: + ops = op_tokens[i] + #for i in model_exec_list: + # op = op_tokens[i] + if ops[0] == 12: + # register type data (like broadcast accumulators) + pre_step_register(ops, state_ix) + #continue + #elif ops[0] == 1: + # # register type data (like broadcast accumulators) + # continue + return + +@njit +def iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps, debug_step = -1): + checksum = 0.0 + for step in range(steps): + pre_step_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step) + #step_model_test(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, step, debug_step) + #print("Steps completed", step) + return checksum + +def time_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps): + start = time.time() + iterate_perf(model_exec_list, op_tokens, state_ix, dict_ix, ts_ix, steps) + end = time.time() + print(len(model_exec_list), "components iterated over", siminfo['steps'], "time steps took" , end - start, "seconds") diff --git a/HSP2/om_model_linkage.py b/HSP2/om_model_linkage.py index d4b4ac61..eae74f76 100644 --- a/HSP2/om_model_linkage.py +++ 
b/HSP2/om_model_linkage.py @@ -1,145 +1,126 @@ -""" -The class ModelLinkage is used to translate copy data from one state location to another. -It is also used to make an implicit parent child link to insure that an object is loaded -during a model simulation. -""" -from HSP2.state import * -from HSP2.om import * -from HSP2.om_model_object import ModelObject -from numba import njit -class ModelLinkage(ModelObject): - def __init__(self, name, container = False, model_props = {}): - super(ModelLinkage, self).__init__(name, container, model_props) - # ModelLinkage copies a values from right to left - # right_path: is the data source for the link - # left_path: is the destination of the link - # - is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path - # - left_path parameter is only needed for pushes (type 4 and 5) - # - the push is functionally equivalent to a pull whose path resolves to the specified left_path - # - but the push allows the potential for multiple objects to set a single state - # This can be dangerous or difficult to debug, but essential to replicate old HSPF behaviour - # especially in the case of If/Then type structures. 
- # it is also useful for the broadcast objects, see om_model_broadcast for those - # link_type: 1 - local parent-child, 2 - local property link (state data), 3 - remote linkage (ts data only), 4 - push to accumulator (like a hub), 5 - overwrite remote value - self.optype = 3 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - ModelLinkage, 4 - - if container == False: - # this is required - print("Error: a link must have a container object to serve as the destination") - return False - self.right_path = self.handle_prop(model_props, 'right_path') - self.link_type = self.handle_prop(model_props, 'link_type', False, 0) - self.left_path = self.handle_prop(model_props, 'left_path') - - if self.left_path == False: - # self.state_path gets set when creating at the parent level - self.left_path = self.state_path - if (self.link_type == 0): - # if this is a simple input we remove the object from the model_object_cache, and pass back to parent as an input - del self.model_object_cache[self.state_path] - del self.state_ix[self.ix] - container.add_input(self.name, self.right_path) - # this breaks for some reason, doesn't like the input name being different than the variable path ending? - # maybe because we should be adding the input to the container, not the self? - self.add_input(self.right_path, self.right_path) - - def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): - # parent method handles most cases, but subclass handles special situations. - prop_val = super().handle_prop(model_props, prop_name, strict, default_value) - if ( (prop_name == 'right_path') and (prop_val == None) or (prop_val == '')): - raise Exception("right_path cannot be empty. Object creation halted. 
Path to object with error is " + self.state_path) - if ( (prop_name == 'right_path')): - # check for special keyword [parent] - pre_val = prop_val - prop_val.replace("[parent]", self.container.state_path) - #print("Changed ", pre_val, " to ", prop_val) - return prop_val - - @staticmethod - def required_properties(): - # returns a list or minimum properties to create. - # see ModelConstant below for how to call this in a sub-class - # note: - # req_props = super(DataMatrix, DataMatrix).required_properties() - req_props = ['name', 'right_path'] - return req_props - - def find_paths(self): - # this should be needed if this is a PUSH link_type = 4 or 5 - super().find_paths() - self.paths_found = False # override parent setting until we verify everything - # do we need to do this, or just trust it exists? - #self.insure_path(self, self.right_path) - # the left path, if this is type 4 or 5, is a push, so we must require it - if ( (self.link_type == 4) or (self.link_type == 5) ): - self.insure_path(self.left_path) - self.paths_found = True - return - - def tokenize(self): - super().tokenize() - # - if this is a data property link then we add op codes to do a copy of data from one state address to another - # - if this is simply a parent-child connection, we do not render op-codes, but we do use this for assigning - # - execution hierarchy - #print("Linkage/link_type ", self.name, self.link_type,"created with params", self.model_props_parsed) - if self.link_type in (2, 3): - src_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) - if not (src_ix == False): - self.ops = self.ops + [src_ix, self.link_type] - else: - print("Error: link ", self.name, "does not have a valid source path") - #print(self.name,"tokenize() result", self.ops) - if (self.link_type == 4) or (self.link_type == 5): - # we push to the remote path in this one - left_ix = get_state_ix(self.state_ix, self.state_paths, self.left_path) - right_ix = get_state_ix(self.state_ix, self.state_paths, 
self.right_path) - if (left_ix != False) and (right_ix != False): - self.ops = self.ops + [left_ix, self.link_type, right_ix] - else: - print("Error: link ", self.name, "does not have valid paths", "(left = ", self.left_path, left_ix, "right = ", self.right_path, right_ix, ")") - #print("tokenize() result", self.ops) - -# Function for use during model simulations of tokenized objects -@njit -def step_model_link(op_token, state_ix, ts_ix, step): - #if step == 2: - #print("step_model_link() called at step 2 with op_token=", op_token) - if op_token[3] == 1: - return True - elif op_token[3] == 2: - state_ix[op_token[1]] = state_ix[op_token[2]] - elif op_token[3] == 3: - # read from ts variable TBD - # state_ix[op_token[1]] = ts_ix[op_token[2]][step] - return True - elif op_token[3] == 4: - # add value in local state to the remote broadcast hub+register state - state_ix[op_token[2]] = state_ix[op_token[2]] + state_ix[op_token[4]] - return True - elif op_token[3] == 5: - # overwrite remote variable state with value in another paths state - if step == 2: - print("Setting state_ix[", op_token[2], "] =", state_ix[op_token[4]]) - state_ix[op_token[2]] = state_ix[op_token[4]] - return True - - -def test_model_link(op_token, state_ix, ts_ix, step): - if op_token[3] == 1: - return True - elif op_token[3] == 2: - state_ix[op_token[1]] = state_ix[op_token[2]] - elif op_token[3] == 3: - # read from ts variable TBD - # state_ix[op_token[1]] = ts_ix[op_token[2]][step] - return True - elif op_token[3] == 4: - print("Remote Broadcast accumulator type link.") - print("Setting op ID", str(op_token[2]), "to value from ID", str(op_token[4]), "with value of ") - # add value in local state to the remote broadcast hub+register state - state_ix[op_token[2]] = state_ix[op_token[2]] + state_ix[op_token[4]] - print(str(state_ix[op_token[2]]) + " = ", str(state_ix[op_token[2]]) + "+" + str(state_ix[op_token[4]])) - return True - elif op_token[3] == 5: - # push value in local state to the remote 
broadcast hub+register state - state_ix[op_token[2]] = state_ix[op_token[4]] - return True \ No newline at end of file +""" +The class ModelLinkage is used to translate copy data from one state location to another. +It is also used to make an implicit parent child link to insure that an object is loaded +during a model simulation. +""" +from HSP2.state import * +from HSP2.om import * +from HSP2.om_model_object import ModelObject +from numba import njit +class ModelLinkage(ModelObject): + def __init__(self, name, container = False, model_props = {}): + super(ModelLinkage, self).__init__(name, container, model_props) + # ModelLinkage copies a values from right to left + # right_path: is the data source for the link + # left_path: is the destination of the link + # - is implicit in types 1-3, i.e., the ModelLinkage object path itself is the left_path + # - left_path parameter is only needed for pushes (type 4 and 5) + # - the push is functionally equivalent to a pull whose path resolves to the specified left_path + # - but the push allows the potential for multiple objects to set a single state + # This can be dangerous or difficult to debug, but essential to replicate old HSPF behaviour + # especially in the case of If/Then type structures. 
+ # it is also useful for the broadcast objects, see om_model_broadcast for those + # link_type: 1 - local parent-child, 2 - local property link (state data), 3 - remote linkage (ts data only), 4 - push to accumulator (like a hub), 5 - overwrite remote value + self.optype = 3 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - ModelLinkage, 4 - + if container == False: + # this is required + print("Error: a link must have a container object to serve as the destination") + return False + self.right_path = self.handle_prop(model_props, 'right_path') + self.link_type = self.handle_prop(model_props, 'link_type', False, 0) + self.left_path = self.handle_prop(model_props, 'left_path') + + if self.left_path == False: + # self.state_path gets set when creating at the parent level + self.left_path = self.state_path + if (self.link_type == 0): + # if this is a simple input we remove the object from the model_object_cache, and pass back to parent as an input + del self.model_object_cache[self.state_path] + del self.state_ix[self.ix] + container.add_input(self.name, self.right_path) + # this breaks for some reason, doesn't like the input name being different than the variable path ending? + # maybe because we should be adding the input to the container, not the self? + self.add_input(self.right_path, self.right_path) + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # parent method handles most cases, but subclass handles special situations. + prop_val = super().handle_prop(model_props, prop_name, strict, default_value) + if ( (prop_name == 'right_path') and (prop_val == None) or (prop_val == '')): + raise Exception("right_path cannot be empty. Object creation halted. 
Path to object with error is " + self.state_path) + if ( (prop_name == 'right_path')): + # check for special keyword [parent] + pre_val = prop_val + prop_val.replace("[parent]", self.container.state_path) + #print("Changed ", pre_val, " to ", prop_val) + return prop_val + + @staticmethod + def required_properties(): + # returns a list or minimum properties to create. + # see ModelConstant below for how to call this in a sub-class + # note: + # req_props = super(DataMatrix, DataMatrix).required_properties() + req_props = ['name', 'right_path'] + return req_props + + def find_paths(self): + # this should be needed if this is a PUSH link_type = 4 or 5 + super().find_paths() + self.paths_found = False # override parent setting until we verify everything + # do we need to do this, or just trust it exists? + #self.insure_path(self, self.right_path) + # the left path, if this is type 4 or 5, is a push, so we must require it + if ( (self.link_type == 4) or (self.link_type == 5) or (self.link_type == 6) ): + self.insure_path(self.left_path) + self.paths_found = True + return + + def tokenize(self): + super().tokenize() + # - if this is a data property link then we add op codes to do a copy of data from one state address to another + # - if this is simply a parent-child connection, we do not render op-codes, but we do use this for assigning + # - execution hierarchy + #print("Linkage/link_type ", self.name, self.link_type,"created with params", self.model_props_parsed) + if self.link_type in (2, 3): + src_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if not (src_ix == False): + self.ops = self.ops + [src_ix, self.link_type] + else: + print("Error: link ", self.name, "does not have a valid source path") + #print(self.name,"tokenize() result", self.ops) + if (self.link_type == 4) or (self.link_type == 5) or (self.link_type == 6): + # we push to the remote path in this one + left_ix = get_state_ix(self.state_ix, self.state_paths, self.left_path) + 
right_ix = get_state_ix(self.state_ix, self.state_paths, self.right_path) + if (left_ix != False) and (right_ix != False): + self.ops = self.ops + [left_ix, self.link_type, right_ix] + else: + print("Error: link ", self.name, "does not have valid paths", "(left = ", self.left_path, left_ix, "right = ", self.right_path, right_ix, ")") + #print("tokenize() result", self.ops) + +# Function for use during model simulations of tokenized objects +@njit +def step_model_link(op_token, state_ix, ts_ix, step): + #if step == 2: + #print("step_model_link() called at step 2 with op_token=", op_token) + if op_token[3] == 1: + return True + elif op_token[3] == 2: + state_ix[op_token[1]] = state_ix[op_token[2]] + return True + elif op_token[3] == 3: + # read from ts variable TBD + # state_ix[op_token[1]] = ts_ix[op_token[2]][step] + return True + elif op_token[3] == 4: + # add value in local state to the remote broadcast hub+register state + state_ix[op_token[2]] = state_ix[op_token[2]] + state_ix[op_token[4]] + return True + elif op_token[3] == 5: + # overwrite remote variable state with value in another paths state + state_ix[op_token[2]] = state_ix[op_token[4]] + return True + elif op_token[3] == 6: + # set value in a timerseries + ts_ix[op_token[2]][step] = state_ix[op_token[4]] + return True diff --git a/HSP2/om_model_object.py b/HSP2/om_model_object.py index 0a13a95c..2b03a918 100644 --- a/HSP2/om_model_object.py +++ b/HSP2/om_model_object.py @@ -1,390 +1,445 @@ -""" -The class ModelObject is the base class upon which all other dynamic model objects are built on. -It handles all Dict management functions, but provides for no runtime execution of it's own. -All runtime exec is done by child classes. 
-""" -from HSP2.state import * -from HSP2.om import * -from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric -from pandas import Timestamp, Timedelta, read_hdf, read_csv -from numpy import pad - -class ModelObject: - state_ix = {} # Shared Dict with the numerical state of each object - state_paths = {} # Shared Dict with the hdf5 path of each object - dict_ix = {} # Shared Dict with the hdf5 path of each object - ts_ix = {} # Shared Dict with the hdf5 path of each object - op_tokens = {} # Shared Dict with the tokenized representation of each object, will be turned into array of ints - model_object_cache = {} # Shared with actual objects, keyed by their path - model_exec_list = {} # Shared with actual objects, keyed by their path - max_token_length = 64 # limit on complexity of tokenized objects since op_tokens must be fixed dimensions for numba - runnables = [1,2,5,6,8,9,10,11,12,13,14,15, 100] # runnable components important for optimization - ops_data_type = 'ndarray' # options are ndarray or Dict - Dict appears slower, but unsure of the cause, so keep as option. - - def __init__(self, name, container = False, model_props = {}): - self.name = name - self.container = container # will be a link to another object - self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" - self.attribute_path = "" # - self.model_props_parsed = {} # a place to stash parse record for debugging - if (hasattr(self,'state_path') == False): - # if the state_path has already been set, we accept it. - # this allows sub-classes to override the standard path guessing approach. 
- self.state_path = "" # Ex: "/STATE/RCHRES_001" # the pointer to this object state - self.inputs = {} # associative array with key=local_variable_name, value=hdf5_path Ex: [ 'Qin' : '/STATE/RCHRES_001/IVOL' ] - self.inputs_ix = {} # associative array with key=local_variable_name, value=state_ix integer key - self.ix = False - self.paths_found = False # this should be False at start - self.default_value = 0.0 - self.ops = [] - self.optype = 0 # OpTypes are as follows: - # 0 - model object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, - # 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), - # 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, - # 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment, 15 - FlowBy - self.register_path() # note this registers the path AND stores the object in model_object_cache - self.parse_model_props(model_props) - - @staticmethod - def required_properties(): - # returns a list or minimum properties to create. 
- # see ModelConstant below for how to call this in a sub-class - # note: - # req_props = super(DataMatrix, DataMatrix).required_properties() - req_props = ['name'] - return req_props - - @staticmethod - def make_op_tokens(num_ops = 5000): - if (ModelObject.ops_data_type == 'ndarray'): - op_tokens = int32(zeros((num_ops,64))) # was Dict.empty(key_type=types.int64, value_type=types.i8[:]) - else: - op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) - return op_tokens - - @staticmethod - def runnable_op_list(op_tokens, meo): - # only return those objects that do something at runtime - rmeo = [] - run_ops = {} - for ops in ModelObject.op_tokens: - if ops[0] in ModelObject.runnables: - run_ops[ops[1]] = ops - print("Found runnable", ops[1], "type", ops[0]) - for ix in meo: - if ix in run_ops.keys(): - rmeo.append(ix) - rmeo = np.asarray(rmeo, dtype="i8") - return rmeo - - @staticmethod - def model_format_ops(ops): - if (ModelObject.ops_data_type == 'ndarray'): - ops = pad(ops,(0,ModelObject.max_token_length))[0:ModelObject.max_token_length] - else: - ops = np.asarray(ops, dtype="i8") - return ops - - def format_ops(self): - # this can be sub-classed if needed, but should not be since it is based on the ops_data_type - # See ModelObject.model_format_ops() - return ModelObject.model_format_ops(self.ops) - - @classmethod - def check_properties(cls, model_props): - # this is for pre-screening properties for validity in model creation routines - # returns True or False and can be as simple as checking the list of required_properties - # or a more detailed examination of suitability of what those properties contain - req_props = cls.required_properties() - matching_props = set(model_props).intersection(set(req_props)) - if len(matching_props) < len(req_props): - return False - return True - - def handle_inputs(self, model_props): - if 'inputs' in model_props.keys(): - for i_pair in model_props['inputs']: - i_name = i_pair[0] - i_target = i_pair[1] - 
i_target.replace("[parent]", self.container.state_path) - self.add_input(i_name, i_target) - - def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): - # this checks to see if the prop is in dict with value form, or just a value - # strict = True causes an exception if property is missing from model_props dict - prop_val = model_props.get(prop_name) - if type(prop_val) == list: # this doesn't work, but nothing gets passed in like this? Except broadcast params, but they are handled in the sub-class - prop_val = prop_val - elif type(prop_val) == dict: - prop_val = prop_val.get('value') - if strict and (prop_val == None): - raise Exception("Cannot find property " + prop_name + " in properties passed to "+ self.name + " and strict = True. Object creation halted. Path to object with error is " + self.state_path) - if (prop_val == None) and not (default_value == None): - prop_val = default_value - return prop_val - - def parse_model_props(self, model_props, strict = False ): - # sub-classes will allow an create argument "model_props" and handle them here. - # - subclasses should insure that they call super().parse_model_props() or include all code below - # see also: handle_prop(), which will be called y parse_model_props - # for all attributes supported by the class - # this base object only handles inputs - self.handle_inputs(model_props) - self.model_props_parsed = model_props - return True - - def set_state(self, set_value): - var_ix = set_state(self.state_ix, self.state_paths, self.state_path, set_value) - return var_ix - - def load_state_dicts(self, op_tokens, state_paths, state_ix, dict_ix): - self.op_tokens = op_tokens - self.state_paths = state_paths - self.state_ix = state_ix - self.dict_ix = dict_ix - - def save_object_hdf(self, hdfname, overwrite = False ): - # save the object in the full hdf5 path - # if overwrite = True replace this and all children, otherwise, just save this. 
- # note: "with" statement helps prevent unclosed resources, see: https://www.geeksforgeeks.org/with-statement-in-python/ - with HDFStore(hdfname, mode = 'a') as store: - dummy_var = True - - def make_paths(self, base_path = False): - if base_path == False: # we are NOT forcing paths - if not (self.container == False): - self.state_path = self.container.state_path + "/" + str(self.name) - self.attribute_path = self.container.attribute_path + "/" + str(self.name) - elif self.name == "": - self.state_path = "/STATE" - self.attribute_path = "/OBJECTS" - else: - self.state_path = "/STATE/" + str(self.name) - self.attribute_path = "/OBJECTS/" + str(self.name) - else: - # base_path is a Dict with state_path and attribute_path set - self.state_path = base_path['STATE'] + self.name - self.attribute_path = base_path['OBJECTS'] + self.name - return self.state_path - - def get_state(self, var_name = False): - if var_name == False: - return self.state_ix[self.ix] - else: - var_path = self.find_var_path(var_name) - var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) - if (var_ix == False): - return False - return self.state_ix[var_ix] - - def get_exec_order(self, var_name = False): - if var_name == False: - var_ix = self.ix - else: - var_path = self.find_var_path(var_name) - var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) - exec_order = get_exec_order(self.model_exec_list,var_ix) - return exec_order - - def get_object(self, var_name = False): - if var_name == False: - return self.model_object_cache[self.state_path] - else: - var_path = self.find_var_path(var_name) - return self.model_object_cache[var_path] - - - def find_var_path(self, var_name, local_only = False): - # check local inputs for name - if var_name in self.inputs.keys(): - return self.inputs[var_name] - if local_only: - return False # we are limiting the scope, so just return - # check parent for name - if not (self.container == False): - return self.container.find_var_path(var_name) - 
# check for root state vars STATE + var_name - if ("/STATE/" + var_name) in self.state_paths.keys(): - #return self.state_paths[("/STATE/" + var_name)] - return ("/STATE/" + var_name) - # check for root state vars - if var_name in self.state_paths.keys(): - #return self.state_paths[var_name] - return var_name - return False - - def constant_or_path(self, keyname, keyval, trust = False): - if is_float_digit(keyval): - # we are given a constant value, not a variable reference - k = ModelConstant(keyname, self, float(keyval)) - kix = k.ix - else: - kix = self.add_input(keyname, keyval, 2, trust) - return kix - - def register_path(self): - # initialize the path variable if not already set - if self.state_path == '': - self.make_paths() - self.ix = set_state(self.state_ix, self.state_paths, self.state_path, self.default_value) - # store object in model_object_cache - if not (self.state_path in self.model_object_cache.keys()): - self.model_object_cache[self.state_path] = self - # this should check to see if this object has a parent, and if so, register the name on the parent - # default is as a child object. - if not (self.container == False): - # since this is a request to actually create a new path, we instruct trust = True as last argument - return self.container.add_input(self.name, self.state_path, 1, True) - return self.ix - - def add_input(self, var_name, var_path, input_type = 1, trust = False): - # this will add to the inputs, but also insure that this - # requested path gets added to the state/exec stack via an input object if it does - # not already exist. 
- # - var_name = the local name for this linked entity/attribute - # - var_path = the full path of the entity/attribute we are linking to - # - input types: 1: parent-child link, 2: state property link, 3: timeseries object property link - # - trust = False means fail if the path does not already exist, True means assume it will be OK which is bad policy, except for the case where the path points to an existing location - # do we have a path here already or can we find on the parent? - # how do we check if this is a path already, in which case we trust it? - # todo: we should be able to alias a var_name to a var_path, for example - # calling add_input('movar', 'month', 1, True) - # this *should* search for month and find the STATE/month variable - # BUT this only works if both var_name and var_path are month - # so add_input('month', 'month', 1, True) works. - found_path = self.find_var_path(var_path) - #print("Searched", var_name, "with path", var_path,"found", found_path) - var_ix = get_state_ix(self.state_ix, self.state_paths, found_path) - if var_ix == False: - if (trust == False): - raise Exception("Cannot find variable path: " + var_path + " when adding input to object " + self.name + " as input named " + var_name + " ... process terminated. Path to object with error is " + self.state_path) - var_ix = self.insure_path(var_path) - else: - # if we are to trust the path, this might be a child property just added, - # and therefore, we don't look further than this - # otherwise, we use found_path, whichever it is, as - # we know that this path is better, as we may have been given a simple variable name - # and so found_path will look more like /STATE/RCHRES_001/... - if trust == False: - var_path = found_path - self.inputs[var_name] = var_path - self.inputs_ix[var_name] = var_ix - # Should we create a register for the input to be reported here? 
- # i.e., if we have an input named Qin on RCHRES_R001, shouldn't we be able - # to find the data in /STATE/RCHRES_R001/Qin ??? It is redundant data and writing - # but matches a complete data model and prevents stale data? - return self.inputs_ix[var_name] - - def add_object_input(self, var_name, var_object, link_type = 1): - # See above for details. - # this adds an object as a link to another object - self.inputs[var_name] = var_object.state_path - self.inputs_ix[var_name] = var_object.ix - return self.inputs_ix[var_name] - - def create_parent_var(self, parent_var_name, source): - # see decision points: https://github.com/HARPgroup/HSPsquared/issues/78 - # This is used when an object sets an additional property on its parent - # Like in simple_channel sets [channel prop name]_Qout on its parent - # Generally, this should have 2 components. - # 1 - a state variable on the child (this could be an implicit sub-comp, or a constant sub-comp, the child handles the setup of this) see constant_or_path() - # 2 - an input link - # the beauty of this is that the parent object and any of it's children will find the variable "[source_object]_varname" - if type(source) == str: - self.container.add_input(parent_var_name, source, 1, False) - elif isinstance(source, ModelObject): - self.container.add_object_input(parent_var_name, source, 1) - - def insure_path(self, var_path): - # if this path can be found in the hdf5 make sure that it is registered in state - # and that it has needed object class to render it at runtime (some are automatic) - # RIGHT NOW THIS DOES NOTHING TO CHECK IF THE VAR EXISTS THIS MUST BE FIXED - var_ix = set_state(self.state_ix, self.state_paths, var_path, 0.0) - return var_ix - - def get_dict_state(self, ix = -1): - if ix >= 0: - return self.dict_ix[ix] - return self.dict_ix[self.ix] - - def find_paths(self): - # Note: every single piece of data used by objects, even constants, are resolved to a PATH in the hdf5 - # find_paths() is called to insure that 
all of these can be found, and then, are added to inputs/inputs_ix - # - We wait to find the index values for those variables after all things have been loaded - # - base ModelObject does not have any "implicit" inputs, since all of its inputs are - # explicitly added children objects, thus we default to True - self.paths_found = True - # - But children such as Equation and DataMatrix, etc - # so they mark paths_found = False and then - # should go through their own locally defined data - # and call add_input() for any data variables encountered - # - add_input() will handle searching for the paths and ix values - # and should also handle deciding if this is a constant, like a numeric value - # or a variable data and should handle them accordingly - return True - - def tokenize(self): - # renders tokens for high speed execution - if (self.paths_found == False): - raise Exception("path_found False for object" + self.name + "(" + self.state_path + "). " + "Tokens cannot be generated until method '.find_paths()' is run for all model objects ... process terminated. (see function `model_path_loader(model_object_cache)`)") - self.ops = [self.optype, self.ix] - - def add_op_tokens(self): - # this puts the tokens into the global simulation queue - # can be customized by subclasses to add multiple lines if needed. - if self.ops == []: - self.tokenize() - #print(self.state_path, "tokens", self.ops) - if len(self.ops) > self.max_token_length: - raise Exception("op tokens cannot exceed max length of" + self.max_token_length + "(" + self.state_path + "). ") - self.op_tokens[self.ix] = self.format_ops() - - def step(self, step): - # this tests the model for a single timestep. 
- # this is not the method that is used for high-speed runs, but can theoretically be used for - # easier to understand demonstrations - step_one(self.op_tokens, self.op_tokens[self.ix], self.state_ix, self.dict_ix, self.ts_ix, step) - #step_model({self.op_tokens[self.ix]}, self.state_ix, self.dict_ix, self.ts_ix, step) - - def dddstep_model(op_tokens, state_ix, dict_ix, ts_ix, step): - for i in op_tokens.keys(): - if op_tokens[i][0] == 1: - state_ix[i] = step_equation(op_tokens[i], state_ix) - elif op_tokens[i][0] == 2: - state_ix[i] = exec_tbl_eval(op_tokens[i], state_ix, dict_ix) - elif op_tokens[i][0] == 3: - step_model_link(op_tokens[i], state_ix, ts_ix, step) - elif op_tokens[i][0] == 4: - return False - elif op_tokens[i][0] == 5: - step_sim_timer(op_tokens[i], state_ix, dict_ix, ts_ix, step) - return - -""" -The class ModelConstant is for storing constants. It must be loaded here because ModelObject calls it. -Is this useful or just clutter? Useful I think since there are numerical constants... -""" -class ModelConstant(ModelObject): - def __init__(self, name, container = False, value = 0.0, state_path = False): - if (state_path != False): - # this allows us to mandate the location. useful for placeholders, broadcasts, etc. 
- self.state_path = state_path - super(ModelConstant, self).__init__(name, container) - self.default_value = float(value) - self.optype = 7 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric) - #print("ModelConstant named",self.name, "with path", self.state_path,"and ix", self.ix, "value", value) - var_ix = self.set_state(float(value)) - self.paths_found = True - # self.state_ix[self.ix] = self.default_value - - def required_properties(): - req_props = super(ModelConstant, ModelConstant).required_properties() - req_props.extend(['value']) - return req_props - -# njit functions for runtime - -@njit -def exec_model_object( op, state_ix, dict_ix): - ix = op[1] - return 0.0 \ No newline at end of file +""" +The class ModelObject is the base class upon which all other dynamic model objects are built on. +It handles all Dict management functions, but provides for no runtime execution of it's own. +All runtime exec is done by child classes. 
+""" +from HSP2.state import * +from HSP2.om import * +from pandas import Series, DataFrame, concat, HDFStore, set_option, to_numeric +from pandas import Timestamp, Timedelta, read_hdf, read_csv +from numpy import pad + +class ModelObject: + state_ix = {} # Shared Dict with the numerical state of each object + state_paths = {} # Shared Dict with the hdf5 path of each object + dict_ix = {} # Shared Dict with the hdf5 path of each object + ts_ix = {} # Shared Dict with the hdf5 path of each object + op_tokens = {} # Shared Dict with the tokenized representation of each object, will be turned into array of ints + model_object_cache = {} # Shared with actual objects, keyed by their path + model_exec_list = {} # Shared with actual objects, keyed by their path + max_token_length = 64 # limit on complexity of tokenized objects since op_tokens must be fixed dimensions for numba + runnables = [1,2,5,6,8,9,10,11,12,13,14,15, 100] # runnable components important for optimization + ops_data_type = 'ndarray' # options are ndarray or Dict - Dict appears slower, but unsure of the cause, so keep as option. + + def __init__(self, name, container = False, model_props = {}): + self.name = name + self.container = container # will be a link to another object + self.log_path = "" # Ex: "/RESULTS/RCHRES_001/SPECL" + self.attribute_path = "" # + self.model_props_parsed = {} # a place to stash parse record for debugging + if (hasattr(self,'state_path') == False): + # if the state_path has already been set, we accept it. + # this allows sub-classes to override the standard path guessing approach. 
+ self.state_path = "" # Ex: "/STATE/RCHRES_001" # the pointer to this object state + self.inputs = {} # associative array with key=local_variable_name, value=hdf5_path Ex: [ 'Qin' : '/STATE/RCHRES_001/IVOL' ] + self.inputs_ix = {} # associative array with key=local_variable_name, value=state_ix integer key + self.ix = False + self.paths_found = False # this should be False at start + self.default_value = 0.0 + self.ops = [] + self.optype = 0 # OpTypes are as follows: + # 0 - model object, 1 - equation, 2 - datamatrix, 3 - input/ModelLinkage, + # 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric), + # 8 - matrix accessor, 9 - MicroWatershedModel, 10 - MicroWatershedNetwork, 11 - ModelTimeseries, + # 12 - ModelRegister, 13 - SimpleChannel, 14 - SimpleImpoundment, 15 - FlowBy + self.register_path() # note this registers the path AND stores the object in model_object_cache + self.parse_model_props(model_props) + + @staticmethod + def required_properties(): + # returns a list or minimum properties to create. 
+ # see ModelConstant below for how to call this in a sub-class + # note: + # req_props = super(DataMatrix, DataMatrix).required_properties() + req_props = ['name'] + return req_props + + @staticmethod + def make_op_tokens(num_ops = 5000): + if (ModelObject.ops_data_type == 'ndarray'): + op_tokens = int32(zeros((num_ops,64))) # was Dict.empty(key_type=types.int64, value_type=types.i8[:]) + else: + op_tokens = Dict.empty(key_type=types.int64, value_type=types.i8[:]) + return op_tokens + + @staticmethod + def runnable_op_list(op_tokens, meo, debug = False): + # only return those objects that do something at runtime + rmeo = [] + run_ops = {} + for ops in ModelObject.op_tokens: + if ops[0] in ModelObject.runnables: + run_ops[ops[1]] = ops + if debug == True: + print("Found runnable", ops[1], "type", ops[0]) + for ix in meo: + if ix in run_ops.keys(): + rmeo.append(ix) + rmeo = np.asarray(rmeo, dtype="i8") + return rmeo + + @staticmethod + def model_format_ops(ops): + if (ModelObject.ops_data_type == 'ndarray'): + ops = pad(ops,(0,ModelObject.max_token_length))[0:ModelObject.max_token_length] + else: + ops = np.asarray(ops, dtype="i8") + return ops + + def format_ops(self): + # this can be sub-classed if needed, but should not be since it is based on the ops_data_type + # See ModelObject.model_format_ops() + return ModelObject.model_format_ops(self.ops) + + @classmethod + def check_properties(cls, model_props): + # this is for pre-screening properties for validity in model creation routines + # returns True or False and can be as simple as checking the list of required_properties + # or a more detailed examination of suitability of what those properties contain + req_props = cls.required_properties() + matching_props = set(model_props).intersection(set(req_props)) + if len(matching_props) < len(req_props): + return False + return True + + def handle_inputs(self, model_props): + if 'inputs' in model_props.keys(): + for i_pair in model_props['inputs']: + i_name = 
i_pair[0] + i_target = i_pair[1] + i_target.replace("[parent]", self.container.state_path) + self.add_input(i_name, i_target) + + def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): + # this checks to see if the prop is in dict with value form, or just a value + # strict = True causes an exception if property is missing from model_props dict + prop_val = model_props.get(prop_name) + if type(prop_val) == list: # this doesn't work, but nothing gets passed in like this? Except broadcast params, but they are handled in the sub-class + prop_val = prop_val + elif type(prop_val) == dict: + prop_val = prop_val.get('value') + if strict and (prop_val == None): + raise Exception("Cannot find property " + prop_name + " in properties passed to "+ self.name + " and strict = True. Object creation halted. Path to object with error is " + self.state_path) + if (prop_val == None) and not (default_value == None): + prop_val = default_value + return prop_val + + def parse_model_props(self, model_props, strict = False ): + # sub-classes will allow an create argument "model_props" and handle them here. + # - subclasses should insure that they call super().parse_model_props() or include all code below + # see also: handle_prop(), which will be called y parse_model_props + # for all attributes supported by the class + # this base object only handles inputs + self.handle_inputs(model_props) + self.model_props_parsed = model_props + return True + + def set_state(self, set_value): + var_ix = set_state(self.state_ix, self.state_paths, self.state_path, set_value) + return var_ix + + def load_state_dicts(self, op_tokens, state_paths, state_ix, dict_ix): + self.op_tokens = op_tokens + self.state_paths = state_paths + self.state_ix = state_ix + self.dict_ix = dict_ix + + def save_object_hdf(self, hdfname, overwrite = False ): + # save the object in the full hdf5 path + # if overwrite = True replace this and all children, otherwise, just save this. 
+ # note: "with" statement helps prevent unclosed resources, see: https://www.geeksforgeeks.org/with-statement-in-python/ + with HDFStore(hdfname, mode = 'a') as store: + dummy_var = True + + def make_paths(self, base_path = False): + if base_path == False: # we are NOT forcing paths + if not (self.container == False): + self.state_path = self.container.state_path + "/" + str(self.name) + self.attribute_path = self.container.attribute_path + "/" + str(self.name) + elif self.name == "": + self.state_path = "/STATE" + self.attribute_path = "/OBJECTS" + else: + self.state_path = "/STATE/" + str(self.name) + self.attribute_path = "/OBJECTS/" + str(self.name) + else: + # base_path is a Dict with state_path and attribute_path set + self.state_path = base_path['STATE'] + self.name + self.attribute_path = base_path['OBJECTS'] + self.name + return self.state_path + + def get_state(self, var_name = False): + if var_name == False: + return self.state_ix[self.ix] + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + if (var_ix == False): + return False + return self.state_ix[var_ix] + + def get_exec_order(self, var_name = False): + if var_name == False: + var_ix = self.ix + else: + var_path = self.find_var_path(var_name) + var_ix = get_state_ix(self.state_ix, self.state_paths, var_path) + exec_order = get_exec_order(self.model_exec_list,var_ix) + return exec_order + + def get_object(self, var_name = False): + if var_name == False: + return self.model_object_cache[self.state_path] + else: + var_path = self.find_var_path(var_name) + return self.model_object_cache[var_path] + + + def find_var_path(self, var_name, local_only = False): + # check local inputs for name + if var_name in self.inputs.keys(): + return self.inputs[var_name] + if local_only: + return False # we are limiting the scope, so just return + # check parent for name + if not (self.container == False): + return self.container.find_var_path(var_name) + 
# check for root state vars STATE + var_name + if ("/STATE/" + var_name) in self.state_paths.keys(): + #return self.state_paths[("/STATE/" + var_name)] + return ("/STATE/" + var_name) + # check for root state vars + if var_name in self.state_paths.keys(): + #return self.state_paths[var_name] + return var_name + return False + + def constant_or_path(self, keyname, keyval, trust = False): + if is_float_digit(keyval): + # we are given a constant value, not a variable reference + k = ModelConstant(keyname, self, float(keyval)) + kix = k.ix + else: + kix = self.add_input(keyname, keyval, 2, trust) + return kix + + def register_path(self): + # initialize the path variable if not already set + if self.state_path == '': + self.make_paths() + self.ix = set_state(self.state_ix, self.state_paths, self.state_path, self.default_value) + # store object in model_object_cache + if not (self.state_path in self.model_object_cache.keys()): + self.model_object_cache[self.state_path] = self + # this should check to see if this object has a parent, and if so, register the name on the parent + # default is as a child object. + if not (self.container == False): + # since this is a request to actually create a new path, we instruct trust = True as last argument + return self.container.add_input(self.name, self.state_path, 1, True) + return self.ix + + def add_input(self, var_name, var_path, input_type = 1, trust = False): + # this will add to the inputs, but also insure that this + # requested path gets added to the state/exec stack via an input object if it does + # not already exist. 
+ # - var_name = the local name for this linked entity/attribute + # - var_path = the full path of the entity/attribute we are linking to + # - input types: 1: parent-child link, 2: state property link, 3: timeseries object property link + # - trust = False means fail if the path does not already exist, True means assume it will be OK which is bad policy, except for the case where the path points to an existing location + # do we have a path here already or can we find on the parent? + # how do we check if this is a path already, in which case we trust it? + # todo: we should be able to alias a var_name to a var_path, for example + # calling add_input('movar', 'month', 1, True) + # this *should* search for month and find the STATE/month variable + # BUT this only works if both var_name and var_path are month + # so add_input('month', 'month', 1, True) works. + found_path = self.find_var_path(var_path) + #print("Searched", var_name, "with path", var_path,"found", found_path) + var_ix = get_state_ix(self.state_ix, self.state_paths, found_path) + if var_ix == False: + if (trust == False): + raise Exception("Cannot find variable path: " + var_path + " when adding input to object " + self.name + " as input named " + var_name + " ... process terminated. Path to object with error is " + self.state_path) + var_ix = self.insure_path(var_path) + else: + # if we are to trust the path, this might be a child property just added, + # and therefore, we don't look further than this + # otherwise, we use found_path, whichever it is, as + # we know that this path is better, as we may have been given a simple variable name + # and so found_path will look more like /STATE/RCHRES_001/... + if trust == False: + var_path = found_path + self.inputs[var_name] = var_path + self.inputs_ix[var_name] = var_ix + # Should we create a register for the input to be reported here? 
+ # i.e., if we have an input named Qin on RCHRES_R001, shouldn't we be able + # to find the data in /STATE/RCHRES_R001/Qin ??? It is redundant data and writing + # but matches a complete data model and prevents stale data? + return self.inputs_ix[var_name] + + def add_object_input(self, var_name, var_object, link_type = 1): + # See above for details. + # this adds an object as a link to another object + self.inputs[var_name] = var_object.state_path + self.inputs_ix[var_name] = var_object.ix + return self.inputs_ix[var_name] + + def create_parent_var(self, parent_var_name, source): + # see decision points: https://github.com/HARPgroup/HSPsquared/issues/78 + # This is used when an object sets an additional property on its parent + # Like in simple_channel sets [channel prop name]_Qout on its parent + # Generally, this should have 2 components. + # 1 - a state variable on the child (this could be an implicit sub-comp, or a constant sub-comp, the child handles the setup of this) see constant_or_path() + # 2 - an input link + # the beauty of this is that the parent object and any of it's children will find the variable "[source_object]_varname" + if type(source) == str: + self.container.add_input(parent_var_name, source, 1, False) + elif isinstance(source, ModelObject): + self.container.add_object_input(parent_var_name, source, 1) + + def insure_path(self, var_path): + # if this path can be found in the hdf5 make sure that it is registered in state + # and that it has needed object class to render it at runtime (some are automatic) + # RIGHT NOW THIS DOES NOTHING TO CHECK IF THE VAR EXISTS THIS MUST BE FIXED + var_ix = set_state(self.state_ix, self.state_paths, var_path, 0.0) + return var_ix + + def get_dict_state(self, ix = -1): + if ix >= 0: + return self.dict_ix[ix] + return self.dict_ix[self.ix] + + def find_paths(self): + # Note: every single piece of data used by objects, even constants, are resolved to a PATH in the hdf5 + # find_paths() is called to insure that 
all of these can be found, and then, are added to inputs/inputs_ix + # - We wait to find the index values for those variables after all things have been loaded + # - base ModelObject does not have any "implicit" inputs, since all of its inputs are + # explicitly added children objects, thus we default to True + self.paths_found = True + # - But children such as Equation and DataMatrix, etc + # so they mark paths_found = False and then + # should go through their own locally defined data + # and call add_input() for any data variables encountered + # - add_input() will handle searching for the paths and ix values + # and should also handle deciding if this is a constant, like a numeric value + # or a variable data and should handle them accordingly + return True + + def insure_register(self, var_name, default_value, register_container, register_path = False, is_accumulator = True): + # we send with local_only = True so it won't go upstream + if register_path == False: + register_path = register_container.find_var_path(var_name, True) + if (register_path == False) or (register_path not in self.model_object_cache.keys()): + # create a register as a placeholder for the data at the hub path + # in case there are no senders, or in the case of a timeseries logger, we need to register it so that its path can be set to hold data + #print("Creating a register for data for hub ", register_container.name, "(", register_container.state_path, ")", " var name ",var_name) + if (is_accumulator == True): + var_register = ModelRegister(var_name, register_container, default_value, register_path) + else: + # this is just a standard numerical data holder so set up a constant + var_register = ModelConstant(var_name, register_container, default_value, register_path) + else: + var_register = self.model_object_cache[register_path] + return var_register + + def tokenize(self): + # renders tokens for high speed execution + if (self.paths_found == False): + raise Exception("path_found False 
for object" + self.name + "(" + self.state_path + "). " + "Tokens cannot be generated until method '.find_paths()' is run for all model objects ... process terminated. (see function `model_path_loader(model_object_cache)`)") + self.ops = [self.optype, self.ix] + + def add_op_tokens(self): + # this puts the tokens into the global simulation queue + # can be customized by subclasses to add multiple lines if needed. + if self.ops == []: + self.tokenize() + #print(self.state_path, "tokens", self.ops) + if len(self.ops) > self.max_token_length: + raise Exception("op tokens cannot exceed max length of" + self.max_token_length + "(" + self.state_path + "). ") + self.op_tokens[self.ix] = self.format_ops() + + def step(self, step): + # this tests the model for a single timestep. + # this is not the method that is used for high-speed runs, but can theoretically be used for + # easier to understand demonstrations + step_one(self.op_tokens, self.op_tokens[self.ix], self.state_ix, self.dict_ix, self.ts_ix, step) + #step_model({self.op_tokens[self.ix]}, self.state_ix, self.dict_ix, self.ts_ix, step) + + def dddstep_model(op_tokens, state_ix, dict_ix, ts_ix, step): + for i in op_tokens.keys(): + if op_tokens[i][0] == 1: + state_ix[i] = step_equation(op_tokens[i], state_ix) + elif op_tokens[i][0] == 2: + state_ix[i] = exec_tbl_eval(op_tokens[i], state_ix, dict_ix) + elif op_tokens[i][0] == 3: + step_model_link(op_tokens[i], state_ix, ts_ix, step) + elif op_tokens[i][0] == 4: + return False + elif op_tokens[i][0] == 5: + step_sim_timer(op_tokens[i], state_ix, dict_ix, ts_ix, step) + return + +""" +The class ModelConstant is for storing constants. It must be loaded here because ModelObject calls it. +Is this useful or just clutter? Useful I think since there are numerical constants... +""" +class ModelConstant(ModelObject): + def __init__(self, name, container = False, value = 0.0, state_path = False): + if (state_path != False): + # this allows us to mandate the location. 
useful for placeholders, broadcasts, etc. + self.state_path = state_path + super(ModelConstant, self).__init__(name, container) + self.default_value = float(value) + self.optype = 7 # 0 - shell object, 1 - equation, 2 - datamatrix, 3 - input, 4 - broadcastChannel, 5 - SimTimer, 6 - Conditional, 7 - ModelConstant (numeric) + #print("ModelConstant named",self.name, "with path", self.state_path,"and ix", self.ix, "value", value) + var_ix = self.set_state(float(value)) + self.paths_found = True + # self.state_ix[self.ix] = self.default_value + + def required_properties(): + req_props = super(ModelConstant, ModelConstant).required_properties() + req_props.extend(['value']) + return req_props + + +""" +The class ModelRegister is for storing push values. +Behavior is to zero each timestep. This could be amended later. +Maybe combined with stack behavior? Or accumulator? +""" +class ModelRegister(ModelConstant): + def __init__(self, name, container = False, value = 0.0, state_path = False): + super(ModelRegister, self).__init__(name, container, value, state_path) + self.optype = 12 # + # self.state_ix[self.ix] = self.default_value + + def required_properties(): + req_props = super(ModelConstant, ModelConstant).required_properties() + req_props.extend(['value']) + return req_props + +# njit functions for runtime +@njit +def pre_step_register(op, state_ix): + ix = op[1] + #print("Resetting register", ix,"to zero") + state_ix[ix] = 0.0 + return + +# Note: ModelConstant has not runtime execution + +@njit +def exec_model_object( op, state_ix, dict_ix): + ix = op[1] + return 0.0 + + +# njit functions for end of model run +@njit +def finish_model_object(op_token, state_ix, ts_ix): + return + + +@njit +def finish_register(op_token, state_ix, ts_ix): + # todo: push the values of ts_ix back to the hdf5? or does this happen in larger simulation as it is external to OM? 
+ return \ No newline at end of file diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index 800dbf51..c8d81278 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -1,168 +1,176 @@ -""" -The class SpecialAction is used to support original HSPF ACTIONS. -""" -from HSP2.state import * -from HSP2.om import * -from HSP2.om_model_object import ModelObject -from numba import njit -class SpecialAction(ModelObject): - def __init__(self, name, container = False, model_props = {}): - super(SpecialAction, self).__init__(name, container, model_props) - - self.optype = 100 # Special Actions start indexing at 100 - - def parse_model_props(self, model_props, strict=False): - super().parse_model_props(model_props, strict) - # comes in as row from special ACTIONS table - # ex: { - # 'OPTYP': 'RCHRES', 'RANGE1': '1', 'RANGE2': '', 'DC': 'DY', 'DS': '', - # 'YR': '1986', 'MO': '3', 'DA': '1', 'HR': '12', 'MN': '', - # 'D': '2', 'T': 3, 'VARI': 'IVOL', 'S1': '', 'S2': '', - # 'AC': '+=', 'VALUE': 30.0, 'TC': '', 'TS': '', 'NUM': '', 'CURLVL': 0, - # defined by: - # - operand1, i.e. variable to access + update, path = /STATE/[OPTYP]_[op_abbrev][RANGE1]/[VARI] - # - action(operation) to perform = AC - # - operand2, a numeric value for simple ACTION = [VALUE] - # note: [op_abbrev] is *maybe* the first letter of the OPTYP? Not a very good idea to have a coded convention like that - self.op_type = self.handle_prop(model_props, 'OPTYP') - self.range1 = self.handle_prop(model_props, 'RANGE1') - self.range2 = self.handle_prop(model_props, 'RANGE2') - self.ac = '=' # set the default, and also adds a property for later testing. 
- self.ac = self.handle_prop(model_props, 'AC') # must handle this before we handle the operand VALUE to check for DIV by Zero - self.vari = self.handle_prop(model_props, 'VARI') - self.op2_val = self.handle_prop(model_props, 'VALUE') - self.op2_ix = self.constant_or_path('op_val', self.op2_val) # constant values must be added to STATE and thus are referenced by their state_ix number - self.num = self.handle_prop(model_props, 'NUM', False, 1) # number of times to perform action - self.timer_ix = self.handle_prop(model_props, 'when', False, 1) # when to begin the first attempt at action - self.ctr_ix = self.constant_or_path('ctr', 0) # this initializes the counter for how many times an action has been performed - # now add the state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially - self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) - # @tbd: support time enable/disable - # - check if time ops have been set and add as inputs like "year", or "month", etc could give explicit path /STATE/year ... - # - add the time values to match as constants i.e. self.constant_or_path() - - def handle_prop(self, model_props, prop_name, strict = False, default_value = None ): - # Insure all values are legal ex: no DIV by Zero - prop_val = super().handle_prop(model_props, prop_name, strict, default_value ) - if (prop_name == 'VALUE') and (self.ac == '/='): - if (prop_val == 0) or (prop_val == None): - raise Exception("Error: in properties passed to "+ self.name + " AC must be non-zero or non-Null . Object creation halted. Path to object with error is " + self.state_path) - if (prop_name == 'AC'): - self.handle_ac(prop_val) - if (prop_name == 'when'): - # when to perform this? 
timestamp or time-step index - prop_val = 0 - si = ModelObject.model_object_cache['/STATE/timer'] - if len(model_props['YR']) > 0: - # translate date to equivalent model step - datestring = model_props['YR'] + '-' + model_props['MO'] + '-' + \ - model_props['DA'] + ' ' + model_props['HR'] + ':' + \ - model_props['MN'] + ':00' - if datestring in si.model_props_parsed['tindex']: - prop_val = si.model_props_parsed['tindex'].get_loc(datestring) - if (prop_name == 'NUM') and (prop_val == ''): - prop_val = default_value - return prop_val - - def handle_ac(self, ac): - # cop_code 0: =/eq, 1: /gt, 3: <=/le, 4: >=/ge, 5: <>/ne - cop_codes = { - '=': 1, - '+=': 2, - '-=': 3, - '*=': 4, - '/=': 5, - 'MIN': 6 - } - # From HSPF UCI docs: - # 1 = T= A - # 2 += T= T+ A - # 3 -= T= T- A - # 4 *= T= T*A - # 5 /= T= T/A - # 6 MIN T= Min(T,A) - # 7 MAX T= Max(T,A) - # 8 ABS T= Abs(A) - # 9 INT T= Int(A) - # 10 ^= T= T^A - # 11 LN T= Ln(A) - # 12 LOG T= Log10(A) - # 13 MOD T= Mod(T,A) - if not (is_float_digit(ac)): - if not (ac in cop_codes.keys()): - raise Exception("Error: in "+ self.name + " AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path) - opid = cop_codes[ac] - self.ac = ac - else: - # this will fail catastrophically if the requested function is not supported - # which is a good thing - if not (ac in cop_codes.values()): - raise Exception("Error: in "+ self.name + "numeric AC (" + ac + ") not supported. Object creation halted. 
"""
Support for original HSPF Special ACTIONS.

SpecialAction parses one row of the UCI SPEC-ACTIONS table into runtime op
tokens; step_special_action() executes those tokens each model time step.
"""
from HSP2.state import *
from HSP2.om import *
from HSP2.om_model_object import ModelObject
from numba import njit


class SpecialAction(ModelObject):
    """A single HSPF Special ACTION.

    Applies an operator (=, +=, -=, *=, /=) with a numeric operand to a
    target state variable, optionally gated by a start time ('when') and a
    maximum number of executions ('NUM').
    """

    def __init__(self, name, container=False, model_props={}):
        # model_props is read-only here, so the shared default dict is safe
        super(SpecialAction, self).__init__(name, container, model_props)
        self.optype = 100  # Special Actions start indexing at 100

    def parse_model_props(self, model_props, strict=False):
        """Populate attributes from one row of the special ACTIONS table.

        Example row:
          {'OPTYP': 'RCHRES', 'RANGE1': '1', 'RANGE2': '', 'DC': 'DY', 'DS': '',
           'YR': '1986', 'MO': '3', 'DA': '1', 'HR': '12', 'MN': '',
           'D': '2', 'T': 3, 'VARI': 'IVOL', 'S1': '', 'S2': '',
           'AC': '+=', 'VALUE': 30.0, 'TC': '', 'TS': '', 'NUM': '', 'CURLVL': 0}

        An action is defined by:
          - operand1 (target), path /STATE/[OPTYP]_[op_abbrev][RANGE1]/[VARI]
          - the operation to perform (AC)
          - operand2, a numeric value for a simple ACTION (VALUE)
        NOTE(review): [op_abbrev] is assumed to be the first letter of OPTYP --
        a fragile coded convention; confirm against the state path builders.
        """
        super().parse_model_props(model_props, strict)
        self.op_type = self.handle_prop(model_props, 'OPTYP')
        self.range1 = self.handle_prop(model_props, 'RANGE1')
        self.range2 = self.handle_prop(model_props, 'RANGE2')
        self.ac = '='  # default; also creates the attribute before AC is parsed
        # AC must be handled before VALUE so the '/=' divide-by-zero check can run
        self.ac = self.handle_prop(model_props, 'AC')
        self.vari = self.handle_prop(model_props, 'VARI')
        self.op2_val = self.handle_prop(model_props, 'VALUE')
        # constant values must be added to STATE and are referenced by state_ix number
        self.op2_ix = self.constant_or_path('op_val', self.op2_val)
        self.num = self.handle_prop(model_props, 'NUM', False, 1)  # times to perform action
        self.timer_ix = self.handle_prop(model_props, 'when', False, 1)  # first step to act
        self.ctr_ix = self.constant_or_path('ctr', 0)  # counts how many times performed
        # Register the target variable on its domain as an input so this op
        # executes AFTER the target is initially set.
        domain = self.model_object_cache[('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3))]
        var_register = self.insure_register(self.vari, 0.0, domain, False)
        var_register.add_object_input(self.name, self, 1)
        self.op1_ix = var_register.ix
        # @tbd: support time enable/disable -- add inputs like "year", "month"
        #       (explicit paths /STATE/year, ...) plus constants to match.

    def handle_prop(self, model_props, prop_name, strict=False, default_value=None):
        """Fetch and validate one property; rejects '/=' with a zero operand.

        'when' is translated from the YR/MO/DA/HR/MN columns into a model
        step index via the shared 'timer' object's tindex (0 = no gating).
        """
        prop_val = super().handle_prop(model_props, prop_name, strict, default_value)
        if (prop_name == 'VALUE') and (self.ac == '/='):
            if (prop_val == 0) or (prop_val is None):
                raise Exception("Error: in properties passed to " + self.name + " AC must be non-zero or non-Null . Object creation halted. Path to object with error is " + self.state_path)
        if prop_name == 'AC':
            self.handle_ac(prop_val)
        if prop_name == 'when':
            # when to perform this? timestamp converted to time-step index
            prop_val = 0
            si = self.model_object_cache[self.find_var_path('timer')]
            if len(model_props['YR']) > 0:
                # translate date to equivalent model step
                datestring = model_props['YR'] + '-' + model_props['MO'] + '-' + \
                    model_props['DA'] + ' ' + model_props['HR'] + ':' + \
                    model_props['MN'] + ':00'
                if datestring in si.model_props_parsed['tindex']:
                    prop_val = si.model_props_parsed['tindex'].get_loc(datestring)
        if (prop_name == 'NUM') and (prop_val == ''):
            prop_val = default_value
        return prop_val

    def handle_ac(self, ac):
        """Translate the UCI action code into an integer opid for the runtime.

        Sets self.ac (symbolic form) and self.opid (integer form); raises on
        unsupported codes.
        """
        # Supported subset of the HSPF AC table (symbol -> opid):
        #   1 '='   T = A         4 '*='  T = T * A
        #   2 '+='  T = T + A     5 '/='  T = T / A
        #   3 '-='  T = T - A     6 'MIN' T = Min(T, A)
        # HSPF also defines 7 MAX, 8 ABS, 9 INT, 10 ^=, 11 LN, 12 LOG,
        # 13 MOD -- not implemented here.
        cop_codes = {
            '=': 1,
            '+=': 2,
            '-=': 3,
            '*=': 4,
            '/=': 5,
            'MIN': 6
        }
        if not is_float_digit(ac):
            if ac not in cop_codes.keys():
                raise Exception("Error: in " + self.name + " AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path)
            opid = cop_codes[ac]
            self.ac = ac
        else:
            # numeric AC supplied directly: must be one of the opids above;
            # failing loudly here is intentional
            if ac not in cop_codes.values():
                raise Exception("Error: in " + self.name + " numeric AC (" + ac + ") not supported. Object creation halted. Path to object with error is " + self.state_path)
            opid = ac
            self.ac = list(cop_codes.keys())[list(cop_codes.values()).index(ac)]
        self.opid = opid

    def tokenize(self):
        """Append this action's tokens after the common op tokens."""
        super().tokenize()  # sets self.ops = op_type, op_ix
        # NOTE(review): self.num is appended as a literal count, but
        # step_special_action() reads op[7] as a state_ix key -- confirm intent.
        self.ops = self.ops + [self.op1_ix, self.opid, self.op2_ix, self.timer_ix, self.ctr_ix, self.num]
        # @tbd: check if time ops have been set and tokenize accordingly

    def add_op_tokens(self):
        """Put this object's tokens into the global simulation queue.

        Subclasses may override to emit multiple token lines if needed.
        """
        super().add_op_tokens()

    @staticmethod
    def hdf5_load_all(hdf_source):
        """Debug helper: print selected columns of /SPEC_ACTIONS/ACTIONS."""
        specla = hdf_source['/SPEC_ACTIONS/ACTIONS/table']
        for idx, x in np.ndenumerate(specla):
            print(x[1].decode("utf-8"), x[2].decode("utf-8"), x[13].decode("utf-8"), x[16].decode("utf-8"), x[17])


# njit functions for runtime

@njit(cache=True)
def step_special_action(op, state_ix, dict_ix, step):
    """Execute one tokenized special action at the given model step.

    Token layout (after the 2 common tokens: op_type, op_ix):
      op[2] target state_ix key (source AND destination of the value)
      op[3] operator id (see SpecialAction.handle_ac)
      op[4] operand state_ix key
      op[5] state_ix key of the earliest step at which to act (gating)
      op[6] state_ix key of the execution counter
      op[7] max number of executions (see NOTE in tokenize)
    Returns the value written, or False when the action is gated off.
    """
    ix1 = op[2]
    sop = op[3]
    ix2 = op[4]
    tix = op[5]  # which slot is the time comparison in?
    if (tix in state_ix) and (step < state_ix[tix]):
        return False  # not yet time to act
    ctr_ix = op[6]  # id of the counter variable
    num_ix = op[7]  # max times to complete
    num_done = state_ix[ctr_ix]
    num = state_ix[num_ix]  # num to complete
    if (tix in state_ix) and (num_done >= num):
        return False  # already performed the requested number of times
    # Default to the current target value so result is defined on every path;
    # an unsupported opid then leaves the target unchanged instead of
    # referencing an unbound local.
    result = state_ix[ix1]
    if sop == 1:
        result = state_ix[ix2]
    elif sop == 2:
        result = state_ix[ix1] + state_ix[ix2]
    elif sop == 3:
        result = state_ix[ix1] - state_ix[ix2]
    elif sop == 4:
        result = state_ix[ix1] * state_ix[ix2]
    elif sop == 5:
        result = state_ix[ix1] / state_ix[ix2]
    # NOTE(review): opid 6 (MIN) is accepted by handle_ac but has no runtime
    # branch here, and the counter at ctr_ix is never incremented -- confirm.
    # tbd: handle target write with a model linkage? cons: makes a loop since
    # ix1 is both source and destination.
    state_ix[ix1] = result
    state_ix[op[1]] = result  # also store on this op's own ix
    return result
''' General routines for SPECL '''

import numpy as np
import time
from pandas import DataFrame, date_range
from pandas.tseries.offsets import Minute
from numba.typed import Dict
from numpy import zeros
from numba import int8, float32, njit, types, typed  # import the types
import os
import importlib.util
import sys


def init_state_dicts():
    """Create the base dictionaries used to pass model state among modules
    and custom code plugins.

    Returns a plain dict holding numba-typed Dicts keyed as 'state_paths',
    'state_ix', 'dict_ix', 'ts_ix', plus 'model_data' for dynamic components.
    """
    state = {}  # shared state Dictionary, contains numba-ready Dicts
    state_paths = Dict.empty(key_type=types.unicode_type, value_type=types.int64)
    state_ix = Dict.empty(key_type=types.int64, value_type=types.float64)
    dict_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:, :])
    ts_ix = Dict.empty(key_type=types.int64, value_type=types.float64[:])
    # now put all of these Dicts into the state Dict
    state['state_paths'], state['state_ix'], state['dict_ix'], state['ts_ix'] = state_paths, state_ix, dict_ix, ts_ix
    # add a generic place to stash model_data for dynamic components
    state['model_data'] = {}
    return state


def op_path_name(operation, id):
    """Generate the hdf5 operation name (e.g. RCHRES_R001) centrally to
    avoid naming-convention slip-ups."""
    tid = str(id).zfill(3)
    path_name = f'{operation}_{operation[0]}{tid}'
    return path_name


def get_op_state_path(operation, id, activity=''):
    """Generate an hdf5 state path (e.g. /STATE/RCHRES_R001[/HYDR])
    centrally to avoid naming-convention slip-ups."""
    op_name = op_path_name(operation, id)
    if activity == '':
        op_path = f'/STATE/{op_name}'
    else:
        op_path = f'/STATE/{op_name}/{activity}'
    return op_path


def get_state_ix(state_ix, state_paths, var_path):
    """Find the integer key of a variable path in state_ix.

    Returns False when the path is unknown (callers test truthiness;
    should arguably raise instead).
    """
    if not (var_path in list(state_paths.keys())):
        # we need to add this to the state
        return False  # should throw an error
    var_ix = state_paths[var_path]
    return var_ix


def get_ix_path(state_paths, var_ix):
    """Reverse lookup: find the variable path for an integer state key.

    Returns False when the key is unknown.
    """
    for spath, ix in state_paths.items():
        if var_ix == ix:
            return spath
    return False


def set_state(state_ix, state_paths, var_path, default_value=0.0, debug=False):
    """Given an hdf5-style path to a variable, set its value.

    If the variable does not yet exist, create it.
    Returns the integer key of the variable in the state_ix Dict.
    """
    if not (var_path in state_paths.keys()):
        # we need to add this to the state
        state_paths[var_path] = append_state(state_ix, default_value)
    var_ix = get_state_ix(state_ix, state_paths, var_path)
    if debug == True:
        print("Setting state_ix[", var_ix, "], to", default_value)
    state_ix[var_ix] = default_value
    return var_ix


def set_dict_state(state_ix, dict_ix, state_paths, var_path, default_value={}):
    """Given an hdf5-style path to a variable, set the value in the dict.

    If the variable does not yet exist, create it.
    Returns the integer key of the variable in the state_ix Dict.
    NOTE(review): default_value is appended to state_ix, dict_ix is unused
    here -- confirm intent.
    """
    if not (var_path in state_paths.keys()):
        # we need to add this to the state
        state_paths[var_path] = append_state(state_ix, default_value)
    var_ix = get_state_ix(state_ix, state_paths, var_path)
    return var_ix


def append_state(state_ix, var_value):
    """Add a new variable on the end of the state_ix Dict.

    Returns the key of this new variable (keys start at 1).
    """
    if len(state_ix) == 0:
        val_ix = 1
    else:
        val_ix = max(state_ix.keys()) + 1  # next ix value
    state_ix[val_ix] = var_value
    return val_ix


def state_context_hsp2(state, operation, segment, activity):
    """Establish domain info in state so a module can know its paths.

    Also records the segment in state['hsp_segments'] for later use by
    anything that needs to know hsp entities and their paths, and sets
    state['domain'] as a shortcut for upcoming functions.
    """
    state['operation'] = operation
    state['segment'] = segment
    state['activity'] = activity
    # insure that there is a model object container
    seg_name = operation + "_" + segment
    seg_path = '/STATE/' + seg_name
    if 'hsp_segments' not in state.keys():
        state['hsp_segments'] = {}
    if seg_name not in state['hsp_segments'].keys():
        state['hsp_segments'][seg_name] = seg_path
    state['domain'] = seg_path  # + "/" + activity # may want to add activity?


def state_siminfo_hsp2(uci_obj, siminfo):
    """Add crucial simulation info (delt, tindex, steps) for dynamic
    operation support."""
    delt = uci_obj.opseq.INDELT_minutes[0]  # get initial value for STATE objects
    siminfo['delt'] = delt
    siminfo['tindex'] = date_range(siminfo['start'], siminfo['stop'], freq=Minute(delt))[1:]
    siminfo['steps'] = len(siminfo['tindex'])


def state_init_hsp2(state, opseq, activities):
    """Set up state entries for all state-compatible HSP2 model variables.

    Iterates the operation sequence and initializes HYDR/SEDTRN state paths
    for each non-GENER/COPY segment.
    """
    for _, operation, segment, delt in opseq.itertuples():
        if operation not in ('GENER', 'COPY'):
            for activity, function in activities[operation].items():
                if activity == 'HYDR':
                    state_context_hsp2(state, operation, segment, activity)
                    print("Init HYDR state context for domain", state['domain'])
                    hydr_init_ix(state, state['domain'])
                elif activity == 'SEDTRN':
                    state_context_hsp2(state, operation, segment, activity)
                    sedtrn_init_ix(state, state['domain'])


def state_load_hdf5_components(io_manager, siminfo, op_tokens, state_paths, state_ix, dict_ix, ts_ix, model_object_cache):
    """Placeholder: populate model_object_cache etc. from hdf5 components
    such as Special ACTIONS."""
    return


def state_load_dynamics_hsp2(state, io_manager, siminfo):
    """Load any dynamic components if present, and store variables on objects.

    Stores the enabled/disabled flag and the loaded module (or False) in state.
    """
    hsp2_local_py = load_dynamics(io_manager, siminfo)
    # if a local file with state_step_hydr() was found in load_dynamics(), we add it to state
    state['state_step_hydr'] = siminfo['state_step_hydr']  # enabled or disabled
    state['hsp2_local_py'] = hsp2_local_py  # Stores the actual function in state


def hydr_init_ix(state, domain):
    """Create state entries for all HYDR state variables under domain.

    Returns a typed Dict mapping variable name -> state_ix key.
    """
    hydr_state = ["DEP", "IVOL", "O1", "O2", "O3", "OVOL1", "OVOL2", "OVOL3", "PRSUPY", "RO", "ROVOL", "SAREA", "TAU", "USTAR", "VOL", "VOLEV"]
    hydr_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64)
    for i in hydr_state:
        var_path = domain + "/" + i
        hydr_ix[i] = set_state(state['state_ix'], state['state_paths'], var_path, 0.0)
    return hydr_ix


def sedtrn_init_ix(state, domain):
    """Create state entries for all SEDTRN state variables under domain.

    Returns a typed Dict mapping variable name -> state_ix key.
    """
    sedtrn_state = ["RSED4", "RSED5", "RSED6"]
    sedtrn_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64)
    for i in sedtrn_state:
        var_path = domain + "/" + i
        sedtrn_ix[i] = set_state(state['state_ix'], state['state_paths'], var_path, 0.0)
    return sedtrn_ix


@njit
def hydr_get_ix(state_ix, state_paths, domain):
    """Look up (no creation) the state keys for all HYDR variables under
    domain; paths must already exist."""
    hydr_state = ["DEP", "IVOL", "O1", "O2", "O3", "OVOL1", "OVOL2", "OVOL3", "PRSUPY", "RO", "ROVOL", "SAREA", "TAU", "USTAR", "VOL", "VOLEV"]
    hydr_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64)
    for i in hydr_state:
        var_path = domain + "/" + i
        hydr_ix[i] = state_paths[var_path]
    return hydr_ix


@njit
def sedtrn_get_ix(state_ix, state_paths, domain):
    """Look up (no creation) the state keys for all SEDTRN variables under
    domain; paths must already exist."""
    sedtrn_state = ["RSED4", "RSED5", "RSED6"]
    sedtrn_ix = Dict.empty(key_type=types.unicode_type, value_type=types.int64)
    for i in sedtrn_state:
        var_path = domain + "/" + i
        sedtrn_ix[i] = state_paths[var_path]
    return sedtrn_ix


# function to dynamically load module, based on "Using imp module" in
# https://www.tutorialspoint.com/How-I-can-dynamically-import-Python-module#
def dynamic_module_import(local_name, local_path, module_name):
    """Dynamically import a module from a file path.

    Registers it in sys.modules under both its spec name and module_name.
    Returns the module, or False when no custom code is present (a missing
    file is legitimate, so import failures are deliberately swallowed).
    """
    module = False
    local_spec = False
    try:
        local_spec = importlib.util.spec_from_file_location(local_name, local_path)
    except ImportError:
        print("Imported module {} not found".format(local_name))
    try:
        if local_spec is not False:
            module = importlib.util.module_from_spec(local_spec)
            sys.modules[local_spec.name] = module
            sys.modules[module_name] = module
            local_spec.loader.exec_module(module)
            print("Imported custom module {}".format(local_path))
    except Exception as e:
        # not really an exception: it's legit to have no custom python code
        pass
    return module


def load_dynamics(io_manager, siminfo):
    """Look for a .py file next to the model hdf5 with custom SPECL code.

    Sets siminfo['state_step_hydr'] to 'enabled'/'disabled'; returns the
    loaded module when state_step_hydr() is defined, otherwise False.
    """
    hdf5_path = io_manager._input.file_path
    (fbase, fext) = os.path.splitext(hdf5_path)
    # see if there is a code module with custom python
    hsp2_local_py = dynamic_module_import(fbase, fbase + ".py", "hsp2_local_py")
    siminfo['state_step_hydr'] = 'disabled'
    if 'state_step_hydr' in dir(hsp2_local_py):
        siminfo['state_step_hydr'] = 'enabled'
        print("state_step_hydr function defined, using custom python code")
    else:
        # state_step_hydr function not defined; use default
        return False
    return hsp2_local_py
27.5 - END SNOW-INIT1 - - SNOW-INIT2 - Initial snow conditions: Part 2 *** - # - # COVINX XLNMLT SKYCLR *** - 1 0.50 0.0 1.0 - END SNOW-INIT2 - - *** Section PWATER *** - - PWAT-PARM1 - PWATER variable monthly parameter value flags *** - # - # CSNO RTOP UZFG VCS VUZ VNN VIFW VIRC VLE *** - 1 1 0 0 1 1 1 0 0 1 - END PWAT-PARM1 - - PWAT-PARM2 - *** PWATER input info: Part 2 - # - # ***FOREST LZSN INFILT LSUR SLSUR KVARY AGWRC - 1 0.010 8.0 0.150 250. 0.050 0.5 0.98 - END PWAT-PARM2 - - PWAT-PARM3 - *** PWATER input info: Part 3 - # - # ***PETMAX PETMIN INFEXP INFILD DEEPFR BASETP AGWETP - 1 40. 35. 2.0 2.0 0.10 0.0 0.08 - END PWAT-PARM3 - - PWAT-PARM4 - PWATER input info: Part 4 *** - # - # CEPSC UZSN NSUR INTFW IRC LZETP *** - 1 0.01 0.1 1.0 0.60 - END PWAT-PARM4 - - MON-INTERCEP - Only required if VCSFG=1 in PWAT-PARM1 *** - # - # Interception storage capacity at start of each month *** - JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** - 1 0.04 0.04 0.03 0.03 0.03 0.03 0.10 0.17 0.19 0.14 0.05 0.04 - END MON-INTERCEP - - MON-UZSN - Only required if VUZFG=1 in PWAT-PARM1 *** - # - # Upper zone storage at start of each month *** - JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** - 1 0.4 0.4 0.4 0.4 1.6 1.1 1.1 1.3 1.3 1.3 1.1 0.9 - END MON-UZSN - - MON-MANNING - Only required if VNNFG=1 in PWAT-PARM1 *** - # - # Manning's n for overland flow at start of each month *** - JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** - 1 0.30 0.30 0.30 0.30 0.27 0.25 0.25 0.25 0.25 0.25 0.35 0.33 - END MON-MANNING - - MON-LZETPARM - Only required if VLEFG=1 in PWAT-PARM1 *** - # - # Lower zone ET parameter at start of each month *** - JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** - 1 0.20 0.20 0.20 0.23 0.23 0.25 0.60 0.80 0.75 0.50 0.30 0.20 - END MON-LZETPARM - - PWAT-STATE1 - *** Initial conditions at start of simulation - # - # *** CEPS SURS UZS IFWS LZS AGWS GWVS - 1 0.05 0.0 0.15 0.0 4.0 0.05 0.05 - END PWAT-STATE1 - - *** Section PSTEMP *** - - PSTEMP-PARM2 - 
*** - # - # ASLT BSLT ULTP1 ULTP2 LGTP1 LGTP2 *** - 1 14.5 .365 1.2 4.0 1.2 6.0 - END PSTEMP-PARM2 - - *** Section PWTGAS *** - - PWT-PARM2 - *** - # - # ELEV IDOXP ICO2P ADOXP ACO2P *** - 1 500. 6. .05 5. .05 - END PWT-PARM2 -END PERLND - -IMPLND - ACTIVITY - Active Sections *** - # - # ATMP SNOW IWAT SLD IWG IQAL *** - 1 1 1 1 1 1 - END ACTIVITY - - PRINT-INFO - Print-flags *** - # - # ATMP SNOW IWAT SLD IWG IQAL PIVL PYR *** - 1 4 4 4 4 4 12 - END PRINT-INFO - - GEN-INFO - <-------Name-------> Unit-systems Printer *** - # - # t-series Engl Metr *** - in out *** - 1 DONIGIAN INDUSTRY 1 1 1 0 - END GEN-INFO - - *** Section SNOW *** - - ICE-FLAG - 0= Ice formation not simulated, 1= Simulated *** - # - #ICEFG *** - 1 1 - END ICE-FLAG - - SNOW-PARM1 - Snow input info: Part 1 *** - # - # LAT MELEV SHADE SNOWCF COVIND *** - 1 42. 450. 0.0 1.45 0.5 - END SNOW-PARM1 - - SNOW-PARM2 - Snow input info: Part 2 *** - # - # RDCSN TSNOW SNOEVP CCFACT MWATER MGMELT *** - 1 0.12 32. 0.05 0.5 0.08 0.0001 - END SNOW-PARM2 - - SNOW-INIT1 - Initial snow conditions: Part 1 *** - # - # PACKSNOW PACKICE PACKWATER RDENPF DULL PAKTMP *** - 1 1.4 0.2 0.1 0.2 375. 27.5 - END SNOW-INIT1 - - SNOW-INIT2 - Initial snow conditions: Part 2 *** - # - # COVINX XLNMLT SKYCLR *** - 1 0.50 0.0 1.0 - END SNOW-INIT2 - - *** Section IWATER *** - - IWAT-PARM1 - Flags *** - # - # CSNO RTOP VRS VNN RTLI *** - 1 1 1 - END IWAT-PARM1 - - IWAT-PARM2 - *** - # - # LSUR SLSUR NSUR RETSC *** - 1 200. .010 .010 .01 - END IWAT-PARM2 - - IWAT-PARM3 - *** - # - # PETMAX PETMIN *** - 1 40. 35. - END IWAT-PARM3 - - IWAT-STATE1 - IWATER state variables *** - # - # RETS SURS *** - 1 .01 .01 - END IWAT-STATE1 - - *** Section SOLIDS *** - - SLD-PARM2 - *** - # - # KEIM JEIM ACCSDP REMSDP *** - 1 .08 1.9 .01 .5 - END SLD-PARM2 - - SLD-STOR - Solids storage (tons/acre) *** - # - # *** - 1 0.2 - END SLD-STOR - - *** Section IWTGAS *** - - IWT-PARM2 - *** - # - # ELEV AWTF BWTF *** - 1 410. 40. 
0.8 - END IWT-PARM2 - - *** Section IQUAL *** - - NQUALS - *** - # - #NQUAL *** - 1 1 - END NQUALS - - QUAL-PROPS - Identifiers and Flags *** - # - #<--qualid--> QTID QSD VPFW QSO VQO *** - 1 COD LB 1 1 - END QUAL-PROPS - - QUAL-INPUT - Storage on surface and nonseasonal parameters *** - # - # SQO POTFW ACQOP SQOLIM WSQOP *** - 1 1.20 .175 .02 2.0 1.7 - END QUAL-INPUT -END IMPLND - -RCHRES - ACTIVITY - RCHRES Active Sections (1=Active, 0=Inactive) *** - # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG *** - 1 5 1 1 1 1 1 1 1 1 1 1 - END ACTIVITY - - PRINT-INFO - RCHRES Print-flags *** - # - # HYDR ADCA CONS HEAT SED GQL OXRX NUTR PLNK PHCB PIVL PYR *** - 1 4 5 5 5 5 5 5 5 5 5 5 12 - 5 4 4 4 4 4 4 4 4 4 4 12 - END PRINT-INFO - - GEN-INFO - RCHRES<-------Name------->Nexit Unit Systems Printer *** - # - # t-series Engl Metr LKFG *** - in out *** - 1 MEIER POND 2 1 1 1 0 1 - 2 OUTLET 1 1 1 1 0 - 3 SPILLWAY 1 1 1 1 0 - 4 UPPER KITTLE CREEK 1 1 1 1 0 - 5 LOWER KITTLE CREEK 1 1 1 1 0 - END GEN-INFO - - *** Section HYDR *** - - HYDR-PARM1 - RCHRES Flags for HYDR section *** - # - # VC A1 A2 A3 ODFVFG for each ODGTFG for each *** FUNCT for each - FG FG FG FG possible exit possible exit *** possible exit - 1 2 3 4 5 1 2 3 4 5 *** - - 1 1 1 1 -1 6 - 2 5 1 1 1 4 - END HYDR-PARM1 - - HYDR-PARM2 - RCHRES *** - # - # DSN FTBN LEN DELTH STCOR KS DB50 *** - 1 00 1 0.5 1. .5 - 2 00 2 0.25 20. .5 - 3 00 3 0.25 30. .5 - 4 00 4 2.0 40. .5 - 5 00 5 3.0 40. .5 - END HYDR-PARM2 - - HYDR-INIT - RCHRES Initial conditions for HYDR section *** - # - # VOL Initial value of COLIND *** Initial value of OUTDGT - (ac-ft) for each possible exit *** for each possible exit - EX1 EX2 EX3 EX4 EX5 *** EX1 EX2 EX3 EX4 EX5 - 1 30. 4.0 5.0 - 2 5 0.0 4.0 - END HYDR-INIT - - *** Section CONS *** - - NCONS - RCHRES *** - # - #NCONS *** - 1 5 1 - END NCONS - - CONS-DATA - RCHRES Data for conservative constituent No. 3 *** - # - #<---Substance-id---> Conc ID CONV QTYID *** - 1 5 ALKALINITY 1000. 
MG/L 35.31 KG - END CONS-DATA - - *** Section HTRCH *** - - HEAT-PARM - RCHRES ELEV ELDAT CFSAEX KATRAD KCOND KEVAP *** - # - # *** - 1 5 450. 100. .95 - END HEAT-PARM - - HEAT-INIT - RCHRES TW AIRTMP *** - # - # *** - 1 5 60. 40. - END HEAT-INIT - - *** Section SEDTRN *** - - SANDFG - RCHRES *** - # - # SNDFG *** - 1 2 1 - 3 4 2 - 5 3 - END SANDFG - - SED-GENPARM - RCHRES BEDWID BEDWRN POR *** - # - # *** - 1 200. 4. - 2 3 1.33 3. - 4 2.0 2. - 5 2.66 2. - END SED-GENPARM - - SAND-PM - RCHRES D W RHO KSAND EXPSND *** - # - # *** - 1 5 .014 2.5 1.5 1.2 - END SAND-PM - - SILT-CLAY-PM - RCHRES D W RHO TAUCD TAUCS M *** - # - # *** - 1 .00063 .0066 2.2 .2 .4 .5 - 2 3 .00063 .0066 2.2 1.E-10 500. .5 - 4 5 .00063 .0066 2.2 .2 .4 .5 - END SILT-CLAY-PM - - SILT-CLAY-PM - RCHRES D W RHO TAUCD TAUCS M *** - # - # *** - 1 .000055 .000034 2.0 .15 .3 .75 - 2 3 .000055 .000034 2.0 1.E-10 500. .75 - 4 5 .000055 .000034 2.0 .15 .3 .75 - END SILT-CLAY-PM - - SSED-INIT - RCHRES Suspended sed concs (mg/l) *** - # - # Sand Silt Clay *** - 1 5 5. 20. 30. - END SSED-INIT - - BED-INIT - RCHRES BEDDEP Initial bed composition *** - # - # (ft) Sand Silt Clay *** - 1 2. .8 .1 .1 - 2 3 2. .8 .1 .1 - 4 5 1. .8 .1 .1 - END BED-INIT - - *** Section GQUAL *** - - GQ-GENDATA - RCHRES NGQL TPFG PHFG ROFG CDFG SDFG PYFG LAT *** - # - # *** - 1 5 1 1 1 2 1 1 1 42 - END GQ-GENDATA - - GQ-QALDATA - RCHRES<-------GQID-------> DQAL CONCID CONV QTYID *** - # - # *** - 1 5 PESTICIDE B4 10. UG 1.E6 G - END GQ-QALDATA - - GQ-QALFG - RCHRES HDRL OXID PHOT VOLT BIOD GEN SDAS *** - # - # *** - 1 5 1 1 1 1 1 1 1 - END GQ-QALFG - - GQ-HYDPM - RCHRES KA KB KN THHYD *** - # - # *** - 1 5 .001 .01 .001 1.03 - END GQ-HYDPM - - GQ-ROXPM - RCHRES KOX THOX *** - # - # *** - 1 5 .1 1.03 - END GQ-ROXPM - - GQ-PHOTPM - # - #*** K1 K2 K3 K4 K5 K6 K7 - # - #*** K8 K9 K10 K11 K12 K13 K14 - # - #*** K15 K16 K17 K18 PHI THETA - 1 5 848. 544. 330. 195. 120. 68. 41. - 1 5 23. 13. 7. 4. 1. 
.1 - 1 5 .3 1.1 - END GQ-PHOTPM - - GQ-CFGAS - RCHRES CFGAS *** - # - # *** - 1 5 .001 - END GQ-CFGAS - - GQ-BIOPM - RCHRES BIOCON THBIO BIO *** - # - # *** - 1 5 .01 10. - END GQ-BIOPM - - GQ-GENDECAY - RCHRES FSTDEC THFST *** - # - # *** - 1 5 .2 - END GQ-GENDECAY - - GQ-SEDDECAY - RCHRES KSUSP THSUSP KBED THBED *** - # - # *** - 1 5 .002 - END GQ-SEDDECAY - - GQ-KD - RCHRES Partition coefficients *** - # - # ADPM(1,1) ADPM(2,1) ADPM(3,1) ADPM(4,1) ADPM(5,1) ADPM(6,1) *** - 1 .0001 .001 .001 .0001 .001 .001 - 2 3 .0001 .001 .001 1.E-10 1.E-10 1.E-10 - 4 5 .0001 .001 .001 .0001 .001 .001 - END GQ-KD - - GQ-ADRATE - RCHRES Adsorption/desorption rate parameters *** - # - # ADPM(1,2) ADPM(2,2) ADPM(3,2) ADPM(4,2) ADPM(5,2) ADPM(6,2) *** - 1 150. 150. 150. .25 .25 .25 - 2 3 150. 150. 150. 1000. 1000. 1000. - 4 5 150. 150. 150. .25 .25 .25 - END GQ-ADRATE - - GQ-SEDCONC - RCHRES SQAL1 SQAL2 SQAL3 SQAL4 SQAL5 SQAL6 *** - # - # *** - 1 .001 .01 .01 .001 .01 .01 - 2 3 .001 .01 .01 0. 0. 0. - 4 5 .001 .01 .01 .001 .01 .01 - END GQ-SEDCONC - - GQ-VALUES - RCHRES TWAT PHVAL ROC CLD SDCNC PHY *** - # - # *** - 1 5 1.E-5 - END GQ-VALUES - - GQ-ALPHA - RCHRES*** - # - #*** K1 K2 K3 K4 K5 K6 K7 - # - #*** K8 K9 K10 K11 K12 K13 K14 - # - #*** K15 K16 K17 K18 - 1 5 .008 .009 .010 .011 .011 .011 .012 - 1 5 .013 .015 .016 .017 .018 .019 .020 - 1 5 .021 .022 .024 .024 - END GQ-ALPHA - - GQ-GAMMA - RCHRES*** - # - #*** K1 K2 K3 K4 K5 K6 K7 - # - #*** K8 K9 K10 K11 K12 K13 K14 - # - #*** K15 K16 K17 K18 - 1 5 .001 .001 .001 .001 .001 .001 .001 - 1 5 .001 .002 .002 .002 .002 .002 .002 - 1 5 .002 .002 .002 .002 - END GQ-GAMMA - - GQ-DELTA - RCHRES*** - # - #*** K1 K2 K3 K4 K5 K6 K7 - # - #*** K8 K9 K10 K11 K12 K13 K14 - # - #*** K15 K16 K17 K18 - 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 - 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 - 1 5 .0007 .0007 .0007 .0007 - END GQ-DELTA - - GQ-CLDFACT - RCHRES*** - # - #*** F1 F2 F3 F4 F5 F6 F7 - # - #*** F8 F9 F10 F11 F12 F13 F14 - # - #*** 
F15 F16 F17 F18 - 1 5 .10 .10 .10 .15 .15 .15 .15 - 1 5 .17 .17 .17 .17 .18 .19 .20 - 1 5 .21 .21 .21 .21 - END GQ-CLDFACT - - *** Section RQUAL *** - - BENTH-FLAG - RCHRES BENF *** - # - # *** - 1 1 - 4 5 1 - END BENTH-FLAG - - SCOUR-PARMS - RCHRES SCRVEL SCRMUL *** - # - # *** - 1 5 3. - END SCOUR-PARMS - - *** Section OXRX *** - - OX-FLAGS - RCHRES REAM *** - # - # *** - 2 3 1 - 4 3 - 5 2 - END OX-FLAGS - - OX-GENPARM - RCHRES KBOD20 TCBOD KODSET SUPSAT *** - # - # /hr *** - 1 5 .1 8. - END OX-GENPARM - - OX-BENPARM - RCHRES BENOD TCBEN EXPOD BRBOD(1) BRBOD(2) EXPREL *** - # - # mg/m2.hr mg/m2.hr mg/m2.hr *** - 1 5 10. 1.1 1.2 20. 25. 1.3 - END OX-BENPARM - - OX-CFOREA - RCHRES CFOREA *** - # - # *** - 1 5. - END OX-CFOREA - - OX-REAPARM - RCHRES TCGINV REAK EXPRED EXPREV *** - # - # /hr *** - 4 2.0 -1.1 1.1 - END OX-REAPARM - - OX-INIT - RCHRES DOX BOD SATDO *** - # - # mg/l mg/l mg/l *** - 1 5 8. 100. - END OX-INIT - - *** Section NUTRX *** - - NUT-FLAGS - RCHRES TAM NO2 PO4 AMV DEN ADNH ADPO PHFL *** - # - # *** - 1 5 1 1 1 1 1 0 0 - END NUT-FLAGS - - NUT-BENPARM - RCHRES BRTAM(1) BRTAM(2) BRPO4(1) BRPO4(2) ANAER *** - # - # mg/m2.hr mg/m2.hr mg/m2.hr mg/m2.hr mg/l *** - 1 5 11.0 33.0 1.1 2.2 0.0005 - END NUT-BENPARM - - NUT-NITDENIT - RCHRES KTAM20 KNO220 TCNIT KNO320 TCDEN DENOXT *** - # - # /hr /hr /hr mg/l *** - 1 5 .002 .004 1.07 .001 1.04 0.2 - END NUT-NITDENIT - - NUT-NH3VOLAT - RCHRES EXPNVG EXPNVL *** - # - # *** - 1 5 .50 0.6667 - END NUT-NH3VOLAT - - NUT-BEDCONC - RCHRES Bed concentrations of NH4 & PO4 (mg/mg) *** - # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** - 1 5 0.00001 0.00001 0.00001 0.00001 0.00001 0.00001 - END NUT-BEDCONC - - NUT-ADSPARM - RCHRES Partition coefficients for NH4 AND PO4 (l/mg) *** - # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** - 1 5 0.0001 0.0001 0.0001 10. 10. 10. - END NUT-ADSPARM - - NUT-DINIT - RCHRES NO3 TAM NO2 PO4 PHVAL *** - # - # mg/l mg/l mg/l mg/l ph units *** - 1 5 40. 10. 1. 
50. 7.0 - END NUT-DINIT - - NUT-ADSINIT - RCHRES Initial suspended NH4 and PO4 concentrations (mg/mg) *** - # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** - 1 5 0. 0. 0. 0. 0. 0. - END NUT-ADSINIT - - *** Section PLANK *** - - PLNK-FLAGS - RCHRES PHYF ZOOF BALF SDLT AMRF DECF NSFG ZFOO *** - # - # *** - 1 5 1 1 1 1 1 1 - END PLNK-FLAGS - - PLNK-PARM1 - RCHRES RATCLP NONREF LITSED ALNPR EXTB MALGR *** - # - # /ft /hr *** - 1 5 4.5 - END PLNK-PARM1 - - PHYTO-PARM - RCHRES SEED MXSTAY OREF CLALDH PHYSET REFSET *** - # - # mg/l mg/l ug/l *** - 1 5 .1 .1 .5 .5 - END PHYTO-PARM - - ZOO-PARM1 - RCHRES MZOEAT ZFIL20 ZRES20 ZD OXZD *** - # - # mg/l.hr l/mgzoo.hr /hr /hr /hr *** - 1 5 .2 - END ZOO-PARM1 - - PLNK-INIT - RCHRES PHYTO ZOO BENAL ORN ORP ORC *** - # - # mg/l org/l mg/m2 mg/l mg/l mg/l *** - 1 5 40. 200. 5. 20. 20. 20. - END PLNK-INIT - - *** Section PHCARB *** - - PH-PARM1 - RCHRES PHCN ALKC *** - # - # *** - 1 5 50 - END PH-PARM1 - - PH-INIT - RCHRES TIC CO2 PH *** - # - # mg/l mg/l *** - 1 5 20. 5. 
8.5 - END PH-INIT -END RCHRES - -FTABLES - FTABLE 1 - ROWS COLS *** - 14 6 - WINTER SUMMER SPLWAY *** - DEPTH AREA VOLUME OUTLET OUTLET DISCH *** - (FT) (ACRES) (AC-FT) DISCH DISCH (CFS) *** - (CFS) (CFS) *** - .000 .000 .0000 .0000 .0000 .0000 - 2.000 1.212 1.2120 0.0000 .0000 .0000 - 4.000 2.424 4.8480 0.0000 .0000 .0000 - 6.000 3.636 10.9080 0.0000 .0000 .0000 - 8.000 4.848 19.3920 0.0000 .0000 .0000 - 10.000 6.061 30.3050 0.0000 .0000 .0000 - 12.000 7.273 43.6380 5.0000 3.5000 .0000 - 14.000 8.485 59.3950 6.2500 4.3750 .0000 - 16.000 9.697 77.5760 7.5000 5.2500 .0000 - 18.000 10.909 98.1810 8.7500 6.1250 .0000 - 20.000 12.121 121.2100 10.0000 7.0000 .0000 - 21.000 12.727 133.6360 10.6250 7.4375 50.0000 - 22.000 13.333 146.6630 11.2500 7.8750 100.0000 - 23.000 13.939 160.3030 11.8750 8.3125 500.0000 - END FTABLE 1 - - FTABLE 2 - ROWS COLS *** - 13 4 - DEPTH AREA VOLUME DISCH FLO-THRU *** - (FT) (ACRES) (AC-FT) (CFS) (MIN) *** - .000 .000 .0000 .000 0.0 - .167 .071 .0109 1.2241 6.5 - .333 .081 .0236 3.9148 4.4 - .500 .091 .0379 7.8193 3.5 - .667 .101 .0539 12.9032 3.0 - .833 .111 .0715 19.1853 2.7 - 1.000 .121 .0909 26.7046 2.5 - 1.333 .141 .1347 45.6529 2.1 - 1.667 .162 .1852 70.1757 1.9 - 2.000 .182 .2424 100.7192 1.7 - 2.667 .586 .4983 201.9005 1.8 - 3.333 .990 1.0236 344.6344 2.2 - 4.000 1.394 1.8182 537.0775 2.5 - END FTABLE 2 - - FTABLE 3 - ROWS COLS *** - 13 4 - DEPTH AREA VOLUME DISCH FLO-THRU *** - (FT) (ACRES) (AC-FT) (CFS) (MIN) *** - .000 .000 .0000 .000 0.0 - .167 .071 .0109 1.4992 5.3 - .333 .081 .0236 4.7947 3.6 - .500 .091 .0379 9.5766 2.9 - .667 .101 .0539 15.8032 2.5 - .833 .111 .0715 23.4971 2.2 - 1.000 .121 .0909 32.7063 2.0 - 1.333 .141 .1347 55.9132 1.7 - 1.667 .162 .1852 85.9474 1.6 - 2.000 .182 .2424 123.3553 1.4 - 2.667 .586 .4983 247.2766 1.5 - 3.333 .990 1.0236 422.0892 1.8 - 4.000 1.394 1.8182 657.7828 2.0 - END FTABLE 3 - - FTABLE 4 - ROWS COLS *** - 13 4 - DEPTH AREA VOLUME DISCH FLO-THRU *** - (FT) (ACRES) (AC-FT) (CFS) (MIN) *** - 
.000 .000 .0000 .000 0.0 - .250 .848 .1970 .9024 158.5 - .500 .970 .4242 2.8860 106.7 - .750 1.091 .6818 5.7642 85.9 - 1.000 1.212 .9697 9.5120 74.0 - 1.250 1.333 1.2879 14.1431 66.1 - 1.500 1.455 1.6364 19.6862 60.3 - 2.000 1.697 2.4242 33.6545 52.3 - 2.500 1.939 3.3333 51.7323 46.8 - 3.000 2.182 4.3636 74.2486 42.7 - 4.000 11.879 11.3939 155.5774 53.2 - 5.000 21.576 28.1212 296.8633 68.8 - 6.000 31.273 54.5454 522.1440 75.8 - END FTABLE 4 - - FTABLE 5 - ROWS COLS *** - 13 4 - DEPTH AREA VOLUME DISCH FLO-THRU *** - (FT) (ACRES) (AC-FT) (CFS) (MIN) *** - .000 .000 .0000 .000 0.0 - .333 1.697 .5253 1.5869 240.3 - .667 1.939 1.1313 5.0752 161.8 - 1.000 2.182 1.8182 10.1370 130.2 - 1.333 2.424 2.5859 16.7279 112.2 - 1.667 2.667 3.4343 24.8719 100.2 - 2.000 2.909 4.3636 34.6200 91.5 - 2.667 3.394 6.4646 59.1848 79.3 - 3.333 3.879 8.8889 90.9763 70.9 - 4.000 4.364 11.6364 130.5731 64.7 - 5.333 36.687 39.0034 284.8886 99.4 - 6.667 69.010 109.4680 593.7734 133.8 - 8.000 101.333 223.0302 1129.6948 143.3 - END FTABLE 5 -END FTABLES - -DISPLY - DISPLY-INFO1 - # - #<----------Title----------->***TRAN PIVL DIG1 FIL1 PYR DIG2 FIL2 YRND - 1 O2 CONC, MEIER POND (mg/l) AVER 1 2 66 12 - 2 PEST SED CONC, POND (mg/kg) AVER 1 2 66 12 - 3 O2 CONC,LOWER KITTLE C(mg/l) AVER 1 2 66 12 - 4 PEST SED CONC,L KTL C(mg/kg) AVER 1 2 66 12 - 5 WATER TEMP,MEIER POND (DEGF) AVER 1 2 66 12 - END DISPLY-INFO1 -END DISPLY - -GENER - OPCODE - # - # Op- *** - code *** - 1 2 19 - END OPCODE -END GENER - -PLTGEN - PLOTINFO - # - # FILE NPT NMN LABL PYR PIVL *** - 1 94 2 24 - 2 95 3 1 6 - END PLOTINFO - - GEN-LABELS - # - #<----------------Title ----------------> *** <------Y axis------> - 1 SIMULATED FLOWS (CFS) CFS - 2 SIMULATED VALS RELATED TO TEMP&PH,RCH 4 - END GEN-LABELS - - SCALING - # - # YMIN YMAX IVLIN *** - 1 2 0. 150. 20. 
- END SCALING - - CURV-DATA (first curve) - <-Curve label--> Line Intg Col Tran *** - # - # type eqv code code *** - 1 TOTAL POND OUTFL 7 1 AVER - 2 AVDEP FOR RCH 4 7 1 LAST - END CURV-DATA - - CURV-DATA (second curve) - <-Curve label--> Line Intg Col Tran *** - # - # type eqv code code *** - 1 LOWER KITTLE CR 8 2 AVER - 2 TW FOR RCH 4 8 2 LAST - END CURV-DATA - - CURV-DATA (third curve) - <-Curve label--> Line Intg Col Tran *** - # - # type eqv code code *** - 2 PH FOR RCH 4 9 2 LAST - END CURV-DATA - - CURV-DATA (fourth curve) - <-Curve label--> Line Intg Col Tran *** - # - # type eqv code code *** - 2 HTEXCH FOR RCH 4 10 2 - END CURV-DATA -END PLTGEN - -EXT SOURCES -<-Volume-> SsysSgap<--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** - # # tem strg<-factor->strg # # # # *** -WDM 39 PREC ENGLZERO SAME PERLND 1 EXTNL PREC -WDM 131 PREC ENGLZERO SAME IMPLND 1 EXTNL PREC -WDM 39 PREC ENGLZERO SAME RCHRES 1 3 EXTNL PREC -WDM 131 PREC ENGLZERO SAME RCHRES 4 5 EXTNL PREC -WDM 123 ATMP ENGL SAME PERLND 1 ATEMP AIRTMP -WDM 122 ATMP ENGL SAME IMPLND 1 ATEMP AIRTMP -WDM 123 ATMP ENGL SAME RCHRES 1 3 EXTNL GATMP -WDM 122 ATMP ENGL SAME RCHRES 4 5 EXTNL GATMP -WDM 41 EVAP ENGL .7 DIV PERLND 1 EXTNL PETINP -WDM 41 EVAP ENGL .7 DIV IMPLND 1 EXTNL PETINP -WDM 41 EVAP ENGL .7 DIV RCHRES 1 5 EXTNL POTEV -WDM 42 WIND ENGL DIV PERLND 1 EXTNL WINMOV -WDM 42 WIND ENGL DIV IMPLND 1 EXTNL WINMOV -WDM 42 WIND ENGL DIV RCHRES 1 5 EXTNL WIND -WDM 46 SOLR ENGL DIV PERLND 1 EXTNL SOLRAD -WDM 46 SOLR ENGL DIV IMPLND 1 EXTNL SOLRAD -WDM 46 SOLR ENGL DIV RCHRES 1 5 EXTNL SOLRAD -WDM 126 DEWP ENGL SAME PERLND 1 EXTNL DTMPG -WDM 125 DEWP ENGL SAME IMPLND 1 EXTNL DTMPG -WDM 126 DEWP ENGL SAME RCHRES 1 3 EXTNL DEWTMP -WDM 125 DEWP ENGL SAME RCHRES 4 5 EXTNL DEWTMP -WDM 140 CLND ENGL SAME RCHRES 1 EXTNL COLIND -WDM 135 CLDC ENGL SAME PERLND 1 EXTNL CLOUD -WDM 135 CLDC ENGL SAME IMPLND 1 EXTNL CLOUD -WDM 135 CLDC ENGL SAME RCHRES 1 5 EXTNL CLOUD -END EXT SOURCES - -SCHEMATIC -<-Source-> <--Area--> 
<-Target-> *** - # <-factor-> # # *** -PERLND 1 6000. RCHRES 1 1 -IMPLND 1 3000. RCHRES 5 2 -RCHRES 1 RCHRES 2 3 -RCHRES 1 RCHRES 3 4 -RCHRES 2 RCHRES 4 5 -RCHRES 3 RCHRES 4 5 -RCHRES 4 RCHRES 5 5 -END SCHEMATIC - -MASS-LINK - - MASS-LINK 1 - <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** - # #<-factor-> # # *** -PERLND PWATER PERO 0.0833333 RCHRES INFLOW IVOL -PERLND PWTGAS POHT RCHRES INFLOW IHEAT -PERLND PWTGAS PODOXM RCHRES INFLOW OXIF 1 -PERLND PWTGAS POCO2M RCHRES INFLOW PHIF 2 - END MASS-LINK 1 - - MASS-LINK 2 - <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** - # #<-factor-> # # *** -IMPLND IWATER SURO 0.0833333 RCHRES INFLOW IVOL -IMPLND SOLIDS SOSLD 0.10 RCHRES INFLOW ISED 1 -IMPLND SOLIDS SOSLD 0.46 RCHRES INFLOW ISED 2 -IMPLND SOLIDS SOSLD 0.44 RCHRES INFLOW ISED 3 -IMPLND IWTGAS SOHT RCHRES INFLOW IHEAT -IMPLND IWTGAS SODOXM RCHRES INFLOW OXIF 1 -IMPLND IWTGAS SOCO2M RCHRES INFLOW PHIF 2 -IMPLND IQUAL SOQUAL RCHRES INFLOW OXIF 2 - END MASS-LINK 2 - - MASS-LINK 3 - <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** - # #<-factor-> # # *** -RCHRES OFLOW 1 RCHRES INFLOW - END MASS-LINK 3 - - MASS-LINK 4 - <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** - # #<-factor-> # # *** -RCHRES OFLOW 2 RCHRES INFLOW - END MASS-LINK 4 - - MASS-LINK 5 - <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** - # #<-factor-> # # *** -RCHRES ROFLOW RCHRES INFLOW - END MASS-LINK 5 - -END MASS-LINK - -NETWORK -<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** - # # #<-factor->strg # # # # *** -RCHRES 1 HTRCH TW 1. DISPLY 5 INPUT TIMSER -RCHRES 1 OXRX DOX 1. DISPLY 1 INPUT TIMSER -RCHRES 1 GQUAL RSQAL 12 1. GENER 1 INPUT ONE -RCHRES 1 SEDTRN RSED 10 1. GENER 1 INPUT TWO -GENER 1 OUTPUT TIMSER 1.1 DISPLY 2 INPUT TIMSER -RCHRES 1 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 1 -RCHRES 4 HYDR AVDEP 1. PLTGEN 2 INPUT POINT 1 -RCHRES 4 HTRCH TW 1. PLTGEN 2 INPUT POINT 2 -RCHRES 4 PHCARB PHST 3 1. PLTGEN 2 INPUT POINT 3 -RCHRES 4 HTRCH HTEXCH 1. 
PLTGEN 2 INPUT MEAN 1 -RCHRES 5 OXRX DOX 1. DISPLY 3 INPUT TIMSER -RCHRES 5 GQUAL RSQAL 12 1. GENER 2 INPUT ONE -RCHRES 5 SEDTRN RSED 10 1. GENER 2 INPUT TWO -GENER 2 OUTPUT TIMSER 1.1 DISPLY 4 INPUT TIMSER -RCHRES 5 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 2 -END NETWORK - -END RUN +RUN + +GLOBAL + Version 11 test run: PERLND and IMPLND w/ RCHRES (sediment, water quality) + START 1976 END 1976 + RUN INTERP OUTPUT LEVEL 3 + RESUME 0 RUN 1 UNIT SYSTEM 1 +END GLOBAL + + *** This test run uses MASS-LINK and SCHEMATIC blocks *** + +FILES + ***<----FILE NAME-------------------------------------------------> +WDM 21 test10.wdm +MESSU 22 test10.ech + 01 test10.out + 66 test10.d66 + 94 test10.p94 + 95 test10.p95 +END FILES + +OPN SEQUENCE + INGRP INDELT 01:00 + PERLND 1 + RCHRES 1 + DISPLY 5 + DISPLY 1 + GENER 1 + DISPLY 2 + RCHRES 2 + RCHRES 3 + RCHRES 4 + PLTGEN 2 + IMPLND 1 + RCHRES 5 + DISPLY 3 + GENER 2 + DISPLY 4 + PLTGEN 1 + END INGRP +END OPN SEQUENCE + +PERLND + ACTIVITY + Active Sections (1=Active, 0=Inactive) *** + # - # ATMP SNOW PWAT SED PST PWG PQAL MSTL PEST NITR PHOS TRAC *** + 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + Print-flags *** PIVL PYR + # - # ATMP SNOW PWAT SED PST PWG PQAL MSTL PEST NITR PHOS TRAC *** + 1 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + <-------Name-------> Unit-systems Printer *** + # - # t-series Engl Metr *** + in out *** + 1 BICKNELL FARM 1 1 1 0 + END GEN-INFO + + *** Section SNOW *** + + ICE-FLAG + 0= Ice formation not simulated, 1= Simulated *** + # - #ICEFG *** + 1 1 + END ICE-FLAG + + SNOW-PARM1 + Snow input info: Part 1 *** + # - # LAT MELEV SHADE SNOWCF COVIND *** + 1 42. 520. 0.0 1.45 0.5 + END SNOW-PARM1 + + SNOW-PARM2 + Snow input info: Part 2 *** + # - # RDCSN TSNOW SNOEVP CCFACT MWATER MGMELT *** + 1 0.12 32. 0.05 0.5 0.08 0.0001 + END SNOW-PARM2 + + SNOW-INIT1 + Initial snow conditions: Part 1 *** + # - # PACKSNOW PACKICE PACKWATER RDENPF DULL PAKTMP *** + 1 1.4 0.2 0.1 0.2 375. 
27.5 + END SNOW-INIT1 + + SNOW-INIT2 + Initial snow conditions: Part 2 *** + # - # COVINX XLNMLT SKYCLR *** + 1 0.50 0.0 1.0 + END SNOW-INIT2 + + *** Section PWATER *** + + PWAT-PARM1 + PWATER variable monthly parameter value flags *** + # - # CSNO RTOP UZFG VCS VUZ VNN VIFW VIRC VLE *** + 1 1 0 0 1 1 1 0 0 1 + END PWAT-PARM1 + + PWAT-PARM2 + *** PWATER input info: Part 2 + # - # ***FOREST LZSN INFILT LSUR SLSUR KVARY AGWRC + 1 0.010 8.0 0.150 250. 0.050 0.5 0.98 + END PWAT-PARM2 + + PWAT-PARM3 + *** PWATER input info: Part 3 + # - # ***PETMAX PETMIN INFEXP INFILD DEEPFR BASETP AGWETP + 1 40. 35. 2.0 2.0 0.10 0.0 0.08 + END PWAT-PARM3 + + PWAT-PARM4 + PWATER input info: Part 4 *** + # - # CEPSC UZSN NSUR INTFW IRC LZETP *** + 1 0.01 0.1 1.0 0.60 + END PWAT-PARM4 + + MON-INTERCEP + Only required if VCSFG=1 in PWAT-PARM1 *** + # - # Interception storage capacity at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.04 0.04 0.03 0.03 0.03 0.03 0.10 0.17 0.19 0.14 0.05 0.04 + END MON-INTERCEP + + MON-UZSN + Only required if VUZFG=1 in PWAT-PARM1 *** + # - # Upper zone storage at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.4 0.4 0.4 0.4 1.6 1.1 1.1 1.3 1.3 1.3 1.1 0.9 + END MON-UZSN + + MON-MANNING + Only required if VNNFG=1 in PWAT-PARM1 *** + # - # Manning's n for overland flow at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.30 0.30 0.30 0.30 0.27 0.25 0.25 0.25 0.25 0.25 0.35 0.33 + END MON-MANNING + + MON-LZETPARM + Only required if VLEFG=1 in PWAT-PARM1 *** + # - # Lower zone ET parameter at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.20 0.20 0.20 0.23 0.23 0.25 0.60 0.80 0.75 0.50 0.30 0.20 + END MON-LZETPARM + + PWAT-STATE1 + *** Initial conditions at start of simulation + # - # *** CEPS SURS UZS IFWS LZS AGWS GWVS + 1 0.05 0.0 0.15 0.0 4.0 0.05 0.05 + END PWAT-STATE1 + + *** Section PSTEMP *** + + PSTEMP-PARM2 + 
*** + # - # ASLT BSLT ULTP1 ULTP2 LGTP1 LGTP2 *** + 1 14.5 .365 1.2 4.0 1.2 6.0 + END PSTEMP-PARM2 + + *** Section PWTGAS *** + + PWT-PARM2 + *** + # - # ELEV IDOXP ICO2P ADOXP ACO2P *** + 1 500. 6. .05 5. .05 + END PWT-PARM2 +END PERLND + +IMPLND + ACTIVITY + Active Sections *** + # - # ATMP SNOW IWAT SLD IWG IQAL *** + 1 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + Print-flags *** + # - # ATMP SNOW IWAT SLD IWG IQAL PIVL PYR *** + 1 4 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + <-------Name-------> Unit-systems Printer *** + # - # t-series Engl Metr *** + in out *** + 1 DONIGIAN INDUSTRY 1 1 1 0 + END GEN-INFO + + *** Section SNOW *** + + ICE-FLAG + 0= Ice formation not simulated, 1= Simulated *** + # - #ICEFG *** + 1 1 + END ICE-FLAG + + SNOW-PARM1 + Snow input info: Part 1 *** + # - # LAT MELEV SHADE SNOWCF COVIND *** + 1 42. 450. 0.0 1.45 0.5 + END SNOW-PARM1 + + SNOW-PARM2 + Snow input info: Part 2 *** + # - # RDCSN TSNOW SNOEVP CCFACT MWATER MGMELT *** + 1 0.12 32. 0.05 0.5 0.08 0.0001 + END SNOW-PARM2 + + SNOW-INIT1 + Initial snow conditions: Part 1 *** + # - # PACKSNOW PACKICE PACKWATER RDENPF DULL PAKTMP *** + 1 1.4 0.2 0.1 0.2 375. 27.5 + END SNOW-INIT1 + + SNOW-INIT2 + Initial snow conditions: Part 2 *** + # - # COVINX XLNMLT SKYCLR *** + 1 0.50 0.0 1.0 + END SNOW-INIT2 + + *** Section IWATER *** + + IWAT-PARM1 + Flags *** + # - # CSNO RTOP VRS VNN RTLI *** + 1 1 1 + END IWAT-PARM1 + + IWAT-PARM2 + *** + # - # LSUR SLSUR NSUR RETSC *** + 1 200. .010 .010 .01 + END IWAT-PARM2 + + IWAT-PARM3 + *** + # - # PETMAX PETMIN *** + 1 40. 35. + END IWAT-PARM3 + + IWAT-STATE1 + IWATER state variables *** + # - # RETS SURS *** + 1 .01 .01 + END IWAT-STATE1 + + *** Section SOLIDS *** + + SLD-PARM2 + *** + # - # KEIM JEIM ACCSDP REMSDP *** + 1 .08 1.9 .01 .5 + END SLD-PARM2 + + SLD-STOR + Solids storage (tons/acre) *** + # - # *** + 1 0.2 + END SLD-STOR + + *** Section IWTGAS *** + + IWT-PARM2 + *** + # - # ELEV AWTF BWTF *** + 1 410. 40. 
0.8 + END IWT-PARM2 + + *** Section IQUAL *** + + NQUALS + *** + # - #NQUAL *** + 1 1 + END NQUALS + + QUAL-PROPS + Identifiers and Flags *** + # - #<--qualid--> QTID QSD VPFW QSO VQO *** + 1 COD LB 1 1 + END QUAL-PROPS + + QUAL-INPUT + Storage on surface and nonseasonal parameters *** + # - # SQO POTFW ACQOP SQOLIM WSQOP *** + 1 1.20 .175 .02 2.0 1.7 + END QUAL-INPUT +END IMPLND + +RCHRES + ACTIVITY + RCHRES Active Sections (1=Active, 0=Inactive) *** + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG *** + 1 5 1 1 1 1 1 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + RCHRES Print-flags *** + # - # HYDR ADCA CONS HEAT SED GQL OXRX NUTR PLNK PHCB PIVL PYR *** + 1 4 5 5 5 5 5 5 5 5 5 5 12 + 5 4 4 4 4 4 4 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + RCHRES<-------Name------->Nexit Unit Systems Printer *** + # - # t-series Engl Metr LKFG *** + in out *** + 1 MEIER POND 2 1 1 1 0 1 + 2 OUTLET 1 1 1 1 0 + 3 SPILLWAY 1 1 1 1 0 + 4 UPPER KITTLE CREEK 1 1 1 1 0 + 5 LOWER KITTLE CREEK 1 1 1 1 0 + END GEN-INFO + + *** Section HYDR *** + + HYDR-PARM1 + RCHRES Flags for HYDR section *** + # - # VC A1 A2 A3 ODFVFG for each ODGTFG for each *** FUNCT for each + FG FG FG FG possible exit possible exit *** possible exit + 1 2 3 4 5 1 2 3 4 5 *** + + 1 1 1 1 -1 6 + 2 5 1 1 1 4 + END HYDR-PARM1 + + HYDR-PARM2 + RCHRES *** + # - # DSN FTBN LEN DELTH STCOR KS DB50 *** + 1 00 1 0.5 1. .5 + 2 00 2 0.25 20. .5 + 3 00 3 0.25 30. .5 + 4 00 4 2.0 40. .5 + 5 00 5 3.0 40. .5 + END HYDR-PARM2 + + HYDR-INIT + RCHRES Initial conditions for HYDR section *** + # - # VOL Initial value of COLIND *** Initial value of OUTDGT + (ac-ft) for each possible exit *** for each possible exit + EX1 EX2 EX3 EX4 EX5 *** EX1 EX2 EX3 EX4 EX5 + 1 30. 4.0 5.0 + 2 5 0.0 4.0 + END HYDR-INIT + + *** Section CONS *** + + NCONS + RCHRES *** + # - #NCONS *** + 1 5 1 + END NCONS + + CONS-DATA + RCHRES Data for conservative constituent No. 3 *** + # - #<---Substance-id---> Conc ID CONV QTYID *** + 1 5 ALKALINITY 1000. 
MG/L 35.31 KG + END CONS-DATA + + *** Section HTRCH *** + + HEAT-PARM + RCHRES ELEV ELDAT CFSAEX KATRAD KCOND KEVAP *** + # - # *** + 1 5 450. 100. .95 + END HEAT-PARM + + HEAT-INIT + RCHRES TW AIRTMP *** + # - # *** + 1 5 60. 40. + END HEAT-INIT + + *** Section SEDTRN *** + + SANDFG + RCHRES *** + # - # SNDFG *** + 1 2 1 + 3 4 2 + 5 3 + END SANDFG + + SED-GENPARM + RCHRES BEDWID BEDWRN POR *** + # - # *** + 1 200. 4. + 2 3 1.33 3. + 4 2.0 2. + 5 2.66 2. + END SED-GENPARM + + SAND-PM + RCHRES D W RHO KSAND EXPSND *** + # - # *** + 1 5 .014 2.5 1.5 1.2 + END SAND-PM + + SILT-CLAY-PM + RCHRES D W RHO TAUCD TAUCS M *** + # - # *** + 1 .00063 .0066 2.2 .2 .4 .5 + 2 3 .00063 .0066 2.2 1.E-10 500. .5 + 4 5 .00063 .0066 2.2 .2 .4 .5 + END SILT-CLAY-PM + + SILT-CLAY-PM + RCHRES D W RHO TAUCD TAUCS M *** + # - # *** + 1 .000055 .000034 2.0 .15 .3 .75 + 2 3 .000055 .000034 2.0 1.E-10 500. .75 + 4 5 .000055 .000034 2.0 .15 .3 .75 + END SILT-CLAY-PM + + SSED-INIT + RCHRES Suspended sed concs (mg/l) *** + # - # Sand Silt Clay *** + 1 5 5. 20. 30. + END SSED-INIT + + BED-INIT + RCHRES BEDDEP Initial bed composition *** + # - # (ft) Sand Silt Clay *** + 1 2. .8 .1 .1 + 2 3 2. .8 .1 .1 + 4 5 1. .8 .1 .1 + END BED-INIT + + *** Section GQUAL *** + + GQ-GENDATA + RCHRES NGQL TPFG PHFG ROFG CDFG SDFG PYFG LAT *** + # - # *** + 1 5 1 1 1 2 1 1 1 42 + END GQ-GENDATA + + GQ-QALDATA + RCHRES<-------GQID-------> DQAL CONCID CONV QTYID *** + # - # *** + 1 5 PESTICIDE B4 10. UG 1.E6 G + END GQ-QALDATA + + GQ-QALFG + RCHRES HDRL OXID PHOT VOLT BIOD GEN SDAS *** + # - # *** + 1 5 1 1 1 1 1 1 1 + END GQ-QALFG + + GQ-HYDPM + RCHRES KA KB KN THHYD *** + # - # *** + 1 5 .001 .01 .001 1.03 + END GQ-HYDPM + + GQ-ROXPM + RCHRES KOX THOX *** + # - # *** + 1 5 .1 1.03 + END GQ-ROXPM + + GQ-PHOTPM + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 PHI THETA + 1 5 848. 544. 330. 195. 120. 68. 41. + 1 5 23. 13. 7. 4. 1. 
.1 + 1 5 .3 1.1 + END GQ-PHOTPM + + GQ-CFGAS + RCHRES CFGAS *** + # - # *** + 1 5 .001 + END GQ-CFGAS + + GQ-BIOPM + RCHRES BIOCON THBIO BIO *** + # - # *** + 1 5 .01 10. + END GQ-BIOPM + + GQ-GENDECAY + RCHRES FSTDEC THFST *** + # - # *** + 1 5 .2 + END GQ-GENDECAY + + GQ-SEDDECAY + RCHRES KSUSP THSUSP KBED THBED *** + # - # *** + 1 5 .002 + END GQ-SEDDECAY + + GQ-KD + RCHRES Partition coefficients *** + # - # ADPM(1,1) ADPM(2,1) ADPM(3,1) ADPM(4,1) ADPM(5,1) ADPM(6,1) *** + 1 .0001 .001 .001 .0001 .001 .001 + 2 3 .0001 .001 .001 1.E-10 1.E-10 1.E-10 + 4 5 .0001 .001 .001 .0001 .001 .001 + END GQ-KD + + GQ-ADRATE + RCHRES Adsorption/desorption rate parameters *** + # - # ADPM(1,2) ADPM(2,2) ADPM(3,2) ADPM(4,2) ADPM(5,2) ADPM(6,2) *** + 1 150. 150. 150. .25 .25 .25 + 2 3 150. 150. 150. 1000. 1000. 1000. + 4 5 150. 150. 150. .25 .25 .25 + END GQ-ADRATE + + GQ-SEDCONC + RCHRES SQAL1 SQAL2 SQAL3 SQAL4 SQAL5 SQAL6 *** + # - # *** + 1 .001 .01 .01 .001 .01 .01 + 2 3 .001 .01 .01 0. 0. 0. + 4 5 .001 .01 .01 .001 .01 .01 + END GQ-SEDCONC + + GQ-VALUES + RCHRES TWAT PHVAL ROC CLD SDCNC PHY *** + # - # *** + 1 5 1.E-5 + END GQ-VALUES + + GQ-ALPHA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .008 .009 .010 .011 .011 .011 .012 + 1 5 .013 .015 .016 .017 .018 .019 .020 + 1 5 .021 .022 .024 .024 + END GQ-ALPHA + + GQ-GAMMA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .001 .001 .001 .001 .001 .001 .001 + 1 5 .001 .002 .002 .002 .002 .002 .002 + 1 5 .002 .002 .002 .002 + END GQ-GAMMA + + GQ-DELTA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 + 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 + 1 5 .0007 .0007 .0007 .0007 + END GQ-DELTA + + GQ-CLDFACT + RCHRES*** + # - #*** F1 F2 F3 F4 F5 F6 F7 + # - #*** F8 F9 F10 F11 F12 F13 F14 + # - #*** 
F15 F16 F17 F18 + 1 5 .10 .10 .10 .15 .15 .15 .15 + 1 5 .17 .17 .17 .17 .18 .19 .20 + 1 5 .21 .21 .21 .21 + END GQ-CLDFACT + + *** Section RQUAL *** + + BENTH-FLAG + RCHRES BENF *** + # - # *** + 1 1 + 4 5 1 + END BENTH-FLAG + + SCOUR-PARMS + RCHRES SCRVEL SCRMUL *** + # - # *** + 1 5 3. + END SCOUR-PARMS + + *** Section OXRX *** + + OX-FLAGS + RCHRES REAM *** + # - # *** + 2 3 1 + 4 3 + 5 2 + END OX-FLAGS + + OX-GENPARM + RCHRES KBOD20 TCBOD KODSET SUPSAT *** + # - # /hr *** + 1 5 .1 8. + END OX-GENPARM + + OX-BENPARM + RCHRES BENOD TCBEN EXPOD BRBOD(1) BRBOD(2) EXPREL *** + # - # mg/m2.hr mg/m2.hr mg/m2.hr *** + 1 5 10. 1.1 1.2 20. 25. 1.3 + END OX-BENPARM + + OX-CFOREA + RCHRES CFOREA *** + # - # *** + 1 5. + END OX-CFOREA + + OX-REAPARM + RCHRES TCGINV REAK EXPRED EXPREV *** + # - # /hr *** + 4 2.0 -1.1 1.1 + END OX-REAPARM + + OX-INIT + RCHRES DOX BOD SATDO *** + # - # mg/l mg/l mg/l *** + 1 5 8. 100. + END OX-INIT + + *** Section NUTRX *** + + NUT-FLAGS + RCHRES TAM NO2 PO4 AMV DEN ADNH ADPO PHFL *** + # - # *** + 1 5 1 1 1 1 1 0 0 + END NUT-FLAGS + + NUT-BENPARM + RCHRES BRTAM(1) BRTAM(2) BRPO4(1) BRPO4(2) ANAER *** + # - # mg/m2.hr mg/m2.hr mg/m2.hr mg/m2.hr mg/l *** + 1 5 11.0 33.0 1.1 2.2 0.0005 + END NUT-BENPARM + + NUT-NITDENIT + RCHRES KTAM20 KNO220 TCNIT KNO320 TCDEN DENOXT *** + # - # /hr /hr /hr mg/l *** + 1 5 .002 .004 1.07 .001 1.04 0.2 + END NUT-NITDENIT + + NUT-NH3VOLAT + RCHRES EXPNVG EXPNVL *** + # - # *** + 1 5 .50 0.6667 + END NUT-NH3VOLAT + + NUT-BEDCONC + RCHRES Bed concentrations of NH4 & PO4 (mg/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0.00001 0.00001 0.00001 0.00001 0.00001 0.00001 + END NUT-BEDCONC + + NUT-ADSPARM + RCHRES Partition coefficients for NH4 AND PO4 (l/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0.0001 0.0001 0.0001 10. 10. 10. + END NUT-ADSPARM + + NUT-DINIT + RCHRES NO3 TAM NO2 PO4 PHVAL *** + # - # mg/l mg/l mg/l mg/l ph units *** + 1 5 40. 10. 1. 
50. 7.0 + END NUT-DINIT + + NUT-ADSINIT + RCHRES Initial suspended NH4 and PO4 concentrations (mg/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0. 0. 0. 0. 0. 0. + END NUT-ADSINIT + + *** Section PLANK *** + + PLNK-FLAGS + RCHRES PHYF ZOOF BALF SDLT AMRF DECF NSFG ZFOO *** + # - # *** + 1 5 1 1 1 1 1 1 + END PLNK-FLAGS + + PLNK-PARM1 + RCHRES RATCLP NONREF LITSED ALNPR EXTB MALGR *** + # - # /ft /hr *** + 1 5 4.5 + END PLNK-PARM1 + + PHYTO-PARM + RCHRES SEED MXSTAY OREF CLALDH PHYSET REFSET *** + # - # mg/l mg/l ug/l *** + 1 5 .1 .1 .5 .5 + END PHYTO-PARM + + ZOO-PARM1 + RCHRES MZOEAT ZFIL20 ZRES20 ZD OXZD *** + # - # mg/l.hr l/mgzoo.hr /hr /hr /hr *** + 1 5 .2 + END ZOO-PARM1 + + PLNK-INIT + RCHRES PHYTO ZOO BENAL ORN ORP ORC *** + # - # mg/l org/l mg/m2 mg/l mg/l mg/l *** + 1 5 40. 200. 5. 20. 20. 20. + END PLNK-INIT + + *** Section PHCARB *** + + PH-PARM1 + RCHRES PHCN ALKC *** + # - # *** + 1 5 50 + END PH-PARM1 + + PH-INIT + RCHRES TIC CO2 PH *** + # - # mg/l mg/l *** + 1 5 20. 5. 
8.5 + END PH-INIT +END RCHRES + +FTABLES + FTABLE 1 + ROWS COLS *** + 14 6 + WINTER SUMMER SPLWAY *** + DEPTH AREA VOLUME OUTLET OUTLET DISCH *** + (FT) (ACRES) (AC-FT) DISCH DISCH (CFS) *** + (CFS) (CFS) *** + .000 .000 .0000 .0000 .0000 .0000 + 2.000 1.212 1.2120 0.0000 .0000 .0000 + 4.000 2.424 4.8480 0.0000 .0000 .0000 + 6.000 3.636 10.9080 0.0000 .0000 .0000 + 8.000 4.848 19.3920 0.0000 .0000 .0000 + 10.000 6.061 30.3050 0.0000 .0000 .0000 + 12.000 7.273 43.6380 5.0000 3.5000 .0000 + 14.000 8.485 59.3950 6.2500 4.3750 .0000 + 16.000 9.697 77.5760 7.5000 5.2500 .0000 + 18.000 10.909 98.1810 8.7500 6.1250 .0000 + 20.000 12.121 121.2100 10.0000 7.0000 .0000 + 21.000 12.727 133.6360 10.6250 7.4375 50.0000 + 22.000 13.333 146.6630 11.2500 7.8750 100.0000 + 23.000 13.939 160.3030 11.8750 8.3125 500.0000 + END FTABLE 1 + + FTABLE 2 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .167 .071 .0109 1.2241 6.5 + .333 .081 .0236 3.9148 4.4 + .500 .091 .0379 7.8193 3.5 + .667 .101 .0539 12.9032 3.0 + .833 .111 .0715 19.1853 2.7 + 1.000 .121 .0909 26.7046 2.5 + 1.333 .141 .1347 45.6529 2.1 + 1.667 .162 .1852 70.1757 1.9 + 2.000 .182 .2424 100.7192 1.7 + 2.667 .586 .4983 201.9005 1.8 + 3.333 .990 1.0236 344.6344 2.2 + 4.000 1.394 1.8182 537.0775 2.5 + END FTABLE 2 + + FTABLE 3 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .167 .071 .0109 1.4992 5.3 + .333 .081 .0236 4.7947 3.6 + .500 .091 .0379 9.5766 2.9 + .667 .101 .0539 15.8032 2.5 + .833 .111 .0715 23.4971 2.2 + 1.000 .121 .0909 32.7063 2.0 + 1.333 .141 .1347 55.9132 1.7 + 1.667 .162 .1852 85.9474 1.6 + 2.000 .182 .2424 123.3553 1.4 + 2.667 .586 .4983 247.2766 1.5 + 3.333 .990 1.0236 422.0892 1.8 + 4.000 1.394 1.8182 657.7828 2.0 + END FTABLE 3 + + FTABLE 4 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + 
.000 .000 .0000 .000 0.0 + .250 .848 .1970 .9024 158.5 + .500 .970 .4242 2.8860 106.7 + .750 1.091 .6818 5.7642 85.9 + 1.000 1.212 .9697 9.5120 74.0 + 1.250 1.333 1.2879 14.1431 66.1 + 1.500 1.455 1.6364 19.6862 60.3 + 2.000 1.697 2.4242 33.6545 52.3 + 2.500 1.939 3.3333 51.7323 46.8 + 3.000 2.182 4.3636 74.2486 42.7 + 4.000 11.879 11.3939 155.5774 53.2 + 5.000 21.576 28.1212 296.8633 68.8 + 6.000 31.273 54.5454 522.1440 75.8 + END FTABLE 4 + + FTABLE 5 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .333 1.697 .5253 1.5869 240.3 + .667 1.939 1.1313 5.0752 161.8 + 1.000 2.182 1.8182 10.1370 130.2 + 1.333 2.424 2.5859 16.7279 112.2 + 1.667 2.667 3.4343 24.8719 100.2 + 2.000 2.909 4.3636 34.6200 91.5 + 2.667 3.394 6.4646 59.1848 79.3 + 3.333 3.879 8.8889 90.9763 70.9 + 4.000 4.364 11.6364 130.5731 64.7 + 5.333 36.687 39.0034 284.8886 99.4 + 6.667 69.010 109.4680 593.7734 133.8 + 8.000 101.333 223.0302 1129.6948 143.3 + END FTABLE 5 +END FTABLES + +DISPLY + DISPLY-INFO1 + # - #<----------Title----------->***TRAN PIVL DIG1 FIL1 PYR DIG2 FIL2 YRND + 1 O2 CONC, MEIER POND (mg/l) AVER 1 2 66 12 + 2 PEST SED CONC, POND (mg/kg) AVER 1 2 66 12 + 3 O2 CONC,LOWER KITTLE C(mg/l) AVER 1 2 66 12 + 4 PEST SED CONC,L KTL C(mg/kg) AVER 1 2 66 12 + 5 WATER TEMP,MEIER POND (DEGF) AVER 1 2 66 12 + END DISPLY-INFO1 +END DISPLY + +GENER + OPCODE + # - # Op- *** + code *** + 1 2 19 + END OPCODE +END GENER + +PLTGEN + PLOTINFO + # - # FILE NPT NMN LABL PYR PIVL *** + 1 94 2 24 + 2 95 3 1 6 + END PLOTINFO + + GEN-LABELS + # - #<----------------Title ----------------> *** <------Y axis------> + 1 SIMULATED FLOWS (CFS) CFS + 2 SIMULATED VALS RELATED TO TEMP&PH,RCH 4 + END GEN-LABELS + + SCALING + # - # YMIN YMAX IVLIN *** + 1 2 0. 150. 20. 
+ END SCALING + + CURV-DATA (first curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 TOTAL POND OUTFL 7 1 AVER + 2 AVDEP FOR RCH 4 7 1 LAST + END CURV-DATA + + CURV-DATA (second curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 LOWER KITTLE CR 8 2 AVER + 2 TW FOR RCH 4 8 2 LAST + END CURV-DATA + + CURV-DATA (third curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 2 PH FOR RCH 4 9 2 LAST + END CURV-DATA + + CURV-DATA (fourth curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 2 HTEXCH FOR RCH 4 10 2 + END CURV-DATA +END PLTGEN + +EXT SOURCES +<-Volume-> SsysSgap<--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # tem strg<-factor->strg # # # # *** +WDM 39 PREC ENGLZERO SAME PERLND 1 EXTNL PREC +WDM 131 PREC ENGLZERO SAME IMPLND 1 EXTNL PREC +WDM 39 PREC ENGLZERO SAME RCHRES 1 3 EXTNL PREC +WDM 131 PREC ENGLZERO SAME RCHRES 4 5 EXTNL PREC +WDM 123 ATMP ENGL SAME PERLND 1 ATEMP AIRTMP +WDM 122 ATMP ENGL SAME IMPLND 1 ATEMP AIRTMP +WDM 123 ATMP ENGL SAME RCHRES 1 3 EXTNL GATMP +WDM 122 ATMP ENGL SAME RCHRES 4 5 EXTNL GATMP +WDM 41 EVAP ENGL .7 DIV PERLND 1 EXTNL PETINP +WDM 41 EVAP ENGL .7 DIV IMPLND 1 EXTNL PETINP +WDM 41 EVAP ENGL .7 DIV RCHRES 1 5 EXTNL POTEV +WDM 42 WIND ENGL DIV PERLND 1 EXTNL WINMOV +WDM 42 WIND ENGL DIV IMPLND 1 EXTNL WINMOV +WDM 42 WIND ENGL DIV RCHRES 1 5 EXTNL WIND +WDM 46 SOLR ENGL DIV PERLND 1 EXTNL SOLRAD +WDM 46 SOLR ENGL DIV IMPLND 1 EXTNL SOLRAD +WDM 46 SOLR ENGL DIV RCHRES 1 5 EXTNL SOLRAD +WDM 126 DEWP ENGL SAME PERLND 1 EXTNL DTMPG +WDM 125 DEWP ENGL SAME IMPLND 1 EXTNL DTMPG +WDM 126 DEWP ENGL SAME RCHRES 1 3 EXTNL DEWTMP +WDM 125 DEWP ENGL SAME RCHRES 4 5 EXTNL DEWTMP +WDM 140 CLND ENGL SAME RCHRES 1 EXTNL COLIND +WDM 135 CLDC ENGL SAME PERLND 1 EXTNL CLOUD +WDM 135 CLDC ENGL SAME IMPLND 1 EXTNL CLOUD +WDM 135 CLDC ENGL SAME RCHRES 1 5 EXTNL CLOUD +END EXT SOURCES + +SCHEMATIC +<-Source-> <--Area--> 
<-Target-> *** + # <-factor-> # # *** +PERLND 1 6000. RCHRES 1 1 +IMPLND 1 3000. RCHRES 5 2 +RCHRES 1 RCHRES 2 3 +RCHRES 1 RCHRES 3 4 +RCHRES 2 RCHRES 4 5 +RCHRES 3 RCHRES 4 5 +RCHRES 4 RCHRES 5 5 +END SCHEMATIC + +MASS-LINK + + MASS-LINK 1 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +PERLND PWATER PERO 0.0833333 RCHRES INFLOW IVOL +PERLND PWTGAS POHT RCHRES INFLOW IHEAT +PERLND PWTGAS PODOXM RCHRES INFLOW OXIF 1 +PERLND PWTGAS POCO2M RCHRES INFLOW PHIF 2 + END MASS-LINK 1 + + MASS-LINK 2 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +IMPLND IWATER SURO 0.0833333 RCHRES INFLOW IVOL +IMPLND SOLIDS SOSLD 0.10 RCHRES INFLOW ISED 1 +IMPLND SOLIDS SOSLD 0.46 RCHRES INFLOW ISED 2 +IMPLND SOLIDS SOSLD 0.44 RCHRES INFLOW ISED 3 +IMPLND IWTGAS SOHT RCHRES INFLOW IHEAT +IMPLND IWTGAS SODOXM RCHRES INFLOW OXIF 1 +IMPLND IWTGAS SOCO2M RCHRES INFLOW PHIF 2 +IMPLND IQUAL SOQUAL RCHRES INFLOW OXIF 2 + END MASS-LINK 2 + + MASS-LINK 3 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES OFLOW 1 RCHRES INFLOW + END MASS-LINK 3 + + MASS-LINK 4 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES OFLOW 2 RCHRES INFLOW + END MASS-LINK 4 + + MASS-LINK 5 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES ROFLOW RCHRES INFLOW + END MASS-LINK 5 + +END MASS-LINK + +NETWORK +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # #<-factor->strg # # # # *** +RCHRES 1 HTRCH TW 1. DISPLY 5 INPUT TIMSER +RCHRES 1 OXRX DOX 1. DISPLY 1 INPUT TIMSER +RCHRES 1 GQUAL RSQAL 12 1. GENER 1 INPUT ONE +RCHRES 1 SEDTRN RSED 10 1. GENER 1 INPUT TWO +GENER 1 OUTPUT TIMSER 1.1 DISPLY 2 INPUT TIMSER +RCHRES 1 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 1 +RCHRES 4 HYDR AVDEP 1. PLTGEN 2 INPUT POINT 1 +RCHRES 4 HTRCH TW 1. PLTGEN 2 INPUT POINT 2 +RCHRES 4 PHCARB PHST 3 1. PLTGEN 2 INPUT POINT 3 +RCHRES 4 HTRCH HTEXCH 1. 
PLTGEN 2 INPUT MEAN 1 +RCHRES 5 OXRX DOX 1. DISPLY 3 INPUT TIMSER +RCHRES 5 GQUAL RSQAL 12 1. GENER 2 INPUT ONE +RCHRES 5 SEDTRN RSED 10 1. GENER 2 INPUT TWO +GENER 2 OUTPUT TIMSER 1.1 DISPLY 4 INPUT TIMSER +RCHRES 5 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 2 +END NETWORK + +SPEC-ACTIONS +*** test special actions + RCHRES 5 RSED 4 += 2.50E+05 + RCHRES 5 RSED 5 += 6.89E+05 + RCHRES 5 RSED 6 += 4.01E+05 +END SPEC-ACTIONS + + +END RUN diff --git a/tests/testcbp/HSP2results/check_depends_endpoint.py b/tests/testcbp/HSP2results/check_depends_endpoint.py new file mode 100644 index 00000000..49f27f18 --- /dev/null +++ b/tests/testcbp/HSP2results/check_depends_endpoint.py @@ -0,0 +1,75 @@ +# bare bones tester - must be run from the HSPsquared source directory +import os +from HSP2.main import * +from HSP2.om import * +import HSP2IO +import numpy +from HSP2IO.hdf import HDF5 +from HSP2IO.io import IOManager +fpath = './tests/test10/HSP2results/test10.h5' +# try also: +# fpath = './tests/testcbp/HSP2results/PL3_5250_0001.h5' +# sometimes when testing you may need to close the file, so try: +# f = h5py.File(fpath,'a') # use mode 'a' which allows read, write, modify +# # f.close() +hdf5_instance = HDF5(fpath) +io_manager = IOManager(hdf5_instance) +uci_obj = io_manager.read_uci() +siminfo = uci_obj.siminfo +opseq = uci_obj.opseq +# - finally stash specactions in state, not domain (segment) dependent so do it once +# now load state and the special actions +state = init_state_dicts() +state_initialize_om(state) +state['specactions'] = uci_obj.specactions # stash the specaction dict in state + +state_siminfo_hsp2(uci_obj, siminfo) +# Add support for dynamic functions to operate on STATE +# - Load any dynamic components if present, and store variables on objects +state_load_dynamics_hsp2(state, io_manager, siminfo) +# Iterate through all segments and add crucial paths to state +# before loading dynamic components that may reference them +state_init_hsp2(state, opseq, activities) 
+state_load_dynamics_specl(state, io_manager, siminfo) # traditional special actions +state_load_dynamics_om(state, io_manager, siminfo) # operational model for custom python +state_om_model_run_prep(state, io_manager, siminfo) # this creates all objects from the UCI and previous loads +# state['model_root_object'].find_var_path('RCHRES_R001') + + +model_order_recursive(specl2, state['model_object_cache'], mel, mtl) +def mel_path(mel, state): + ixn = 1 + for ix in mel: + ip = get_ix_path(state['state_paths'], ix) + im = state['model_object_cache'][ip] + print(ixn, ":", im.name, "->", im.state_path, '=', im.get_state()) + ixn = ixn + 1 + return + +# Show order of ops based on dependencies +endpoint = state['model_object_cache']['/STATE/RCHRES_R005/RSED5'] +mel = [] +mtl = [] +model_order_recursive(endpoint, state['model_object_cache'], mel, mtl) +print("Dependency ordered execution for constants and runnables influencing", endpoint.name) +mel_path(mel, state) +mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mel) +print("Dependency ordered execution of runnables only for", endpoint.name) +mel_path(mel_runnable, state) + +# Aggregate the list of all SEDTRN end point dependencies +domain = '/STATE/RCHRES_R005' +ep_list = ['RSED4', 'RSED5', 'RSED6'] +mello = model_domain_dependencies(state, domain, ep_list) +print("Dependency ordered execution for RSED constants and runnables influencing", domain, "=", mello) +mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mello) +print("Dependency ordered execution of RSED runnables only for", domain, "=", mel_runnable) + +# Just for grins, we can show the dependency using the special action as an end point +specl2 = state['model_object_cache']['/STATE/SPECACTION2'] +mel = [] +mtl = [] +print("Dependency ordered execution for constants and runnables influencing ", rsed4.name) +model_order_recursive(specl2, state['model_object_cache'], mel, mtl) +mel_path(mel, state) +mel_runnable = 
ModelObject.runnable_op_list(state['op_tokens'], mel) From ef729177cfb66cb193a9b087ae09cd9d70885e11 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Fri, 12 Apr 2024 15:21:55 -0400 Subject: [PATCH 30/35] domain specific dependency endpoint support functions and test implementation in SEDTRN --- HSP2/om.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/HSP2/om.py b/HSP2/om.py index 46f06612..f1b7d1cd 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -55,11 +55,11 @@ def is_float_digit(n: str) -> bool: # Import Code Classes from HSP2.om_model_object import * from HSP2.om_sim_timer import * -from HSP2.om_equation import * +#from HSP2.om_equation import * from HSP2.om_model_linkage import * from HSP2.om_special_action import * -from HSP2.om_data_matrix import * -from HSP2.om_model_broadcast import * +#from HSP2.om_data_matrix import * +#from HSP2.om_model_broadcast import * #from HSP2.om_simple_channel import * #from HSP2.om_impoundment import * from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries From 5f4305158a8afd9bafe92f26259bea2c57a2b883 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Mon, 15 Apr 2024 10:08:36 -0400 Subject: [PATCH 31/35] added some informational items --- HSP2/om.py | 15 ++++++++++++++- HSP2/om_special_action.py | 4 +--- .../HSP2results/check_depends_endpoint.py | 17 +++++++++-------- 3 files changed, 24 insertions(+), 12 deletions(-) diff --git a/HSP2/om.py b/HSP2/om.py index f1b7d1cd..6129fb06 100644 --- a/HSP2/om.py +++ b/HSP2/om.py @@ -52,6 +52,19 @@ def is_float_digit(n: str) -> bool: except ValueError: return False +def model_element_paths(mel, state): + """ + Informational. 
If given a list of state_ix keys, shows the operators local name and state_path + """ + ixn = 1 + for ix in mel: + ip = get_ix_path(state['state_paths'], ix) + im = state['model_object_cache'][ip] + print(ixn, ":", im.name, "->", im.state_path, '=', im.get_state()) + ixn = ixn + 1 + return + + # Import Code Classes from HSP2.om_model_object import * from HSP2.om_sim_timer import * @@ -521,7 +534,7 @@ def step_one(op_tokens, ops, state_ix, dict_ix, ts_ix, step, debug = 0): if debug == 1: print("DEBUG: Operator ID", ops[1], "is op type", ops[0]) if ops[0] == 1: - step_equation(ops, state_ix) + pass #step_equation(ops, state_ix) elif ops[0] == 2: # todo: this should be moved into a single function, # with the conforming name step_matrix(op_tokens, ops, state_ix, dict_ix) diff --git a/HSP2/om_special_action.py b/HSP2/om_special_action.py index c8d81278..774a999b 100644 --- a/HSP2/om_special_action.py +++ b/HSP2/om_special_action.py @@ -35,9 +35,7 @@ def parse_model_props(self, model_props, strict=False): self.num = self.handle_prop(model_props, 'NUM', False, 1) # number of times to perform action self.timer_ix = self.handle_prop(model_props, 'when', False, 1) # when to begin the first attempt at action self.ctr_ix = self.constant_or_path('ctr', 0) # this initializes the counter for how many times an action has been performed - # now add the state value that we are operating on (the target) as an input, so that this gets executed AFTER this is set initially - #self.add_input('op1', ('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) + "/" + self.vari ), 2, True ) - # or, should we set up a register for the target + # NOTE: since the spec-action modifies the same quantity that is it's input, it does *not* set it as a proper "input" since that would create a circular dependency domain = self.model_object_cache[('/STATE/' + self.op_type + '_' + self.op_type[0] + str(self.range1).zfill(3) )] var_register = self.insure_register(self.vari, 0.0, 
domain, False) #print("Created register", var_register.name, "with path", var_register.state_path) diff --git a/tests/testcbp/HSP2results/check_depends_endpoint.py b/tests/testcbp/HSP2results/check_depends_endpoint.py index 49f27f18..4241efaa 100644 --- a/tests/testcbp/HSP2results/check_depends_endpoint.py +++ b/tests/testcbp/HSP2results/check_depends_endpoint.py @@ -35,8 +35,16 @@ state_om_model_run_prep(state, io_manager, siminfo) # this creates all objects from the UCI and previous loads # state['model_root_object'].find_var_path('RCHRES_R001') +# Aggregate the list of all SEDTRN end point dependencies +domain = '/STATE/RCHRES_R005' +ep_list = ['RSED4', 'RSED5', 'RSED6'] +mello = model_domain_dependencies(state, domain, ep_list) +print("Dependency ordered execution for RSED constants and runnables influencing", domain, "=", mello) +mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mello) +print("Dependency ordered execution of RSED runnables only for", domain, "=", mel_runnable) -model_order_recursive(specl2, state['model_object_cache'], mel, mtl) + +#### Extra testing output def mel_path(mel, state): ixn = 1 for ix in mel: @@ -57,13 +65,6 @@ def mel_path(mel, state): print("Dependency ordered execution of runnables only for", endpoint.name) mel_path(mel_runnable, state) -# Aggregate the list of all SEDTRN end point dependencies -domain = '/STATE/RCHRES_R005' -ep_list = ['RSED4', 'RSED5', 'RSED6'] -mello = model_domain_dependencies(state, domain, ep_list) -print("Dependency ordered execution for RSED constants and runnables influencing", domain, "=", mello) -mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mello) -print("Dependency ordered execution of RSED runnables only for", domain, "=", mel_runnable) # Just for grins, we can show the dependency using the special action as an end point specl2 = state['model_object_cache']['/STATE/SPECACTION2'] From 333a5d8df6f006fc978d2683227a08a6448c8979 Mon Sep 17 00:00:00 2001 From: Burgholzer 
Date: Mon, 15 Apr 2024 10:10:14 -0400 Subject: [PATCH 32/35] added some informational items --- .../HSP2results/check_depends_endpoint.py | 22 +++++++------------ 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/tests/testcbp/HSP2results/check_depends_endpoint.py b/tests/testcbp/HSP2results/check_depends_endpoint.py index 4241efaa..8f9a9af9 100644 --- a/tests/testcbp/HSP2results/check_depends_endpoint.py +++ b/tests/testcbp/HSP2results/check_depends_endpoint.py @@ -1,3 +1,4 @@ +# Must be run from the HSPsquared source directory, the h5 file has already been setup with hsp import_uci test10.uci # bare bones tester - must be run from the HSPsquared source directory import os from HSP2.main import * @@ -41,29 +42,22 @@ mello = model_domain_dependencies(state, domain, ep_list) print("Dependency ordered execution for RSED constants and runnables influencing", domain, "=", mello) mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mello) -print("Dependency ordered execution of RSED runnables only for", domain, "=", mel_runnable) +print("Dependency ordered execution of RSED depemndencies (all)", domain, "=", +model_element_paths(mello, state)) +print("Dependency ordered execution of RSED runnables only for", domain, "=", +model_element_paths(mel_runnable, state)) -#### Extra testing output -def mel_path(mel, state): - ixn = 1 - for ix in mel: - ip = get_ix_path(state['state_paths'], ix) - im = state['model_object_cache'][ip] - print(ixn, ":", im.name, "->", im.state_path, '=', im.get_state()) - ixn = ixn + 1 - return - # Show order of ops based on dependencies endpoint = state['model_object_cache']['/STATE/RCHRES_R005/RSED5'] mel = [] mtl = [] model_order_recursive(endpoint, state['model_object_cache'], mel, mtl) print("Dependency ordered execution for constants and runnables influencing", endpoint.name) -mel_path(mel, state) +model_element_paths(mel, state) mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mel) print("Dependency 
ordered execution of runnables only for", endpoint.name) -mel_path(mel_runnable, state) +model_element_paths(mel_runnable, state) # Just for grins, we can show the dependency using the special action as an end point @@ -72,5 +66,5 @@ def mel_path(mel, state): mtl = [] print("Dependency ordered execution for constants and runnables influencing ", rsed4.name) model_order_recursive(specl2, state['model_object_cache'], mel, mtl) -mel_path(mel, state) +model_element_paths(mel, state) mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mel) From f643464c5a215e17c9133202bab7a4ac59a37550 Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Mon, 22 Apr 2024 15:36:23 -0400 Subject: [PATCH 33/35] added new special action tester UCI and restored the HYDR njit to previous state --- HSP2/HYDR.py | 2 +- tests/test10/HSP2results/test10spec.uci | 1002 +++++++++++++++++++++++ 2 files changed, 1003 insertions(+), 1 deletion(-) create mode 100644 tests/test10/HSP2results/test10spec.uci diff --git a/HSP2/HYDR.py b/HSP2/HYDR.py index c714407b..2bade9f1 100644 --- a/HSP2/HYDR.py +++ b/HSP2/HYDR.py @@ -172,7 +172,7 @@ def hydr(io_manager, siminfo, uci, ts, ftables, state): return errors, ERRMSGS -@njit +@njit(cache=True) def _hydr_(ui, ts, COLIND, OUTDGT, rowsFT, funct, Olabels, OVOLlabels, state_info, state_paths, state_ix, dict_ix, ts_ix, state_step_hydr, op_tokens, model_exec_list): errors = zeros(int(ui['errlen'])).astype(int64) diff --git a/tests/test10/HSP2results/test10spec.uci b/tests/test10/HSP2results/test10spec.uci new file mode 100644 index 00000000..00657171 --- /dev/null +++ b/tests/test10/HSP2results/test10spec.uci @@ -0,0 +1,1002 @@ +RUN + +GLOBAL + Version 11 test run: PERLND and IMPLND w/ RCHRES (sediment, water quality) + START 1976 END 1976 + RUN INTERP OUTPUT LEVEL 3 + RESUME 0 RUN 1 UNIT SYSTEM 1 +END GLOBAL + + *** This test run uses MASS-LINK and SCHEMATIC blocks *** + +FILES + ***<----FILE NAME-------------------------------------------------> +WDM 21 
test10.wdm +MESSU 22 test10.ech + 01 test10.out + 66 test10.d66 + 94 test10.p94 + 95 test10.p95 +END FILES + +OPN SEQUENCE + INGRP INDELT 01:00 + PERLND 1 + RCHRES 1 + DISPLY 5 + DISPLY 1 + GENER 1 + DISPLY 2 + RCHRES 2 + RCHRES 3 + RCHRES 4 + PLTGEN 2 + IMPLND 1 + RCHRES 5 + DISPLY 3 + GENER 2 + DISPLY 4 + PLTGEN 1 + END INGRP +END OPN SEQUENCE + +PERLND + ACTIVITY + Active Sections (1=Active, 0=Inactive) *** + # - # ATMP SNOW PWAT SED PST PWG PQAL MSTL PEST NITR PHOS TRAC *** + 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + Print-flags *** PIVL PYR + # - # ATMP SNOW PWAT SED PST PWG PQAL MSTL PEST NITR PHOS TRAC *** + 1 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + <-------Name-------> Unit-systems Printer *** + # - # t-series Engl Metr *** + in out *** + 1 BICKNELL FARM 1 1 1 0 + END GEN-INFO + + *** Section SNOW *** + + ICE-FLAG + 0= Ice formation not simulated, 1= Simulated *** + # - #ICEFG *** + 1 1 + END ICE-FLAG + + SNOW-PARM1 + Snow input info: Part 1 *** + # - # LAT MELEV SHADE SNOWCF COVIND *** + 1 42. 520. 0.0 1.45 0.5 + END SNOW-PARM1 + + SNOW-PARM2 + Snow input info: Part 2 *** + # - # RDCSN TSNOW SNOEVP CCFACT MWATER MGMELT *** + 1 0.12 32. 0.05 0.5 0.08 0.0001 + END SNOW-PARM2 + + SNOW-INIT1 + Initial snow conditions: Part 1 *** + # - # PACKSNOW PACKICE PACKWATER RDENPF DULL PAKTMP *** + 1 1.4 0.2 0.1 0.2 375. 27.5 + END SNOW-INIT1 + + SNOW-INIT2 + Initial snow conditions: Part 2 *** + # - # COVINX XLNMLT SKYCLR *** + 1 0.50 0.0 1.0 + END SNOW-INIT2 + + *** Section PWATER *** + + PWAT-PARM1 + PWATER variable monthly parameter value flags *** + # - # CSNO RTOP UZFG VCS VUZ VNN VIFW VIRC VLE *** + 1 1 0 0 1 1 1 0 0 1 + END PWAT-PARM1 + + PWAT-PARM2 + *** PWATER input info: Part 2 + # - # ***FOREST LZSN INFILT LSUR SLSUR KVARY AGWRC + 1 0.010 8.0 0.150 250. 0.050 0.5 0.98 + END PWAT-PARM2 + + PWAT-PARM3 + *** PWATER input info: Part 3 + # - # ***PETMAX PETMIN INFEXP INFILD DEEPFR BASETP AGWETP + 1 40. 35. 
2.0 2.0 0.10 0.0 0.08 + END PWAT-PARM3 + + PWAT-PARM4 + PWATER input info: Part 4 *** + # - # CEPSC UZSN NSUR INTFW IRC LZETP *** + 1 0.01 0.1 1.0 0.60 + END PWAT-PARM4 + + MON-INTERCEP + Only required if VCSFG=1 in PWAT-PARM1 *** + # - # Interception storage capacity at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.04 0.04 0.03 0.03 0.03 0.03 0.10 0.17 0.19 0.14 0.05 0.04 + END MON-INTERCEP + + MON-UZSN + Only required if VUZFG=1 in PWAT-PARM1 *** + # - # Upper zone storage at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.4 0.4 0.4 0.4 1.6 1.1 1.1 1.3 1.3 1.3 1.1 0.9 + END MON-UZSN + + MON-MANNING + Only required if VNNFG=1 in PWAT-PARM1 *** + # - # Manning's n for overland flow at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.30 0.30 0.30 0.30 0.27 0.25 0.25 0.25 0.25 0.25 0.35 0.33 + END MON-MANNING + + MON-LZETPARM + Only required if VLEFG=1 in PWAT-PARM1 *** + # - # Lower zone ET parameter at start of each month *** + JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC *** + 1 0.20 0.20 0.20 0.23 0.23 0.25 0.60 0.80 0.75 0.50 0.30 0.20 + END MON-LZETPARM + + PWAT-STATE1 + *** Initial conditions at start of simulation + # - # *** CEPS SURS UZS IFWS LZS AGWS GWVS + 1 0.05 0.0 0.15 0.0 4.0 0.05 0.05 + END PWAT-STATE1 + + *** Section PSTEMP *** + + PSTEMP-PARM2 + *** + # - # ASLT BSLT ULTP1 ULTP2 LGTP1 LGTP2 *** + 1 14.5 .365 1.2 4.0 1.2 6.0 + END PSTEMP-PARM2 + + *** Section PWTGAS *** + + PWT-PARM2 + *** + # - # ELEV IDOXP ICO2P ADOXP ACO2P *** + 1 500. 6. .05 5. 
.05 + END PWT-PARM2 +END PERLND + +IMPLND + ACTIVITY + Active Sections *** + # - # ATMP SNOW IWAT SLD IWG IQAL *** + 1 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + Print-flags *** + # - # ATMP SNOW IWAT SLD IWG IQAL PIVL PYR *** + 1 4 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + <-------Name-------> Unit-systems Printer *** + # - # t-series Engl Metr *** + in out *** + 1 DONIGIAN INDUSTRY 1 1 1 0 + END GEN-INFO + + *** Section SNOW *** + + ICE-FLAG + 0= Ice formation not simulated, 1= Simulated *** + # - #ICEFG *** + 1 1 + END ICE-FLAG + + SNOW-PARM1 + Snow input info: Part 1 *** + # - # LAT MELEV SHADE SNOWCF COVIND *** + 1 42. 450. 0.0 1.45 0.5 + END SNOW-PARM1 + + SNOW-PARM2 + Snow input info: Part 2 *** + # - # RDCSN TSNOW SNOEVP CCFACT MWATER MGMELT *** + 1 0.12 32. 0.05 0.5 0.08 0.0001 + END SNOW-PARM2 + + SNOW-INIT1 + Initial snow conditions: Part 1 *** + # - # PACKSNOW PACKICE PACKWATER RDENPF DULL PAKTMP *** + 1 1.4 0.2 0.1 0.2 375. 27.5 + END SNOW-INIT1 + + SNOW-INIT2 + Initial snow conditions: Part 2 *** + # - # COVINX XLNMLT SKYCLR *** + 1 0.50 0.0 1.0 + END SNOW-INIT2 + + *** Section IWATER *** + + IWAT-PARM1 + Flags *** + # - # CSNO RTOP VRS VNN RTLI *** + 1 1 1 + END IWAT-PARM1 + + IWAT-PARM2 + *** + # - # LSUR SLSUR NSUR RETSC *** + 1 200. .010 .010 .01 + END IWAT-PARM2 + + IWAT-PARM3 + *** + # - # PETMAX PETMIN *** + 1 40. 35. + END IWAT-PARM3 + + IWAT-STATE1 + IWATER state variables *** + # - # RETS SURS *** + 1 .01 .01 + END IWAT-STATE1 + + *** Section SOLIDS *** + + SLD-PARM2 + *** + # - # KEIM JEIM ACCSDP REMSDP *** + 1 .08 1.9 .01 .5 + END SLD-PARM2 + + SLD-STOR + Solids storage (tons/acre) *** + # - # *** + 1 0.2 + END SLD-STOR + + *** Section IWTGAS *** + + IWT-PARM2 + *** + # - # ELEV AWTF BWTF *** + 1 410. 40. 
0.8 + END IWT-PARM2 + + *** Section IQUAL *** + + NQUALS + *** + # - #NQUAL *** + 1 1 + END NQUALS + + QUAL-PROPS + Identifiers and Flags *** + # - #<--qualid--> QTID QSD VPFW QSO VQO *** + 1 COD LB 1 1 + END QUAL-PROPS + + QUAL-INPUT + Storage on surface and nonseasonal parameters *** + # - # SQO POTFW ACQOP SQOLIM WSQOP *** + 1 1.20 .175 .02 2.0 1.7 + END QUAL-INPUT +END IMPLND + +RCHRES + ACTIVITY + RCHRES Active Sections (1=Active, 0=Inactive) *** + # - # HYFG ADFG CNFG HTFG SDFG GQFG OXFG NUFG PKFG PHFG *** + 1 5 1 1 1 1 1 1 1 1 1 1 + END ACTIVITY + + PRINT-INFO + RCHRES Print-flags *** + # - # HYDR ADCA CONS HEAT SED GQL OXRX NUTR PLNK PHCB PIVL PYR *** + 1 4 5 5 5 5 5 5 5 5 5 5 12 + 5 4 4 4 4 4 4 4 4 4 4 12 + END PRINT-INFO + + GEN-INFO + RCHRES<-------Name------->Nexit Unit Systems Printer *** + # - # t-series Engl Metr LKFG *** + in out *** + 1 MEIER POND 2 1 1 1 0 1 + 2 OUTLET 1 1 1 1 0 + 3 SPILLWAY 1 1 1 1 0 + 4 UPPER KITTLE CREEK 1 1 1 1 0 + 5 LOWER KITTLE CREEK 1 1 1 1 0 + END GEN-INFO + + *** Section HYDR *** + + HYDR-PARM1 + RCHRES Flags for HYDR section *** + # - # VC A1 A2 A3 ODFVFG for each ODGTFG for each *** FUNCT for each + FG FG FG FG possible exit possible exit *** possible exit + 1 2 3 4 5 1 2 3 4 5 *** + + 1 1 1 1 -1 6 + 2 5 1 1 1 4 + END HYDR-PARM1 + + HYDR-PARM2 + RCHRES *** + # - # DSN FTBN LEN DELTH STCOR KS DB50 *** + 1 00 1 0.5 1. .5 + 2 00 2 0.25 20. .5 + 3 00 3 0.25 30. .5 + 4 00 4 2.0 40. .5 + 5 00 5 3.0 40. .5 + END HYDR-PARM2 + + HYDR-INIT + RCHRES Initial conditions for HYDR section *** + # - # VOL Initial value of COLIND *** Initial value of OUTDGT + (ac-ft) for each possible exit *** for each possible exit + EX1 EX2 EX3 EX4 EX5 *** EX1 EX2 EX3 EX4 EX5 + 1 30. 4.0 5.0 + 2 5 0.0 4.0 + END HYDR-INIT + + *** Section CONS *** + + NCONS + RCHRES *** + # - #NCONS *** + 1 5 1 + END NCONS + + CONS-DATA + RCHRES Data for conservative constituent No. 3 *** + # - #<---Substance-id---> Conc ID CONV QTYID *** + 1 5 ALKALINITY 1000. 
MG/L 35.31 KG + END CONS-DATA + + *** Section HTRCH *** + + HEAT-PARM + RCHRES ELEV ELDAT CFSAEX KATRAD KCOND KEVAP *** + # - # *** + 1 5 450. 100. .95 + END HEAT-PARM + + HEAT-INIT + RCHRES TW AIRTMP *** + # - # *** + 1 5 60. 40. + END HEAT-INIT + + *** Section SEDTRN *** + + SANDFG + RCHRES *** + # - # SNDFG *** + 1 2 1 + 3 4 2 + 5 3 + END SANDFG + + SED-GENPARM + RCHRES BEDWID BEDWRN POR *** + # - # *** + 1 200. 4. + 2 3 1.33 3. + 4 2.0 2. + 5 2.66 2. + END SED-GENPARM + + SAND-PM + RCHRES D W RHO KSAND EXPSND *** + # - # *** + 1 5 .014 2.5 1.5 1.2 + END SAND-PM + + SILT-CLAY-PM + RCHRES D W RHO TAUCD TAUCS M *** + # - # *** + 1 .00063 .0066 2.2 .2 .4 .5 + 2 3 .00063 .0066 2.2 1.E-10 500. .5 + 4 5 .00063 .0066 2.2 .2 .4 .5 + END SILT-CLAY-PM + + SILT-CLAY-PM + RCHRES D W RHO TAUCD TAUCS M *** + # - # *** + 1 .000055 .000034 2.0 .15 .3 .75 + 2 3 .000055 .000034 2.0 1.E-10 500. .75 + 4 5 .000055 .000034 2.0 .15 .3 .75 + END SILT-CLAY-PM + + SSED-INIT + RCHRES Suspended sed concs (mg/l) *** + # - # Sand Silt Clay *** + 1 5 5. 20. 30. + END SSED-INIT + + BED-INIT + RCHRES BEDDEP Initial bed composition *** + # - # (ft) Sand Silt Clay *** + 1 2. .8 .1 .1 + 2 3 2. .8 .1 .1 + 4 5 1. .8 .1 .1 + END BED-INIT + + *** Section GQUAL *** + + GQ-GENDATA + RCHRES NGQL TPFG PHFG ROFG CDFG SDFG PYFG LAT *** + # - # *** + 1 5 1 1 1 2 1 1 1 42 + END GQ-GENDATA + + GQ-QALDATA + RCHRES<-------GQID-------> DQAL CONCID CONV QTYID *** + # - # *** + 1 5 PESTICIDE B4 10. UG 1.E6 G + END GQ-QALDATA + + GQ-QALFG + RCHRES HDRL OXID PHOT VOLT BIOD GEN SDAS *** + # - # *** + 1 5 1 1 1 1 1 1 1 + END GQ-QALFG + + GQ-HYDPM + RCHRES KA KB KN THHYD *** + # - # *** + 1 5 .001 .01 .001 1.03 + END GQ-HYDPM + + GQ-ROXPM + RCHRES KOX THOX *** + # - # *** + 1 5 .1 1.03 + END GQ-ROXPM + + GQ-PHOTPM + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 PHI THETA + 1 5 848. 544. 330. 195. 120. 68. 41. + 1 5 23. 13. 7. 4. 1. 
.1 + 1 5 .3 1.1 + END GQ-PHOTPM + + GQ-CFGAS + RCHRES CFGAS *** + # - # *** + 1 5 .001 + END GQ-CFGAS + + GQ-BIOPM + RCHRES BIOCON THBIO BIO *** + # - # *** + 1 5 .01 10. + END GQ-BIOPM + + GQ-GENDECAY + RCHRES FSTDEC THFST *** + # - # *** + 1 5 .2 + END GQ-GENDECAY + + GQ-SEDDECAY + RCHRES KSUSP THSUSP KBED THBED *** + # - # *** + 1 5 .002 + END GQ-SEDDECAY + + GQ-KD + RCHRES Partition coefficients *** + # - # ADPM(1,1) ADPM(2,1) ADPM(3,1) ADPM(4,1) ADPM(5,1) ADPM(6,1) *** + 1 .0001 .001 .001 .0001 .001 .001 + 2 3 .0001 .001 .001 1.E-10 1.E-10 1.E-10 + 4 5 .0001 .001 .001 .0001 .001 .001 + END GQ-KD + + GQ-ADRATE + RCHRES Adsorption/desorption rate parameters *** + # - # ADPM(1,2) ADPM(2,2) ADPM(3,2) ADPM(4,2) ADPM(5,2) ADPM(6,2) *** + 1 150. 150. 150. .25 .25 .25 + 2 3 150. 150. 150. 1000. 1000. 1000. + 4 5 150. 150. 150. .25 .25 .25 + END GQ-ADRATE + + GQ-SEDCONC + RCHRES SQAL1 SQAL2 SQAL3 SQAL4 SQAL5 SQAL6 *** + # - # *** + 1 .001 .01 .01 .001 .01 .01 + 2 3 .001 .01 .01 0. 0. 0. + 4 5 .001 .01 .01 .001 .01 .01 + END GQ-SEDCONC + + GQ-VALUES + RCHRES TWAT PHVAL ROC CLD SDCNC PHY *** + # - # *** + 1 5 1.E-5 + END GQ-VALUES + + GQ-ALPHA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .008 .009 .010 .011 .011 .011 .012 + 1 5 .013 .015 .016 .017 .018 .019 .020 + 1 5 .021 .022 .024 .024 + END GQ-ALPHA + + GQ-GAMMA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .001 .001 .001 .001 .001 .001 .001 + 1 5 .001 .002 .002 .002 .002 .002 .002 + 1 5 .002 .002 .002 .002 + END GQ-GAMMA + + GQ-DELTA + RCHRES*** + # - #*** K1 K2 K3 K4 K5 K6 K7 + # - #*** K8 K9 K10 K11 K12 K13 K14 + # - #*** K15 K16 K17 K18 + 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 + 1 5 .0007 .0007 .0007 .0007 .0007 .0007 .0007 + 1 5 .0007 .0007 .0007 .0007 + END GQ-DELTA + + GQ-CLDFACT + RCHRES*** + # - #*** F1 F2 F3 F4 F5 F6 F7 + # - #*** F8 F9 F10 F11 F12 F13 F14 + # - #*** 
F15 F16 F17 F18 + 1 5 .10 .10 .10 .15 .15 .15 .15 + 1 5 .17 .17 .17 .17 .18 .19 .20 + 1 5 .21 .21 .21 .21 + END GQ-CLDFACT + + *** Section RQUAL *** + + BENTH-FLAG + RCHRES BENF *** + # - # *** + 1 1 + 4 5 1 + END BENTH-FLAG + + SCOUR-PARMS + RCHRES SCRVEL SCRMUL *** + # - # *** + 1 5 3. + END SCOUR-PARMS + + *** Section OXRX *** + + OX-FLAGS + RCHRES REAM *** + # - # *** + 2 3 1 + 4 3 + 5 2 + END OX-FLAGS + + OX-GENPARM + RCHRES KBOD20 TCBOD KODSET SUPSAT *** + # - # /hr *** + 1 5 .1 8. + END OX-GENPARM + + OX-BENPARM + RCHRES BENOD TCBEN EXPOD BRBOD(1) BRBOD(2) EXPREL *** + # - # mg/m2.hr mg/m2.hr mg/m2.hr *** + 1 5 10. 1.1 1.2 20. 25. 1.3 + END OX-BENPARM + + OX-CFOREA + RCHRES CFOREA *** + # - # *** + 1 5. + END OX-CFOREA + + OX-REAPARM + RCHRES TCGINV REAK EXPRED EXPREV *** + # - # /hr *** + 4 2.0 -1.1 1.1 + END OX-REAPARM + + OX-INIT + RCHRES DOX BOD SATDO *** + # - # mg/l mg/l mg/l *** + 1 5 8. 100. + END OX-INIT + + *** Section NUTRX *** + + NUT-FLAGS + RCHRES TAM NO2 PO4 AMV DEN ADNH ADPO PHFL *** + # - # *** + 1 5 1 1 1 1 1 0 0 + END NUT-FLAGS + + NUT-BENPARM + RCHRES BRTAM(1) BRTAM(2) BRPO4(1) BRPO4(2) ANAER *** + # - # mg/m2.hr mg/m2.hr mg/m2.hr mg/m2.hr mg/l *** + 1 5 11.0 33.0 1.1 2.2 0.0005 + END NUT-BENPARM + + NUT-NITDENIT + RCHRES KTAM20 KNO220 TCNIT KNO320 TCDEN DENOXT *** + # - # /hr /hr /hr mg/l *** + 1 5 .002 .004 1.07 .001 1.04 0.2 + END NUT-NITDENIT + + NUT-NH3VOLAT + RCHRES EXPNVG EXPNVL *** + # - # *** + 1 5 .50 0.6667 + END NUT-NH3VOLAT + + NUT-BEDCONC + RCHRES Bed concentrations of NH4 & PO4 (mg/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0.00001 0.00001 0.00001 0.00001 0.00001 0.00001 + END NUT-BEDCONC + + NUT-ADSPARM + RCHRES Partition coefficients for NH4 AND PO4 (l/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0.0001 0.0001 0.0001 10. 10. 10. + END NUT-ADSPARM + + NUT-DINIT + RCHRES NO3 TAM NO2 PO4 PHVAL *** + # - # mg/l mg/l mg/l mg/l ph units *** + 1 5 40. 10. 1. 
50. 7.0 + END NUT-DINIT + + NUT-ADSINIT + RCHRES Initial suspended NH4 and PO4 concentrations (mg/mg) *** + # - # NH4-sand NH4-silt NH4-clay PO4-sand PO4-silt PO4-clay *** + 1 5 0. 0. 0. 0. 0. 0. + END NUT-ADSINIT + + *** Section PLANK *** + + PLNK-FLAGS + RCHRES PHYF ZOOF BALF SDLT AMRF DECF NSFG ZFOO *** + # - # *** + 1 5 1 1 1 1 1 1 + END PLNK-FLAGS + + PLNK-PARM1 + RCHRES RATCLP NONREF LITSED ALNPR EXTB MALGR *** + # - # /ft /hr *** + 1 5 4.5 + END PLNK-PARM1 + + PHYTO-PARM + RCHRES SEED MXSTAY OREF CLALDH PHYSET REFSET *** + # - # mg/l mg/l ug/l *** + 1 5 .1 .1 .5 .5 + END PHYTO-PARM + + ZOO-PARM1 + RCHRES MZOEAT ZFIL20 ZRES20 ZD OXZD *** + # - # mg/l.hr l/mgzoo.hr /hr /hr /hr *** + 1 5 .2 + END ZOO-PARM1 + + PLNK-INIT + RCHRES PHYTO ZOO BENAL ORN ORP ORC *** + # - # mg/l org/l mg/m2 mg/l mg/l mg/l *** + 1 5 40. 200. 5. 20. 20. 20. + END PLNK-INIT + + *** Section PHCARB *** + + PH-PARM1 + RCHRES PHCN ALKC *** + # - # *** + 1 5 50 + END PH-PARM1 + + PH-INIT + RCHRES TIC CO2 PH *** + # - # mg/l mg/l *** + 1 5 20. 5. 
8.5 + END PH-INIT +END RCHRES + +FTABLES + FTABLE 1 + ROWS COLS *** + 14 6 + WINTER SUMMER SPLWAY *** + DEPTH AREA VOLUME OUTLET OUTLET DISCH *** + (FT) (ACRES) (AC-FT) DISCH DISCH (CFS) *** + (CFS) (CFS) *** + .000 .000 .0000 .0000 .0000 .0000 + 2.000 1.212 1.2120 0.0000 .0000 .0000 + 4.000 2.424 4.8480 0.0000 .0000 .0000 + 6.000 3.636 10.9080 0.0000 .0000 .0000 + 8.000 4.848 19.3920 0.0000 .0000 .0000 + 10.000 6.061 30.3050 0.0000 .0000 .0000 + 12.000 7.273 43.6380 5.0000 3.5000 .0000 + 14.000 8.485 59.3950 6.2500 4.3750 .0000 + 16.000 9.697 77.5760 7.5000 5.2500 .0000 + 18.000 10.909 98.1810 8.7500 6.1250 .0000 + 20.000 12.121 121.2100 10.0000 7.0000 .0000 + 21.000 12.727 133.6360 10.6250 7.4375 50.0000 + 22.000 13.333 146.6630 11.2500 7.8750 100.0000 + 23.000 13.939 160.3030 11.8750 8.3125 500.0000 + END FTABLE 1 + + FTABLE 2 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .167 .071 .0109 1.2241 6.5 + .333 .081 .0236 3.9148 4.4 + .500 .091 .0379 7.8193 3.5 + .667 .101 .0539 12.9032 3.0 + .833 .111 .0715 19.1853 2.7 + 1.000 .121 .0909 26.7046 2.5 + 1.333 .141 .1347 45.6529 2.1 + 1.667 .162 .1852 70.1757 1.9 + 2.000 .182 .2424 100.7192 1.7 + 2.667 .586 .4983 201.9005 1.8 + 3.333 .990 1.0236 344.6344 2.2 + 4.000 1.394 1.8182 537.0775 2.5 + END FTABLE 2 + + FTABLE 3 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .167 .071 .0109 1.4992 5.3 + .333 .081 .0236 4.7947 3.6 + .500 .091 .0379 9.5766 2.9 + .667 .101 .0539 15.8032 2.5 + .833 .111 .0715 23.4971 2.2 + 1.000 .121 .0909 32.7063 2.0 + 1.333 .141 .1347 55.9132 1.7 + 1.667 .162 .1852 85.9474 1.6 + 2.000 .182 .2424 123.3553 1.4 + 2.667 .586 .4983 247.2766 1.5 + 3.333 .990 1.0236 422.0892 1.8 + 4.000 1.394 1.8182 657.7828 2.0 + END FTABLE 3 + + FTABLE 4 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + 
.000 .000 .0000 .000 0.0 + .250 .848 .1970 .9024 158.5 + .500 .970 .4242 2.8860 106.7 + .750 1.091 .6818 5.7642 85.9 + 1.000 1.212 .9697 9.5120 74.0 + 1.250 1.333 1.2879 14.1431 66.1 + 1.500 1.455 1.6364 19.6862 60.3 + 2.000 1.697 2.4242 33.6545 52.3 + 2.500 1.939 3.3333 51.7323 46.8 + 3.000 2.182 4.3636 74.2486 42.7 + 4.000 11.879 11.3939 155.5774 53.2 + 5.000 21.576 28.1212 296.8633 68.8 + 6.000 31.273 54.5454 522.1440 75.8 + END FTABLE 4 + + FTABLE 5 + ROWS COLS *** + 13 4 + DEPTH AREA VOLUME DISCH FLO-THRU *** + (FT) (ACRES) (AC-FT) (CFS) (MIN) *** + .000 .000 .0000 .000 0.0 + .333 1.697 .5253 1.5869 240.3 + .667 1.939 1.1313 5.0752 161.8 + 1.000 2.182 1.8182 10.1370 130.2 + 1.333 2.424 2.5859 16.7279 112.2 + 1.667 2.667 3.4343 24.8719 100.2 + 2.000 2.909 4.3636 34.6200 91.5 + 2.667 3.394 6.4646 59.1848 79.3 + 3.333 3.879 8.8889 90.9763 70.9 + 4.000 4.364 11.6364 130.5731 64.7 + 5.333 36.687 39.0034 284.8886 99.4 + 6.667 69.010 109.4680 593.7734 133.8 + 8.000 101.333 223.0302 1129.6948 143.3 + END FTABLE 5 +END FTABLES + +DISPLY + DISPLY-INFO1 + # - #<----------Title----------->***TRAN PIVL DIG1 FIL1 PYR DIG2 FIL2 YRND + 1 O2 CONC, MEIER POND (mg/l) AVER 1 2 66 12 + 2 PEST SED CONC, POND (mg/kg) AVER 1 2 66 12 + 3 O2 CONC,LOWER KITTLE C(mg/l) AVER 1 2 66 12 + 4 PEST SED CONC,L KTL C(mg/kg) AVER 1 2 66 12 + 5 WATER TEMP,MEIER POND (DEGF) AVER 1 2 66 12 + END DISPLY-INFO1 +END DISPLY + +GENER + OPCODE + # - # Op- *** + code *** + 1 2 19 + END OPCODE +END GENER + +PLTGEN + PLOTINFO + # - # FILE NPT NMN LABL PYR PIVL *** + 1 94 2 24 + 2 95 3 1 6 + END PLOTINFO + + GEN-LABELS + # - #<----------------Title ----------------> *** <------Y axis------> + 1 SIMULATED FLOWS (CFS) CFS + 2 SIMULATED VALS RELATED TO TEMP&PH,RCH 4 + END GEN-LABELS + + SCALING + # - # YMIN YMAX IVLIN *** + 1 2 0. 150. 20. 
+ END SCALING + + CURV-DATA (first curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 TOTAL POND OUTFL 7 1 AVER + 2 AVDEP FOR RCH 4 7 1 LAST + END CURV-DATA + + CURV-DATA (second curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 1 LOWER KITTLE CR 8 2 AVER + 2 TW FOR RCH 4 8 2 LAST + END CURV-DATA + + CURV-DATA (third curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 2 PH FOR RCH 4 9 2 LAST + END CURV-DATA + + CURV-DATA (fourth curve) + <-Curve label--> Line Intg Col Tran *** + # - # type eqv code code *** + 2 HTEXCH FOR RCH 4 10 2 + END CURV-DATA +END PLTGEN + +EXT SOURCES +<-Volume-> SsysSgap<--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # tem strg<-factor->strg # # # # *** +WDM 39 PREC ENGLZERO SAME PERLND 1 EXTNL PREC +WDM 131 PREC ENGLZERO SAME IMPLND 1 EXTNL PREC +WDM 39 PREC ENGLZERO SAME RCHRES 1 3 EXTNL PREC +WDM 131 PREC ENGLZERO SAME RCHRES 4 5 EXTNL PREC +WDM 123 ATMP ENGL SAME PERLND 1 ATEMP AIRTMP +WDM 122 ATMP ENGL SAME IMPLND 1 ATEMP AIRTMP +WDM 123 ATMP ENGL SAME RCHRES 1 3 EXTNL GATMP +WDM 122 ATMP ENGL SAME RCHRES 4 5 EXTNL GATMP +WDM 41 EVAP ENGL .7 DIV PERLND 1 EXTNL PETINP +WDM 41 EVAP ENGL .7 DIV IMPLND 1 EXTNL PETINP +WDM 41 EVAP ENGL .7 DIV RCHRES 1 5 EXTNL POTEV +WDM 42 WIND ENGL DIV PERLND 1 EXTNL WINMOV +WDM 42 WIND ENGL DIV IMPLND 1 EXTNL WINMOV +WDM 42 WIND ENGL DIV RCHRES 1 5 EXTNL WIND +WDM 46 SOLR ENGL DIV PERLND 1 EXTNL SOLRAD +WDM 46 SOLR ENGL DIV IMPLND 1 EXTNL SOLRAD +WDM 46 SOLR ENGL DIV RCHRES 1 5 EXTNL SOLRAD +WDM 126 DEWP ENGL SAME PERLND 1 EXTNL DTMPG +WDM 125 DEWP ENGL SAME IMPLND 1 EXTNL DTMPG +WDM 126 DEWP ENGL SAME RCHRES 1 3 EXTNL DEWTMP +WDM 125 DEWP ENGL SAME RCHRES 4 5 EXTNL DEWTMP +WDM 140 CLND ENGL SAME RCHRES 1 EXTNL COLIND +WDM 135 CLDC ENGL SAME PERLND 1 EXTNL CLOUD +WDM 135 CLDC ENGL SAME IMPLND 1 EXTNL CLOUD +WDM 135 CLDC ENGL SAME RCHRES 1 5 EXTNL CLOUD +END EXT SOURCES + +SCHEMATIC +<-Source-> <--Area--> 
<-Target-> *** + # <-factor-> # # *** +PERLND 1 6000. RCHRES 1 1 +IMPLND 1 3000. RCHRES 5 2 +RCHRES 1 RCHRES 2 3 +RCHRES 1 RCHRES 3 4 +RCHRES 2 RCHRES 4 5 +RCHRES 3 RCHRES 4 5 +RCHRES 4 RCHRES 5 5 +END SCHEMATIC + +MASS-LINK + + MASS-LINK 1 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +PERLND PWATER PERO 0.0833333 RCHRES INFLOW IVOL +PERLND PWTGAS POHT RCHRES INFLOW IHEAT +PERLND PWTGAS PODOXM RCHRES INFLOW OXIF 1 +PERLND PWTGAS POCO2M RCHRES INFLOW PHIF 2 + END MASS-LINK 1 + + MASS-LINK 2 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +IMPLND IWATER SURO 0.0833333 RCHRES INFLOW IVOL +IMPLND SOLIDS SOSLD 0.10 RCHRES INFLOW ISED 1 +IMPLND SOLIDS SOSLD 0.46 RCHRES INFLOW ISED 2 +IMPLND SOLIDS SOSLD 0.44 RCHRES INFLOW ISED 3 +IMPLND IWTGAS SOHT RCHRES INFLOW IHEAT +IMPLND IWTGAS SODOXM RCHRES INFLOW OXIF 1 +IMPLND IWTGAS SOCO2M RCHRES INFLOW PHIF 2 +IMPLND IQUAL SOQUAL RCHRES INFLOW OXIF 2 + END MASS-LINK 2 + + MASS-LINK 3 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES OFLOW 1 RCHRES INFLOW + END MASS-LINK 3 + + MASS-LINK 4 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES OFLOW 2 RCHRES INFLOW + END MASS-LINK 4 + + MASS-LINK 5 + <-Grp> <-Member-><--Mult--> <-Grp> <-Member-> *** + # #<-factor-> # # *** +RCHRES ROFLOW RCHRES INFLOW + END MASS-LINK 5 + +END MASS-LINK + +NETWORK +<-Volume-> <-Grp> <-Member-><--Mult-->Tran <-Target vols> <-Grp> <-Member-> *** + # # #<-factor->strg # # # # *** +RCHRES 1 HTRCH TW 1. DISPLY 5 INPUT TIMSER +RCHRES 1 OXRX DOX 1. DISPLY 1 INPUT TIMSER +RCHRES 1 GQUAL RSQAL 12 1. GENER 1 INPUT ONE +RCHRES 1 SEDTRN RSED 10 1. GENER 1 INPUT TWO +GENER 1 OUTPUT TIMSER 1.1 DISPLY 2 INPUT TIMSER +RCHRES 1 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 1 +RCHRES 4 HYDR AVDEP 1. PLTGEN 2 INPUT POINT 1 +RCHRES 4 HTRCH TW 1. PLTGEN 2 INPUT POINT 2 +RCHRES 4 PHCARB PHST 3 1. PLTGEN 2 INPUT POINT 3 +RCHRES 4 HTRCH HTEXCH 1. 
PLTGEN 2 INPUT MEAN 1 +RCHRES 5 OXRX DOX 1. DISPLY 3 INPUT TIMSER +RCHRES 5 GQUAL RSQAL 12 1. GENER 2 INPUT ONE +RCHRES 5 SEDTRN RSED 10 1. GENER 2 INPUT TWO +GENER 2 OUTPUT TIMSER 1.1 DISPLY 4 INPUT TIMSER +RCHRES 5 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 2 +END NETWORK + +SPEC-ACTIONS +*** test special actions + RCHRES 5 RSED 4 += 2.50E+05 + RCHRES 5 RSED 5 += 6.89E+05 + RCHRES 5 RSED 6 += 4.01E+05 +END SPEC-ACTIONS + + +END RUN From a53fc20b72829249ae30435917630cdcd1893ebe Mon Sep 17 00:00:00 2001 From: Burgholzer Date: Mon, 22 Apr 2024 16:16:53 -0400 Subject: [PATCH 34/35] point check_depends_endpoint.py to use new test10spec.uci --- .gitignore | 1 + tests/testcbp/HSP2results/check_depends_endpoint.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index c67b3a04..ddcd2a35 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,7 @@ tests/GLWACSO/HSP2results/hspp007.uci tests/test_report_conversion.html # Omit big files +*.h5 tests/land_spec/hwmA51800.h5 tests/testcbp/HSP2results/PL3_5250_0001.h5 tests/testcbp/HSP2results/*.csv diff --git a/tests/testcbp/HSP2results/check_depends_endpoint.py b/tests/testcbp/HSP2results/check_depends_endpoint.py index 8f9a9af9..26e11136 100644 --- a/tests/testcbp/HSP2results/check_depends_endpoint.py +++ b/tests/testcbp/HSP2results/check_depends_endpoint.py @@ -7,7 +7,7 @@ import numpy from HSP2IO.hdf import HDF5 from HSP2IO.io import IOManager -fpath = './tests/test10/HSP2results/test10.h5' +fpath = './tests/test10/HSP2results/test10spec.h5' # try also: # fpath = './tests/testcbp/HSP2results/PL3_5250_0001.h5' # sometimes when testing you may need to close the file, so try: @@ -68,3 +68,5 @@ model_order_recursive(specl2, state['model_object_cache'], mel, mtl) model_element_paths(mel, state) mel_runnable = ModelObject.runnable_op_list(state['op_tokens'], mel) +model_element_paths(mel_runnable, state) + From 6e3d639ca6fcabad8f00a1183c65e27c7da3a445 Mon Sep 17 00:00:00 2001 From: Burgholzer 
Date: Wed, 24 Apr 2024 07:09:17 -0400 Subject: [PATCH 35/35] revert changes to test10.uci to omit SPECIAL ACTIONS --- tests/test10/HSP2results/test10.uci | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test10/HSP2results/test10.uci b/tests/test10/HSP2results/test10.uci index 101dab69..8b4c44fb 100644 --- a/tests/test10/HSP2results/test10.uci +++ b/tests/test10/HSP2results/test10.uci @@ -991,12 +991,4 @@ GENER 2 OUTPUT TIMSER 1.1 DISPLY 4 INPUT TIMSER RCHRES 5 HYDR ROVOL 12.1 PLTGEN 1 INPUT MEAN 2 END NETWORK -SPEC-ACTIONS -*** test special actions - RCHRES 5 RSED 4 += 2.50E+05 - RCHRES 5 RSED 5 += 6.89E+05 - RCHRES 5 RSED 6 += 4.01E+05 -END SPEC-ACTIONS - - END RUN