misc fixes
Fixes
* Allow the HDF5 filename to be passed to the main function instead
  of only an io_manager instance; having main accept only an
  io_manager broke all IPython notebooks (see the sketch after this
  list).
* Removed "print(table)" from readUCI which I think was a
  debugging statement.
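
A minimal sketch of the two call forms that main now accepts (the filename "test10.h5" is only a placeholder):

from HSP2.main import main
from HSP2IO.hdf import HDF5
from HSP2IO.io import IOManager

# Convenience form: pass the HDF5 filename and main() wraps it in
# HDF5/IOManager internally.
main("test10.h5", saveall=True, jupyterlab=False)

# Equivalent explicit form: build the IOManager yourself.
main(IOManager(HDF5("test10.h5")), saveall=True, jupyterlab=False)
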
IPython Notebook Fixes
* Edited some of the IPython notebooks that tried to find an element
  time series by an old name, for example "SURO" instead of the new
  name "SURO_sum" (see the sketch after this list).
* Edited some of the IPython notebooks so that filenames match the
  case of the files on disk (needed on case-sensitive filesystems).
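
As an illustration of the renaming, a notebook cell that reads a results table would now use the "_sum" suffix; the segment id "P001" is only a placeholder, and the path follows the RESULTS/{operation}_{segment}/{activity} layout used by HSP2IO/hdf.py:

import pandas as pd

# Results are stored under 'RESULTS/{operation}_{segment}/{activity}'.
results = pd.read_hdf("test10.h5", "RESULTS/PERLND_P001/PWATER")

# Old notebooks indexed results["SURO"]; the column is now "SURO_sum".
suro = results["SURO_sum"]
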
Deprecations
* The append method is no longer available for DataFrames, so those
  calls were changed to the pd.concat form (both deprecation fixes
  are sketched after this list).
* In pandas 3.0, to_hdf will require "key" to be passed as a keyword
  argument instead of a positional argument; that change was made now
  to silence the deprecation warning.
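
A minimal sketch of both migrations, using a throwaway DataFrame:

import pandas as pd

df = pd.DataFrame({"x": [1.0, 2.0]})
row = pd.DataFrame({"x": [3.0]})

# Before (removed in recent pandas releases): df = df.append(row)
df = pd.concat([df, row], ignore_index=True)

# Before: df.to_hdf(store, "RESULTS/example", format="t")
# Now "key" is passed as a keyword argument:
df.to_hdf("example.h5", key="RESULTS/example", format="t", data_columns=True)
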
Format
* Removed trailing spaces from some files.
* Fixed indentation that was not a multiple of 4 spaces in at least one file.
* Renamed files with spaces in their names, replacing spaces with "_".
Documentation
* Additional edits to the README.rst.
* Reshaped some docstrings into NumPy format.
timcera committed Apr 30, 2024
1 parent fd31a26 commit 14d1f6b
Showing 25 changed files with 14,522 additions and 30,420 deletions.
4 changes: 3 additions & 1 deletion HSP2/__init__.py
@@ -2,9 +2,11 @@
Authors: Robert Heaphy, Ph.D. and Paul Duda
License: LGPL2
'''
from importlib.metadata import version

from HSP2.main import main
from HSP2.mainDoE import main as mainDoE
from HSP2.utilities import versions, flowtype
from _version import __version__

__version__ = version("hsp2")

57 changes: 32 additions & 25 deletions HSP2/main.py
@@ -8,28 +8,35 @@
from pandas import DataFrame, date_range
from pandas.tseries.offsets import Minute
from datetime import datetime as dt
from typing import Union
import os
from HSP2IO.hdf import HDF5
from HSP2.utilities import versions, get_timeseries, expand_timeseries_names, save_timeseries, get_gener_timeseries
from HSP2.configuration import activities, noop, expand_masslinks
from HSP2.state import *

from HSP2IO.io import IOManager, SupportsReadTS, Category

def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None:
"""Runs main HSP2 program.
def main(io_manager:Union[str, IOManager], saveall:bool=False, jupyterlab:bool=True) -> None:
"""
Run main HSP2 program.
Parameters
----------
saveall: Boolean - [optional] Default is False.
io_manager: str or IOManager
Either a path to an HDF5 file or an IOManager instance.
saveall: bool, default=False
Saves all calculated data ignoring SAVE tables.
jupyterlab: Boolean - [optional] Default is True.
Flag for specific output behavior for jupyter lab.
Return
------------
jupyterlab: bool, default=True
Flag for specific output behavior for jupyter lab.
Returns
-------
None
"""
if isinstance(io_manager, str):
hdf5_instance = HDF5(io_manager)
io_manager = IOManager(hdf5_instance)

hdfname = io_manager._input.file_path
if not os.path.exists(hdfname):
@@ -46,12 +53,12 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
ddext_sources = uci_obj.ddext_sources
ddgener = uci_obj.ddgener
uci = uci_obj.uci
siminfo = uci_obj.siminfo
siminfo = uci_obj.siminfo
ftables = uci_obj.ftables
specactions = uci_obj.specactions
monthdata = uci_obj.monthdata
specactions = {} # placeholder till added to uci parser

start, stop = siminfo['start'], siminfo['stop']

copy_instances = {}
@@ -64,12 +71,12 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
state = init_state_dicts()
state_siminfo_hsp2(uci_obj, siminfo)
# Add support for dynamic functins to operate on STATE
# - Load any dynamic components if present, and store variables on objects
# - Load any dynamic components if present, and store variables on objects
state_load_dynamics_hsp2(state, io_manager, siminfo)
# - finally stash specactions in state, not domain (segment) dependent so do it once
state['specactions'] = specactions # stash the specaction dict in state
#######################################################################################

# main processing loop
msg(1, f'Simulation Start: {start}, Stop: {stop}')
tscat = {}
@@ -80,7 +87,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
siminfo['steps'] = len(siminfo['tindex'])

if operation == 'COPY':
copy_instances[segment] = activities[operation](io_manager, siminfo, ddext_sources[(operation,segment)])
copy_instances[segment] = activities[operation](io_manager, siminfo, ddext_sources[(operation,segment)])
elif operation == 'GENER':
try:
ts = get_timeseries(io_manager, ddext_sources[(operation, segment)], siminfo)
@@ -102,7 +109,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
flags['ADNHFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADNHFG']
flags['PO4FG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['PO4FG']
flags['ADPOFG'] = uci[(operation, 'NUTRX', segment)]['FLAGS']['ADPOFG']

get_flows(io_manager, ts, flags, uci, segment, ddlinks, ddmasslinks, siminfo['steps'], msg)

for activity, function in activities[operation].items():
@@ -118,7 +125,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
msg(3, f'{activity}')
# Set context for dynamic executables.
state_context_hsp2(state, operation, segment, activity)

ui = uci[(operation, activity, segment)] # ui is a dictionary
if operation == 'PERLND' and activity == 'SEDMNT':
# special exception here to make CSNOFG available
@@ -182,7 +189,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
elif flags['PLANK']:
if 'CFSAEX' in uci[(operation, 'PLANK', segment)]['PARAMETERS']:
ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'PLANK', segment)]['PARAMETERS']['CFSAEX']

if activity == 'RQUAL':
# RQUAL inputs:
ui['advectData'] = uci[(operation, 'ADCALC', segment)]['adcalcData']
@@ -202,12 +209,12 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
ui['PARAMETERS']['NCONS'] = uci[(operation, 'CONS', segment)]['PARAMETERS']['NCONS']

# OXRX module inputs:
ui_oxrx = uci[(operation, 'OXRX', segment)]
ui_oxrx = uci[(operation, 'OXRX', segment)]

if flags['HYDR']:
ui_oxrx['PARAMETERS']['LEN'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['LEN']
ui_oxrx['PARAMETERS']['DELTH'] = uci[(operation, 'HYDR', segment)]['PARAMETERS']['DELTH']

if flags['HTRCH']:
ui_oxrx['PARAMETERS']['ELEV'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['ELEV']

@@ -221,17 +228,17 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None
ui['PARAMETERS']['CFSAEX'] = uci[(operation, 'HTRCH', segment)]['PARAMETERS']['CFSAEX']

# NUTRX, PLANK, PHCARB module inputs:
ui_nutrx = uci[(operation, 'NUTRX', segment)]
ui_plank = uci[(operation, 'PLANK', segment)]
ui_phcarb = uci[(operation, 'PHCARB', segment)]
ui_nutrx = uci[(operation, 'NUTRX', segment)]
ui_plank = uci[(operation, 'PLANK', segment)]
ui_phcarb = uci[(operation, 'PHCARB', segment)]

############ calls activity function like snow() ##############
if operation not in ['COPY','GENER']:
if (activity == 'HYDR'):
errors, errmessages = function(io_manager, siminfo, ui, ts, ftables, state)
elif (activity != 'RQUAL'):
errors, errmessages = function(io_manager, siminfo, ui, ts)
else:
else:
errors, errmessages = function(io_manager, siminfo, ui, ui_oxrx, ui_nutrx, ui_plank, ui_phcarb, ts, monthdata)
###############################################################

@@ -256,7 +263,7 @@ def main(io_manager:IOManager, saveall:bool=False, jupyterlab:bool=True) -> None

if 'SAVE' in ui:
save_timeseries(io_manager,ts,ui['SAVE'],siminfo,saveall,operation,segment,activity,jupyterlab,outstep)

if (activity == 'RQUAL'):
if 'SAVE' in ui_oxrx: save_timeseries(io_manager,ts,ui_oxrx['SAVE'],siminfo,saveall,operation,segment,'OXRX',jupyterlab,outstep_oxrx)
if 'SAVE' in ui_nutrx and flags['NUTRX'] == 1: save_timeseries(io_manager,ts,ui_nutrx['SAVE'],siminfo,saveall,operation,segment,'NUTRX',jupyterlab,outstep_nutrx)
Empty file added HSP2IO/__init__.py
Empty file.
44 changes: 22 additions & 22 deletions HSP2IO/hdf.py
@@ -1,12 +1,12 @@
from typing import Any, Union

import pandas as pd
from pandas.io.pytables import read_hdf
from HSP2IO.protocols import Category
from collections import defaultdict
from typing import Union, Any

from HSP2.uci import UCI
from HSP2IO.protocols import Category

class HDF5():

class HDF5:

def __init__(self, file_path:str) -> None:
self.file_path = file_path
@@ -24,9 +24,9 @@ def __exit__(self, exc_type, exc_value, trace):

def read_uci(self) -> UCI:
"""Read UCI related tables
Parameters: None
Returns: UCITuple
"""
@@ -78,44 +78,44 @@ def read_uci(self) -> UCI:
uci.monthdata[f'{op}/{module}'] = self._store[path]
return uci

def read_ts(self,
def read_ts(self,
category:Category,
operation:Union[str,None]=None,
segment:Union[str,None]=None,
operation:Union[str,None]=None,
segment:Union[str,None]=None,
activity:Union[str,None]=None) -> pd.DataFrame:
try:
path = ''
if category == category.INPUTS:
path = f'TIMESERIES/{segment}'
elif category == category.RESULTS:
path = f'RESULTS/{operation}_{segment}/{activity}'
return read_hdf(self._store, path)
return pd.read_hdf(self._store, path)
except KeyError:
return pd.DataFrame()

def write_ts(self,
data_frame:pd.DataFrame,
def write_ts(self,
data_frame:pd.DataFrame,
category: Category,
operation:str,
segment:str,
activity:str,
*args:Any,
operation:str,
segment:str,
activity:str,
*args:Any,
**kwargs:Any) -> None:
"""Saves timeseries to HDF5"""
path=f'{operation}_{segment}/{activity}'
if category:
path = 'RESULTS/' + path
complevel = None
complevel = None
if 'compress' in kwargs:
if kwargs['compress']:
complevel = 9
data_frame.to_hdf(self._store, path, format='t', data_columns=True, complevel=complevel)
#data_frame.to_hdf(self._store, path)
data_frame.to_hdf(self._store, key=path, format='t', data_columns=True, complevel=complevel)
#data_frame.to_hdf(self._store, key=path)

def write_log(self, hsp2_log:pd.DataFrame) -> None:
hsp2_log.to_hdf(self._store, 'RUN_INFO/LOGFILE', data_columns=True, format='t')
hsp2_log.to_hdf(self._store, key='RUN_INFO/LOGFILE', data_columns=True, format='t')

def write_versioning(self, versioning:pd.DataFrame) -> None:
versioning.to_hdf(self._store, 'RUN_INFO/VERSIONS', data_columns=True, format='t')
versioning.to_hdf(self._store, key='RUN_INFO/VERSIONS', data_columns=True, format='t')

