diff --git a/.gitignore b/.gitignore index bff7ab9..4aa600f 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ docs/build/ docs/site/ docs/tutorials docs/start +docs/code docs/.vscode # Files generated by invoking Julia with --code-coverage @@ -25,4 +26,7 @@ docs/.vscode *.jl.*.cov # Files generated by invoking Julia with --track-allocation -*.jl.mem \ No newline at end of file +*.jl.mem + + +log \ No newline at end of file diff --git a/docs/Manifest.toml b/docs/Manifest.toml index 41e6ca4..d7406c0 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -2,7 +2,7 @@ julia_version = "1.10.4" manifest_format = "2.0" -project_hash = "620786bb65c2992bc73165cb134818fb2070b24e" +project_hash = "af317a1f42bb3442bc90a60ed5937220c08fca6a" [[deps.ADTypes]] git-tree-sha1 = "72af59f5b8f09faee36b4ec48e014a79210f2f4f" @@ -1055,11 +1055,17 @@ git-tree-sha1 = "50aedf345a709ab75872f80a2779568dc0bb461b" uuid = "e33a78d0-f292-5ffc-b300-72abe9b543c8" version = "2.11.2+1" +[[deps.HydroModelTools]] +deps = ["ComponentArrays", "DataFrames", "Dates", "IterTools", "NamedTupleTools", "Optimization", "OptimizationBBO", "OptimizationOptimisers", "OrdinaryDiffEq", "ProgressMeter", "Reexport", "SciMLSensitivity", "Statistics", "TOML"] +path = "D:\\Julia\\Julia-1.10.4\\packages\\dev\\HydroModelTools" +uuid = "31f4d4b3-d71d-422d-8932-b4ab24c6e7e3" +version = "0.1.0" + [[deps.HydroModels]] -deps = ["Accessors", "CSV", "ComponentArrays", "DataFrames", "DataInterpolations", "Dates", "ForwardDiff", "Graphs", "Integrals", "IterTools", "LinearAlgebra", "LinearSolve", "Lux", "LuxCore", "ModelingToolkit", "NNlib", "NamedTupleTools", "Optimization", "OptimizationBBO", "OptimizationOptimisers", "OrdinaryDiffEq", "ProgressMeter", "Random", "Reexport", "RuntimeGeneratedFunctions", "SciMLBase", "SciMLSensitivity", "SparseArrays", "StableRNGs", "Statistics", "SymbolicUtils", "Symbolics", "TOML", "Zygote"] +deps = ["Accessors", "ComponentArrays", "DataInterpolations", "Graphs", "Integrals", "LinearAlgebra", "LinearSolve", "Lux", "LuxCore", "ModelingToolkit", "NNlib", "Random", "Reexport", "RuntimeGeneratedFunctions", "SparseArrays", "StableRNGs", "SymbolicUtils", "Symbolics", "TOML"] path = "E:\\JlCode\\HydroModels" uuid = "7e3cde01-c141-467b-bff6-5350a0af19fc" -version = "0.1.0" +version = "0.1.1" [[deps.HypergeometricFunctions]] deps = ["LinearAlgebra", "OpenLibm_jll", "SpecialFunctions"] diff --git a/docs/Project.toml b/docs/Project.toml index 86a2689..e5f2a1c 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -6,6 +6,8 @@ ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" DataInterpolations = "82cc6244-b520-54b8-b5a6-8a565e85f1d0" Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" +HydroModelTools = "31f4d4b3-d71d-422d-8932-b4ab24c6e7e3" HydroModels = "7e3cde01-c141-467b-bff6-5350a0af19fc" JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819" Lux = "b2108857-7c20-44ae-9111-449ecde12c47" diff --git a/docs/make.jl b/docs/make.jl index 1ecb38e..93f8d24 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -2,6 +2,7 @@ push!(LOAD_PATH, "../src/") using Documenter using HydroModels +# English Documentation makedocs( sitename = "HydroModels.jl", authors = "xin jing", @@ -19,32 +20,81 @@ makedocs( doctest = false, linkcheck = true, source = "src", - build = "build", + build = "build_en", # 配置页面结构 pages = [ "Home" => "index.md", - "Model Implementations" => [ - "construct a ExpHydro Model" => 
"implements/build_exphydro_model_en.md", - "construct a M50 Model" => "implements/build_m50_model_en.md", - "construct discharge route model" => "implements/build_discharge_route_en.md", - ], "Run Models" => [ - "Run a Bucket Model" => "tutorials/run_a_bucket.md", - "Run ExpHydro Model" => "tutorials/run_a_exphydro_model.md" + "Run ExpHydro Model" => "tutorials/run_exphydro_model.md", ], - "Extent Content" => [ - "Modeling Framework Comparisons" => "extent/framework_comparision_en.md", + "Basic Concepts" => "basic_concepts_en.md", + "Model Implementations" => [ + "construct the ExpHydro Model" => "implements/build_exphydro_model_en.md", + "construct the M50 Model" => "implements/build_m50_model_en.md", + "construct the discharge route model" => "implements/build_discharge_route_en.md", ], - - ], - # 其他选项 - checkdocs = :none, + "Extend Contents" => [ + "Why not using ModelingToolkit.jl directly" => "extent/why_not_MTK_en.md", + "Framework Comparision" => "extent/framework_comparision_en.md", + ] + ] ) +# # Chinese Documentation +# makedocs( +# sitename = "HydroModels.jl", +# authors = "xin jing", +# format = Documenter.HTML( +# # 启用 pretty URLs,移除 .html 后缀 +# # 设置文档的规范 URL +# canonical = "https://chooron.github.io/HydroModels.jl/dev", +# # 配置侧边栏 +# collapselevel = 2, +# sidebar_sitename = true +# ), +# # 配置模块 +# modules = [HydroModels], +# clean = true, +# doctest = false, +# linkcheck = true, +# source = "src", +# build = "build_zh", +# lang = "zh", +# # 配置页面结构 +# pages = [ +# "主页" => "index.md", +# "入门指南" => "getting_started.md", +# "基本概念" => [ +# "蓄水模型" => "concepts/bucket_model.md", +# "通量模型" => "concepts/flux_model.md", +# "汇流模型" => "concepts/route_model.md", +# "求解器模型" => "concepts/solver_model.md", +# "分布式模型" => "concepts/spatial_model.md", +# "集总式模型" => "concepts/lumped_model.md", +# ], +# "模型构建" => [ +# "构建 ExpHydro 模型" => "implements/build_exphydro_model_zh.md", +# "构建 M50 模型" => "implements/build_m50_model_zh.md", +# "构建河道汇流模型" => "implements/build_discharge_route_zh.md", +# ], +# "模型运行" => [ +# "运行蓄水模型" => "tutorials/run_a_bucket.md", +# "运行 ExpHydro 模型" => "tutorials/run_a_exphydro_model.md" +# ] +# ] +# ) + # 部署配置 deploydocs( repo = "github.com/chooron/HydroModels.jl", devbranch = "main", push_preview = true, - target = "build" # 确保这里指定了正确的构建目录 -) \ No newline at end of file + target = "build_en" # 确保这里指定了正确的构建目录 +) + +# deploydocs( +# repo = "github.com/chooron/HydroModels.jl", +# devbranch = "main", +# push_preview = true, +# target = "build_zh" # 确保这里指定了正确的构建目录 +# ) \ No newline at end of file diff --git a/docs/src/assets/concept.jpg b/docs/src/assets/concept.jpg new file mode 100644 index 0000000..7c9306c Binary files /dev/null and b/docs/src/assets/concept.jpg differ diff --git a/docs/src/assets/exphydro_predict.png b/docs/src/assets/exphydro_predict.png new file mode 100644 index 0000000..9e64b17 Binary files /dev/null and b/docs/src/assets/exphydro_predict.png differ diff --git a/docs/src/code/build_model.jl b/docs/src/code/build_model.jl new file mode 100644 index 0000000..09fdfe7 --- /dev/null +++ b/docs/src/code/build_model.jl @@ -0,0 +1,69 @@ +using CSV +using DataFrames +using ComponentArrays +using BenchmarkTools +using NamedTupleTools +using Plots +using HydroModels +using HydroModelTools +using OrdinaryDiffEq +include("../models/ExpHydro.jl") +println(exphydro_model) + +# Define model parameters +params = ComponentVector( + f = 0.01674478, # Infiltration parameter + Smax = 1709.461015, # Maximum soil water storage + Qmax = 18.46996175, # Maximum discharge + Df = 
2.674548848, # Degree-day factor + Tmax = 0.175739196, # Maximum temperature threshold + Tmin = -2.092959084 # Minimum temperature threshold +) + +# Define initial states +inistates = ComponentVector( + snowpack = 0.0, # Snow accumulation + soilwater = 1303.004248 # Soil water content +) + +pas = ComponentVector(params=params,initstates=inistates) + +# Read input data +file_path = "../data/exphydro/01013500.csv" +df = DataFrame(CSV.File(file_path)) +ts = collect(1:10000) # Time series length +input_ntp = (lday=df[ts, "dayl(day)"], temp=df[ts, "tmean(C)"], prcp=df[ts, "prcp(mm/day)"]) +input = Matrix(reduce(hcat, collect(input_ntp[HydroModels.get_input_names(exphydro_model)]))') + +# Configure solver +# solver = HydroModels.ManualSolver{true}() +solver = ODESolver() + +# Run model with benchmarking +@btime result = exphydro_model(input, pas, config=(solver=solver, timeidx=ts)); + +@btime result1 = exphydro_model(input, pas, config=(solver=ODESolver(alg=Tsit5(), abstol=1e-3, reltol=1e-3), timeidx=ts)); +@btime result2 = exphydro_model(input, pas, config=(solver=DiscreteSolver(), timeidx=ts)); + +# convert to NamedTuple +model_total_output_names = vcat(HydroModels.get_state_names(exphydro_model), HydroModels.get_output_names(exphydro_model)) +result_ntp = NamedTuple{Tuple(model_total_output_names)}(eachslice(result, dims=1)) + +DataFrame(result_ntp) +# Visualize results +plot(result_ntp.flow, label="Simulated") +plot!(df[ts, "flow(mm)"], label="Observed") + +savefig("exphydro_predict.png") + +# Setup multiple nodes +node_num = 10 # Number of nodes +inputs = repeat(reshape(input, size(input)[1], 1, size(input)[2]), 1, node_num, 1) +ptypes = [Symbol(:node, i) for i in 1:node_num] + +# Configure parameters and states for multiple nodes +params_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([params], node_num))) +init_states_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([inistates], node_num))) +pas_multi = ComponentVector(params=params_multi, initstates=init_states_multi) +# Run multi-node simulation +results = exphydro_model(inputs, pas_multi, config=(solver=solver, timeidx=ts)) \ No newline at end of file diff --git a/docs/src/concepts_en.md b/docs/src/concepts_en.md new file mode 100644 index 0000000..c0d620a --- /dev/null +++ b/docs/src/concepts_en.md @@ -0,0 +1,67 @@ +# HydroModels.jl Concepts + +HydroModels.jl implements a modular and extensible architecture, designed to support diverse hydrological modeling paradigms. The framework's core structure comprises four main classes: Flux, Bucket, Route, Model and Wrapper, each playing a vital role in constructing comprehensive hydrological systems. The framework is implemented in Julia programming language for model development, computation, and parameter optimization. It leverages Lux.jl as the deep learning framework and integrates with SciML for scientific computing, symbolic programming, and parameter optimization capabilities. The conceptual framework of the model design philosophy is illustrated in the following diagram: + +![Framework Concept](assets/concept.jpg) +Architecture and ecosystem of the HydroModels.jl framework. (a) illustrates the supporting ecosystem and its functional capabilities, highlighting the key dependencies and their roles in the framework; (b) shows the core architectural design of HydroModels.jl, demonstrating the main components and their interactions. 
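+
+As a minimal sketch of how these classes compose (using the `HydroFlux`, `StateFlux`, `HydroBucket`, and `HydroModel` constructors introduced in the model-implementation pages; the variables, the parameter `Smax`, and the expressions below are illustrative only):
+
+```julia
+using HydroModels
+
+@variables prcp pet evap soilwater flow
+@parameters Smax
+
+# HydroFlux: symbolic mappings from input variables to output fluxes
+evap_flux = HydroFlux([soilwater, pet] => [evap], [Smax], exprs=[pet * min(1.0, soilwater / Smax)])
+flow_flux = HydroFlux([soilwater] => [flow], [Smax], exprs=[max(0.0, soilwater - Smax)])
+# StateFlux: the mass balance of the storage state
+soil_dflux = StateFlux([prcp] => [evap, flow], soilwater)
+# HydroBucket couples fluxes and state equations; HydroModel chains components
+soil_bucket = HydroBucket(name=:soil, funcs=[evap_flux, flow_flux], dfuncs=[soil_dflux])
+minimal_model = HydroModel(name=:minimal, components=[soil_bucket])
+```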
+ +## Flux class: Water Process Transfer Representation + +The Flux class serves as the fundamental building block of HydroModels.jl, drawing inspiration from flux library of the MARRMoT while extending its capabilities. This class encapsulates the physical equations governing water movement throughout the hydrological cycle. The conceptual formulation of the Flux class can be expressed as: + + +```math +\begin{aligned} +Y(t) = f(X(t),\theta) && (1) \\ +\end{aligned} +``` + +where the functional representation $f$ maps input variables $X(t)$ and parameters $\theta$ to output variables $Y(t)$. Here, $X(t)$ represents input variables like precipitation and temperature, $\theta$ denotes the parameters that can be calibrated, and $Y(t)$ represents output variables like runoff and infiltration. Based on different applications, the Flux class includes: + +- HydroFlux: Implements fundamental hydrological processes through mathematical formulations +- StateFlux: Handles mass-balance equations for state variables like soil moisture and snowpack +- NeuralFlux: Uses neural networks to model processes or predict parameters, leveraging Lux.jl capabilities + +## Bucket Class: Storage Volume Change Simulation + +The Bucket class represents fundamental water storage components within the hydrological system. It consists of multiple HydroFlux components (including NeuralFlux) and StateFlux components, which collectively define the water balance dynamics through coupled differential equations. These components can represent various hydrological stores such as soil moisture, groundwater reservoirs, and surface water bodies. + +The Bucket class generates two essential functions: +1. An ODE solver function for state variables +2. A hydrological flux computation function for output calculations + +This dual-function architecture enables efficient computation of hydrological processes, following a two-step process of solving ODEs and computing output fluxes. + +## Route Class: Spatial Flow Propagation + +The Route class simulates lateral water fluxes across landscapes and river networks, supporting both lumped "integral" and distributed "differential" models. Its core components include: + +```math +\begin{aligned} +Q_{out}(t) &= f_{rflux}(S_{route}(t),Q_{gen}(t); ps) && (2) \\ +\frac{dS_{route}}{dt} &= Q_{in}(t) - Q_{out}(t) && (3) \\ +Q_{in}(t+1) &= f_{aggr}(Q_{out}(t)) && (4) \\ +\end{aligned} +``` + +Where: $Q_{in}$ is inflow, $Q_{out}$ is outflow, $S_{route}$ is the routing state variable, and $f_{aggr}$ is the aggregation function. + +The class supports various routing methods including: +- Standard state-update approaches +- Unit hydrograph through convolution operations +- Dynamic routing methods (Muskingum, kinematic wave) + +## Model Class: Hydrological Process Integration + +The Model class serves as the central management component, orchestrating the integration of Flux, Bucket, and Route components. It facilitates: +- Construction of distributed hydrological models +- Integration of vertical water movement with lateral connectivity +- Systematic parameter optimization and sensitivity analysis +- Efficient computation through metadata-driven approaches + +## Wrapper Class: Enhanced Component Capabilities + +The Wrapper class extends component customization capabilities while maintaining interface uniformity. 
Key features include: +- NamedTupleIO wrapper for customized input/output specifications +- EstimateParam wrapper for parameter prediction based on basin characteristics +- RecordComponentState wrapper for state variable storage, supporting both batch training and online forecasting \ No newline at end of file diff --git a/docs/src/extent/why_not_MTK.md b/docs/src/extent/why_not_MTK.md new file mode 100644 index 0000000..d74269f --- /dev/null +++ b/docs/src/extent/why_not_MTK.md @@ -0,0 +1,19 @@ +# HydroModels.jl与ModelingToolkit.jl + +## 为什么不直接使用[ModelingToolkit.jl](https://github.com/SciML/ModelingToolkit.jl) + +HydroModels.jl的设计理念事实上是与ModelingToolkit.jl完全一致的,两者都是使用Symbolics.jl进行符号计算的框架,ModelingToolkit.jl是一个更通用符号系统,支持多种问题的构建,同时针对特定领域[ModelingToolkitStandardLibrary.jl](https://github.com/SciML/ModelingToolkitStandardLibrary.jl)提供了基础组件的支持. +但是我之前在ModelingToolkit.jl的使用中存在一些问题: + +- 尽管ModelingToolkit.jl同样能够根据符号编程表达水文模型的常微分方程,但对水文模型中单位线计算,汇流过程计算等计算过程的支持却并不是特别理想 +- 在同一种模块中,比如蒸发计算公式和土壤计算模块,不同区域所需要构建的模块会因为公式的不同存在差异,需要分别构建计算公式支撑模型模型构建 +- 水文模型通常包括多个常微分方程,使用一个ODESystem构建会相对混乱,使用多个ODESystem会相对复杂 +- 同时对于多节点输入,空间汇流过程计算,ODESystem或无法直接支持. +- 对于神经网络模型的嵌入,尽管存在ModelingToolkitNeuralNets.jl,然而嵌入性却没有想象的那么简单 +- 基于ModelingToolkit.jl对于自动微分的支持,尤其是Zygote.jl我在使用时也存在问题. + +因此我决定自己构建一个模型库,参考了符号编程的模型构建方式,使其更能够支撑水文模型的一些建模需求. + +## 未来的工作 + +不可否认ModelingToolkit.jl是一个非常好的框架,在满足了水文模型的需求之后,我会尽量将HydroModels.jl中的模块能够成为 ModelingToolkitStandardLibrary.jl中相似的模块,并继承ModelingToolkit.jl提供的AbstractSystem类的支持,提供一致的功能支持,从而兼容更多SciML生态的计算功能. \ No newline at end of file diff --git a/docs/src/extent/why_not_MTK_en.md b/docs/src/extent/why_not_MTK_en.md new file mode 100644 index 0000000..224c89f --- /dev/null +++ b/docs/src/extent/why_not_MTK_en.md @@ -0,0 +1,20 @@ +# HydroModels.jl and ModelingToolkit.jl + +## Why Not Use [ModelingToolkit.jl](https://github.com/SciML/ModelingToolkit.jl) Directly + +The design philosophy of HydroModels.jl is actually completely aligned with ModelingToolkit.jl. Both frameworks use Symbolics.jl for symbolic computation. ModelingToolkit.jl is a more general symbolic system that supports the construction of various problems, and [ModelingToolkitStandardLibrary.jl](https://github.com/SciML/ModelingToolkitStandardLibrary.jl) provides support for basic components in specific domains. + +However, I encountered several issues when using ModelingToolkit.jl: + +- Although ModelingToolkit.jl can express the ordinary differential equations of hydrological models through symbolic programming, its support for calculations like unit hydrograph and routing processes in hydrological models is not particularly ideal +- Within the same module, such as evaporation calculation formulas and soil calculation modules, the modules needed for different regions may differ due to different formulas, requiring separate construction of calculation formulas to support model building +- Hydrological models typically include multiple ordinary differential equations. 
Using a single ODESystem becomes relatively chaotic, while using multiple ODESystems becomes relatively complex +- Additionally, ODESystem may not directly support multi-node input and spatial routing process calculations +- Although ModelingToolkitNeuralNets.jl exists for neural network model embedding, the integration is not as straightforward as imagined +- I also encountered issues with automatic differentiation support based on ModelingToolkit.jl, especially with Zygote.jl + +Therefore, I decided to build my own model library, referencing the symbolic programming model construction approach, to better support the modeling needs of hydrological models. + +## Future Work + +It's undeniable that ModelingToolkit.jl is an excellent framework. After meeting the requirements of hydrological models, I will try to make the modules in HydroModels.jl become similar modules in ModelingToolkitStandardLibrary.jl, and inherit the support of ModelingToolkit.jl's AbstractSystem class to provide consistent functional support, thereby maintaining compatibility with more computational capabilities in the SciML ecosystem. diff --git a/docs/src/implements/build_discharge_route_en.md b/docs/src/implements/build_discharge_route_en.md index 6274944..7bafe47 100644 --- a/docs/src/implements/build_discharge_route_en.md +++ b/docs/src/implements/build_discharge_route_en.md @@ -32,6 +32,7 @@ Q_{rf,n} &= S_{rf,n} \cdot \frac{1}{\text{LAG}_{rf} + 1} && (5) The $Q_{rf,n-1}$ term is removed because in continuous change, $S_{rf,n}$ is constantly evolving, making it unnecessary to consider $Q_{rf,n-1}$ in calculating $Q_{rf,n}$. Additionally, there are dimensional differences between $S_{rf,n}$ and $Q_{rf,n}$, and including $Q_{rf,n}$ would add an instantaneous state variable, which is unsuitable as a state variable. In HydroRoute construction, the primary focus is expressing the outflow calculation formula. Like HydroBucket, HydroRoute accepts HydroFlux to represent the $Q_{rf,n}$ calculation formula: ```julia +using HydroModels # Define parameters and variables @variables q q_routed s_river @parameters lag @@ -48,6 +49,7 @@ The method of obtaining upstream flow inputs is another crucial component of the The Vector-Based routing model is based on watershed subdivision and topological relationships, using an adjacency matrix for flow accumulation: ```julia +using Graphs # Build river network topology network = DiGraph(9) add_edge!(network, 1, 2) diff --git a/docs/src/implements/build_exphydro_model_en.md b/docs/src/implements/build_exphydro_model_en.md index cfd11e3..7eec92e 100644 --- a/docs/src/implements/build_exphydro_model_en.md +++ b/docs/src/implements/build_exphydro_model_en.md @@ -37,6 +37,8 @@ using HydroModels @variables snowpack soilwater @parameters Tmin Tmax Df Smax Qmax f +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 + # define snowpack bucket fluxes_1 = [ HydroFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) / (temp + 237.3)) / (temp + 273.2)]), @@ -88,6 +90,9 @@ In this code segment, we define all variables used in the ExpHydro model, includ The definition of `HydroFlux` requires determining input/output variables and model parameters based on calculation formulas. For example, in the rain-snow partitioning formula, the input variables are `prcp` and `temp`, output variables are `snowfall` and `rainfall`, and the model parameter is `Tmin`. 
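+In equation form, writing precipitation as $P$ and air temperature as $T$, with $H(\cdot)$ the smooth step implemented by `step_func`, the partition reads:
+
+```math
+P_{snow} = H(T_{min} - T) \cdot P, \qquad P_{rain} = H(T - T_{min}) \cdot P
+```
+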
The formula translation to `HydroFlux` looks like this: ```julia +# define the smooth function +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +# define the rain-snow partitioning flux split_flux = HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]) ``` diff --git a/docs/src/implements/build_exphydro_model_zh.md b/docs/src/implements/build_exphydro_model_zh.md index eeef4f8..4f55828 100644 --- a/docs/src/implements/build_exphydro_model_zh.md +++ b/docs/src/implements/build_exphydro_model_zh.md @@ -89,6 +89,9 @@ using HydroModels `HydroFlux`的定义需要根据计算公式确定模型的输入输出变量和模型参数,例如在模型的雨雪划分计算公式,该公式的输入变量为`prcp`和`temp`,输出变量为`snowfall`和`rainfall`,模型参数为`Tmin`, 公式转译为`HydroFlux`的结果如下所示: ```julia +# 定义平滑函数 +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +# 定义雨雪划分函数 split_flux = HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]) ``` diff --git a/docs/src/implements/build_m50_model_en.md b/docs/src/implements/build_m50_model_en.md index 283ee4f..3b47414 100644 --- a/docs/src/implements/build_m50_model_en.md +++ b/docs/src/implements/build_m50_model_en.md @@ -43,9 +43,34 @@ While this code follows the standard style of the DifferentialEquations.jl libra ## M50 Implementation in HydroModels.jl +First, we need to define the parameters and variables designed in the M50 model: + +```julia +using HydroModels + +#! parameters in the Exp-Hydro model +@parameters Tmin Tmax Df Smax f Qmax +#! parameters in normalize flux +@parameters snowpack_std snowpack_mean +@parameters soilwater_std soilwater_mean +@parameters prcp_std prcp_mean +@parameters temp_std temp_mean + +#! hydrological flux in the Exp-Hydro model +@variables prcp temp lday pet rainfall snowfall +@variables snowpack soilwater lday pet +@variables melt log_evap_div_lday log_flow flow +@variables norm_snw norm_slw norm_temp norm_prcp + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +``` + Compared to conventional conceptual hydrological models, the M50 model uses neural networks to replace hydrological flux calculation formulas (`ET` and `Q`). These neural networks differ significantly from standard calculation formulas, so HydroModels.jl uses `NeuralFlux` to represent neural networks. The NeuralFlux for ET and Q predictions is shown below: ```julia +using Lux +using StableRNGs + # define the ET NN and Q NN ep_nn = Lux.Chain( Lux.Dense(3 => 16, tanh), @@ -94,7 +119,7 @@ soil_funcs = [ q_nn_flux ] state_expr = rainfall + melt - step_func(soilwater) * lday * exp(log_evap_div_lday) - step_func(soilwater) * exp(log_flow) -soil_dfuncs = [StateFlux([soilwater, rainfall, melt, lday, log_evap_div_lday, log_flow], soilwater, Num[], expr=state_expr)] +soil_dfuncs = [StateFlux([soilwater, rainfall, melt, lday, log_evap_div_lday, log_flow], soilwater, expr=state_expr)] soil_ele = HydroBucket(name=:m50_soil, funcs=soil_funcs, dfuncs=soil_dfuncs) convert_flux = HydroFlux([log_flow] => [flow], exprs=[exp(log_flow)]) # define the Exp-Hydro model diff --git a/docs/src/implements/build_m50_model_zh.md b/docs/src/implements/build_m50_model_zh.md index 4832d1f..2be8c90 100644 --- a/docs/src/implements/build_m50_model_zh.md +++ b/docs/src/implements/build_m50_model_zh.md @@ -43,6 +43,29 @@ Qout_ = exp.(ann_Q(permutedims([S1_ P_interp]),p[:p2])[1,:]) ## HydroModels.jl中M50的实现方法 +首先需要定义M50模型中设计的参数和变量: + +```julia +using HydroModels + +#! parameters in the Exp-Hydro model +@parameters Tmin Tmax Df Smax f Qmax +#! 
parameters in normalize flux +@parameters snowpack_std snowpack_mean +@parameters soilwater_std soilwater_mean +@parameters prcp_std prcp_mean +@parameters temp_std temp_mean + +#! hydrological flux in the Exp-Hydro model +@variables prcp temp lday pet rainfall snowfall +@variables snowpack soilwater lday pet +@variables melt log_evap_div_lday log_flow flow +@variables norm_snw norm_slw norm_temp norm_prcp + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +``` + + 相比较普通的概念式水文模型,M50模型使用神经网络模型替换了水文通量的计算公式(`ET`和`Q`),这个神经网络相比普通的计算公式有着明显的差异,因此在HydroModels.jl中采用了`NeuralFlux`来表示神经网络, 用于ET和Q预测的NeuralFlux如下表示: ```julia @@ -95,7 +118,7 @@ soil_funcs = [ q_nn_flux ] state_expr = rainfall + melt - step_func(soilwater) * lday * exp(log_evap_div_lday) - step_func(soilwater) * exp(log_flow) -soil_dfuncs = [StateFlux([soilwater, rainfall, melt, lday, log_evap_div_lday, log_flow], soilwater, Num[], expr=state_expr)] +soil_dfuncs = [StateFlux([soilwater, rainfall, melt, lday, log_evap_div_lday, log_flow], soilwater, expr=state_expr)] soil_ele = HydroBucket(name=:m50_soil, funcs=soil_funcs, dfuncs=soil_dfuncs) convert_flux = HydroFlux([log_flow] => [flow], exprs=[exp(log_flow)]) # define the Exp-Hydro model diff --git a/docs/src/models/HBV.jl b/docs/src/models/HBV.jl new file mode 100644 index 0000000..0811e28 --- /dev/null +++ b/docs/src/models/HBV.jl @@ -0,0 +1,54 @@ +using ModelingToolkit + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +@variables soilwater snowpack meltwater suz slz +@variables prcp pet temp + +@variables rainfall snowfall +@variables melt refreeze infil +@variables wetness excess recharge evap +@variables q0 q1 q2 q perc + +@parameters TT CFMAX CFR CWH LP FC BETA PPERC UZL k0 k1 k2 kp + +#* snowfall and rainfall split flux +split_flux = HydroFlux([prcp, temp] => [snowfall, rainfall], [TT], exprs=[step_func(TT - temp) * prcp, step_func(temp - TT) * prcp]) +#* snowpack bucket +snow_funcs = [ + HydroFlux([temp, snowpack] => [melt], [TT, CFMAX], exprs=[min(snowpack, max(0.0, temp - TT) * CFMAX)]), + HydroFlux([temp, meltwater] => [refreeze], [TT, CFR, CFMAX], + exprs=[min(max((TT - temp), 0.0) * CFR * CFMAX, meltwater)]), + HydroFlux([meltwater] => [infil], [CWH], exprs=[max(0.0, meltwater - snowpack * CWH)]) +] +snow_dfuncs = [StateFlux([snowfall, refreeze] => [melt], snowpack), StateFlux([melt] => [refreeze, infil], meltwater)] +snow_bucket = HydroBucket(name=:hbv_snow, funcs=snow_funcs, dfuncs=snow_dfuncs) + +#* soilwater bucket +soil_funcs = [ + HydroFlux([infil, rainfall] => [recharge], [FC, BETA], exprs=[(rainfall + infil) * clamp((max(soilwater / FC,0.0))^BETA, 0, 1)]), + HydroFlux([soilwater] => [excess], [FC], exprs=[max(soilwater - FC, 0.0)]), + HydroFlux([soilwater, pet] => [evap], [LP, FC], exprs=[clamp(soilwater / (LP * FC), 0, 1) * pet]), +] +soil_dfuncs = [StateFlux([rainfall, infil] => [recharge, excess, evap], soilwater)] +soil_bucket = HydroBucket(name=:hbv_soil, funcs=soil_funcs, dfuncs=soil_dfuncs) + +zone_funcs = [ + HydroFlux([suz] => [perc], [PPERC], exprs=[suz * PPERC]), + HydroFlux([suz] => [q0], [UZL, k0], exprs=[max(0.0, suz - UZL) * k0]), + HydroFlux([suz] => [q1], [k1], exprs=[suz * k1]), + HydroFlux([slz] => [q2], [k2], exprs=[slz * k2]), + HydroFlux([q0, q1, q2] => [q], exprs=[q0 + q1 + q2]), +] + +zone_dfuncs = [ + StateFlux([recharge, excess] => [perc, q0, q1], suz), + StateFlux([perc] => [q2], slz), +] + +zone_bucket = HydroBucket( + name=:hbv_zone, + funcs=zone_funcs, + dfuncs=zone_dfuncs, +) + +hbv_model = HydroModel(name=:dpl_hbv, components=[split_flux, 
snow_bucket, soil_bucket, zone_bucket]) \ No newline at end of file diff --git a/docs/src/models/dplHBV.jl b/docs/src/models/dplHBV.jl new file mode 100644 index 0000000..b3e0bfd --- /dev/null +++ b/docs/src/models/dplHBV.jl @@ -0,0 +1,62 @@ +using Lux + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 + +function LSTMCompact(in_dims, hidden_dims, out_dims) + lstm_cell = LSTMCell(in_dims => hidden_dims) + classifier = Dense(hidden_dims => out_dims, sigmoid) + return @compact(; lstm_cell, classifier) do x::AbstractArray{T,2} where {T} + x = reshape(x, size(x)..., 1) + x_init, x_rest = Iterators.peel(LuxOps.eachslice(x, Val(2))) + y, carry = lstm_cell(x_init) + output = [vec(classifier(y))] + for x in x_rest + y, carry = lstm_cell((x, carry)) + output = vcat(output, [vec(classifier(y))]) + end + @return reduce(hcat, output) + end +end + +@variables soilwater snowpack meltwater suz slz +@variables prcp pet temp +@variables rainfall snowfall melt refreeze infil excess recharge evap q0 q1 q2 q perc +@parameters TT CFMAX CFR CWH LP FC PPERC UZL k0 k1 k2 kp +@variables BETA GAMMA +#* parameters estimate by NN +params_nn = LSTMCompact(3, 10, 2) +nn_wrapper = HydroModels.NeuralWrapper([prcp, temp, pet] => [BETA, GAMMA], params_nn, name=:pnn) + +#* snowfall and rainfall split flux +split_flux = HydroFlux([prcp, temp] => [snowfall, rainfall], [TT], + exprs=[step_func(TT - temp) * prcp, step_func(temp - TT) * prcp]) +#* snowpack bucket +snow_funcs = [ + HydroFlux([temp, snowpack] => [melt], [TT, CFMAX], exprs=[min(snowpack, max(0.0, temp - TT) * CFMAX)]), + HydroFlux([temp, meltwater] => [refreeze], [TT, CFR, CFMAX], exprs=[min(max((TT - temp), 0.0) * CFR * CFMAX, meltwater)]), + HydroFlux([meltwater] => [infil], [CWH], exprs=[max(0.0, meltwater - snowpack * CWH)]) +] +snow_dfuncs = [StateFlux([snowfall, refreeze] => [melt], snowpack), StateFlux([melt] => [refreeze, infil], meltwater)] +snow_bucket = HydroBucket(name=:hbv_snow, funcs=snow_funcs, dfuncs=snow_dfuncs) + +#* soilwater bucket +soil_funcs = [ + HydroFlux([infil, rainfall, BETA] => [recharge], [FC], exprs=[(rainfall + infil) * clamp(max(0.0, soilwater / FC)^(BETA * 5 + 1), 0, 1)]), + HydroFlux([soilwater] => [excess], [FC], exprs=[max(soilwater - FC, 0.0)]), + HydroFlux([soilwater, pet, GAMMA] => [evap], [LP, FC], exprs=[clamp(max(0.0, soilwater / (LP * FC))^(GAMMA + 1), 0, 1) * pet]), +] +soil_dfuncs = [StateFlux([rainfall, infil] => [recharge, excess, evap], soilwater)] +soil_bucket = HydroBucket(name=:hbv_soil, funcs=soil_funcs, dfuncs=soil_dfuncs) + +#* up and low zone bucket +zone_funcs = [ + HydroFlux([suz] => [perc], [PPERC], exprs=[suz * PPERC]), + HydroFlux([suz] => [q0], [UZL, k0], exprs=[max(0.0, suz - UZL) * k0]), + HydroFlux([suz] => [q1], [k1], exprs=[suz * k1]), + HydroFlux([slz] => [q2], [k2], exprs=[slz * k2]), + HydroFlux([q0, q1, q2] => [q], exprs=[q0 + q1 + q2]), +] +zone_dfuncs = [StateFlux([recharge, excess] => [perc, q0, q1], suz), StateFlux([perc] => [q2], slz),] +zone_bucket = HydroBucket(name=:hbv_zone, funcs=zone_funcs, dfuncs=zone_dfuncs) +dpl_hbv_model = HydroModel(name=:dpl_hbv, components=[nn_wrapper, split_flux, snow_bucket, soil_bucket, zone_bucket]) + diff --git a/docs/src/models/exphydro.jl b/docs/src/models/exphydro.jl new file mode 100644 index 0000000..128613b --- /dev/null +++ b/docs/src/models/exphydro.jl @@ -0,0 +1,29 @@ +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +# define variables and parameters + +@variables temp lday pet prcp snowfall rainfall snowpack melt +@parameters Tmin Tmax Df Smax Qmax f + 
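+# the second @variables group below covers the soil-water bucket (evaporation, baseflow, surface flow, total flow)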
+@variables soilwater pet evap baseflow surfaceflow flow rainfall + +# define model components +fluxes_1 = [ + HydroFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) / (temp + 237.3)) / (temp + 273.2)]), + HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]), + HydroFlux([snowpack, temp] => [melt], [Tmax, Df], exprs=[step_func(temp - Tmax) * step_func(snowpack) * min(snowpack, Df * (temp - Tmax))]), +] +dfluxes_1 = [StateFlux([snowfall] => [melt], snowpack),] +bucket_1 = HydroBucket(name=:surface, funcs=fluxes_1, dfuncs=dfluxes_1) + +fluxes_2 = [ + HydroFlux([soilwater, pet] => [evap], [Smax], exprs=[step_func(soilwater) * pet * min(1.0, soilwater / Smax)]), + HydroFlux([soilwater] => [baseflow], [Smax, Qmax, f], exprs=[step_func(soilwater) * Qmax * exp(-f * (max(0.0, Smax - soilwater)))]), + HydroFlux([soilwater] => [surfaceflow], [Smax], exprs=[max(0.0, soilwater - Smax)]), + HydroFlux([baseflow, surfaceflow] => [flow], exprs=[baseflow + surfaceflow]), +] +dfluxes_2 = [StateFlux([rainfall, melt] => [evap, flow], soilwater)] +bucket_2 = HydroBucket(name=:soil, funcs=fluxes_2, dfuncs=dfluxes_2) + +exphydro_model = HydroModel(name=:exphydro, components=[bucket_1, bucket_2]) + +export bucket_1, exphydro_model diff --git a/docs/src/models/gr4j.jl b/docs/src/models/gr4j.jl new file mode 100644 index 0000000..182bab2 --- /dev/null +++ b/docs/src/models/gr4j.jl @@ -0,0 +1,40 @@ +using HydroModels +using ModelingToolkit: @parameters, @variables + +#* parameters in the GR4J model +@parameters x1 x2 x3 x4 lag area_coef +#* hydrological flux in the production store +@variables prcp ep soilwater pn en ps es perc pr slowflow fastflow +#* hydrological flux in the unit hydrograph +@variables slowflow_routed fastflow_routed +#* hydrological flux in the routing store +@variables routingstore exch routedflow flow q q_routed s_river + +#* define the production store +prod_funcs = [ + HydroFlux([prcp, ep] => [pn, en], exprs=[prcp - min(prcp, ep), ep - min(prcp, ep)]), + HydroFlux([pn, soilwater] => [ps], [x1], exprs=[max(0.0, pn * (1 - (soilwater / x1)^2))]), + HydroFlux([en, soilwater] => [es], [x1], exprs=[en * (2 * soilwater / x1 - (soilwater / x1)^2)]), + HydroFlux([soilwater] => [perc], [x1], exprs=[((x1)^(-4)) / 4 * ((4 / 9)^(4)) * (soilwater^5)]), + HydroFlux([pn, ps, perc] => [pr], exprs=[pn - ps + perc]), + HydroFlux([pr] => [slowflow, fastflow], exprs=[0.9 * pr, 0.1 * pr]), +] +prod_dfuncs = [StateFlux([ps] => [es, perc], soilwater)] +prod_ele = HydroBucket(name=:gr4j_prod, funcs=prod_funcs, dfuncs=prod_dfuncs) + +#* define the routing store +rst_funcs = [ + HydroFlux([routingstore] => [exch], [x2, x3], exprs=[x2 * abs(routingstore / x3)^3.5]), + HydroFlux([routingstore, slowflow, exch] => [routedflow], [x3], exprs=[x3^(-4) / 4 * (routingstore + slowflow + exch)^5]), + HydroFlux([routedflow, fastflow_routed, exch] => [flow], exprs=[routedflow + max(fastflow_routed + exch, 0.0)]), +] +rst_dfuncs = [StateFlux([exch, slowflow] => [routedflow], routingstore)] +rst_ele = HydroBucket(name=:gr4j_rst, funcs=rst_funcs, dfuncs=rst_dfuncs) + + +# convert_flux = HydroFlux([flow] => [q], [area_coef], exprs=[flow * area_coef]) + +uh_flux_1 = HydroModels.UnitHydroFlux(slowflow, slowflow_routed, x4, uhfunc=HydroModels.UHFunction(:UH_1_HALF), solvetype=:SPARSE) +uh_flux_2 = HydroModels.UnitHydroFlux(fastflow, fastflow_routed, x4, uhfunc=HydroModels.UHFunction(:UH_2_FULL), solvetype=:SPARSE) + +gr4j_model = 
HydroModel(name=:gr4j, components=[prod_ele, uh_flux_1, uh_flux_2, rst_ele]) \ No newline at end of file diff --git a/docs/src/models/m100.jl b/docs/src/models/m100.jl new file mode 100644 index 0000000..e6074ac --- /dev/null +++ b/docs/src/models/m100.jl @@ -0,0 +1,58 @@ +using ModelingToolkit +using Lux +using StableRNGs + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 + +#! parameters in the Exp-Hydro model +@parameters Tmin Tmax Df Smax f Qmax +#! parameters in normalize flux +@parameters snowpack_std snowpack_mean +@parameters soilwater_std soilwater_mean +@parameters prcp_std prcp_mean +@parameters temp_std temp_mean + +#! hydrological flux in the Exp-Hydro model +@variables prcp temp lday pet rainfall snowfall +@variables snowpack soilwater lday pet +@variables melt log_evap_div_lday log_flow asinh_melt asinh_ps asinh_pr +@variables norm_snw norm_slw norm_temp norm_prcp + +#! define the m100 NN +m100_nn = Lux.Chain( + Lux.Dense(4, 32, tanh), + Lux.Dense(32, 32, leakyrelu), + Lux.Dense(32, 32, leakyrelu), + Lux.Dense(32, 32, leakyrelu), + Lux.Dense(32, 32, leakyrelu), + Lux.Dense(32, 5), + name=:m100nn +) +m100_nn_params = Vector(ComponentVector(first(Lux.setup(StableRNGs.LehmerRNG(1234), m100_nn)))) + +#! get init parameters for each NN + +#! define the soil water reservoir +m100_funcs = [ + #* normalize + HydroFlux([snowpack] => [norm_snw], [snowpack_mean, snowpack_std], exprs=[(snowpack - snowpack_mean) / snowpack_std]), + HydroFlux([soilwater] => [norm_slw], [soilwater_mean, soilwater_std], exprs=[(soilwater - soilwater_mean) / soilwater_std]), + HydroFlux([prcp] => [norm_prcp], [prcp_mean, prcp_std], exprs=[(prcp - prcp_mean) / prcp_std]), + HydroFlux([temp] => [norm_temp], [temp_mean, temp_std], exprs=[(temp - temp_mean) / temp_std]), + NeuralFlux([norm_snw, norm_slw, norm_prcp, norm_temp] => [log_evap_div_lday, log_flow, asinh_melt, asinh_ps, asinh_pr], m100_nn), + HydroFlux([asinh_melt, snowpack] => [melt], exprs=[relu(sinh(asinh_melt) * step_func(snowpack))]), +] + +m100_nn_flux = soil_funcs[end-1] +state_expr1 = relu(sinh(asinh_ps)) * step_func(-temp) - melt +state_expr2 = relu(sinh(asinh_pr)) + melt - step_func(soilwater) * lday * exp(log_evap_div_lday) - step_func(soilwater) * exp(log_flow) +m100_dfuncs = [ + StateFlux([asinh_ps, temp, melt], snowpack, Num[], expr=state_expr1), + StateFlux([asinh_pr, melt, soilwater, lday, log_evap_div_lday, log_flow], soilwater, Num[], expr=state_expr2), +] +m100_bucket = HydroBucket(name=:m100_bucket, funcs=m100_funcs, dfuncs=m100_dfuncs) +m100_bucket.ode_func +#! define the Exp-Hydro model +m100_model = HydroModel(name=:m100, components=[m100_bucket]); + +export m100_model \ No newline at end of file diff --git a/docs/src/models/m50.jl b/docs/src/models/m50.jl new file mode 100644 index 0000000..07812bd --- /dev/null +++ b/docs/src/models/m50.jl @@ -0,0 +1,70 @@ +HydroFlux = HydroModels.HydroFlux +StateFlux = HydroModels.StateFlux +NeuralFlux = HydroModels.NeuralFlux +HydroBucket = HydroModels.HydroBucket +HydroModel = HydroModels.HydroModel + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +#! parameters in the Exp-Hydro model +@parameters Tmin Tmax Df Smax f Qmax +#! parameters in normalize flux +@parameters snowpack_std snowpack_mean +@parameters soilwater_std soilwater_mean +@parameters prcp_std prcp_mean +@parameters temp_std temp_mean + +#! 
hydrological flux in the Exp-Hydro model +@variables prcp temp lday pet rainfall snowfall +@variables snowpack soilwater lday pet +@variables melt log_evap_div_lday log_flow flow +@variables norm_snw norm_slw norm_temp norm_prcp + +#! define the snow pack reservoir +snow_funcs = [ + HydroFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) / (temp + 237.3)) / (temp + 273.2)]), + HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]), + HydroFlux([snowpack, temp] => [melt], [Tmax, Df], exprs=[step_func(temp - Tmax) * min(snowpack, Df * (temp - Tmax))]), +] +snow_dfuncs = [StateFlux([snowfall] => [melt], snowpack)] +snow_ele = HydroBucket(name=:exphydro_snow, funcs=snow_funcs, dfuncs=snow_dfuncs) + +#! define the ET NN and Q NN +ep_nn = Lux.Chain( + Lux.Dense(3 => 16, tanh), + Lux.Dense(16 => 16, leakyrelu), + Lux.Dense(16 => 1, leakyrelu), + name=:epnn +) +ep_nn_params = Vector(ComponentVector(first(Lux.setup(StableRNGs.LehmerRNG(1234), ep_nn)))) +q_nn = Lux.Chain( + Lux.Dense(2 => 16, tanh), + Lux.Dense(16 => 16, leakyrelu), + Lux.Dense(16 => 1, leakyrelu), + name=:qnn +) +q_nn_params = Vector(ComponentVector(first(Lux.setup(StableRNGs.LehmerRNG(1234), q_nn)))) + +#! get init parameters for each NN +ep_nn_flux = NeuralFlux([norm_snw, norm_slw, norm_temp] => [log_evap_div_lday], ep_nn) +q_nn_flux = NeuralFlux([norm_slw, norm_prcp] => [log_flow], q_nn) + +#! define the soil water reservoir +soil_funcs = [ + #* normalize + HydroFlux([snowpack, soilwater, prcp, temp] => [norm_snw, norm_slw, norm_prcp, norm_temp], + [snowpack_mean, soilwater_mean, prcp_mean, temp_mean, snowpack_std, soilwater_std, prcp_std, temp_std], + exprs=[(var - mean) / std for (var, mean, std) in zip([snowpack, soilwater, prcp, temp], + [snowpack_mean, soilwater_mean, prcp_mean, temp_mean], + [snowpack_std, soilwater_std, prcp_std, temp_std] + )]), + ep_nn_flux, + q_nn_flux +] +state_expr = rainfall + melt - step_func(soilwater) * lday * exp(log_evap_div_lday) - step_func(soilwater) * exp(log_flow) +soil_dfuncs = [StateFlux([soilwater, rainfall, melt, lday, log_evap_div_lday, log_flow], soilwater, Num[], expr=state_expr)] +soil_ele = HydroBucket(name=:m50_soil, funcs=soil_funcs, dfuncs=soil_dfuncs) +convert_flux = HydroFlux([log_flow] => [flow], exprs=[exp(log_flow)]) +#! 
define the Exp-Hydro model +m50_model = HydroModel(name=:m50, components=[snow_ele, soil_ele, convert_flux]); + +export m50_model, eq_nn_flux, q_nn_flux \ No newline at end of file diff --git a/docs/src/tutorials/run_a_exphydro_model.md b/docs/src/tutorials/run_a_exphydro_model.md deleted file mode 100644 index 38e299a..0000000 --- a/docs/src/tutorials/run_a_exphydro_model.md +++ /dev/null @@ -1,124 +0,0 @@ -# ExpHydro Model Tutorial - -## Overview - -This tutorial demonstrates how to use the ExpHydro model for hydrological calculations, including: -- Lumped calculation (Single HRU) -- Distributed calculation (Multiple HRUs) - -## Dependencies - -First, import the required Julia packages: - -```julia -using CSV # Data reading -using DataFrames # Data processing -using ComponentArrays # Parameter organization -using BenchmarkTools # Performance testing -using NamedTupleTools # Named tuple utilities -using Plots # Result visualization -``` - -## Model Setup - -### Parameter Configuration - -The ExpHydro model contains 6 key parameters defined using ComponentVector: - -| Parameter | Value | Description | -| --------- | ------------ | ----------------------------- | -| `f` | 0.01674478 | Infiltration parameter | -| `Smax` | 1709.461015 | Maximum soil water storage | -| `Qmax` | 18.46996175 | Maximum discharge | -| `Df` | 2.674548848 | Degree-day factor | -| `Tmax` | 0.175739196 | Maximum temperature threshold | -| `Tmin` | -2.092959084| Minimum temperature threshold | - -```julia -# Define model parameters -params = ComponentVector( - f = 0.01674478, # Infiltration parameter - Smax = 1709.461015, # Maximum soil water storage - Qmax = 18.46996175, # Maximum discharge - Df = 2.674548848, # Degree-day factor - Tmax = 0.175739196, # Maximum temperature threshold - Tmin = -2.092959084 # Minimum temperature threshold -) -``` - -### Initial States - -Set initial states for the two calculation modules: - -```julia -# Define initial states -inistates = ComponentVector( - snowpack = 0.0, # Snow accumulation - soilwater = 1303.004248 # Soil water content -) -``` - -## Data Preparation - -The model uses hydrometeorological data from "data/exphydro/01013500.csv": - -```julia -# Read input data -file_path = "data/exphydro/01013500.csv" -data = CSV.File(file_path) -df = DataFrame(data) -ts = collect(1:10000) # Time series length -``` - -Input variables include: - -- Day length (dayl) -- Mean temperature (tmean) -- Precipitation (prcp) - -## Model Testing - -### Single Node Testing - -```julia -# Configure solver -solver = HydroModels.ManualSolver() - -# Run model with benchmarking -result = model(input, params, config=(solver=solver, timeidx=ts), convert_to_ntp=true) - -# Visualize results -plot(result.flow, label="Simulated") -plot!(df[ts, "flow(mm)"], label="Observed") -``` - -### Single Node Benchmarking - -```julia -# Performance testing using BenchmarkTools -@btime model(input, params, config=(solver=solver, timeidx=ts), convert_to_ntp=true); -``` - -### Multi-Node Testing - -Multi-node setup for distributed computing: - -```julia -# Setup multiple nodes -node_num = 10 # Number of nodes -inputs = repeat([input], node_num) -ptypes = [Symbol(:node, i) for i in 1:node_num] - -# Configure parameters and states for multiple nodes -params_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([params], node_num))) -init_states_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([inistates], node_num))) - -# Run multi-node simulation -results = model(inputs, params_multi, config=(solver=solver, 
timeidx=ts), convert_to_ntp=true) -``` - -### Multi-Node Benchmarking - -```julia -# Multi-node performance testing -@btime model(inputs, params_multi, config=(solver=solver, timeidx=ts), convert_to_ntp=true); \ No newline at end of file diff --git a/docs/src/tutorials/run_any_component_directly.md b/docs/src/tutorials/run_any_component_directly.md new file mode 100644 index 0000000..b433a2a --- /dev/null +++ b/docs/src/tutorials/run_any_component_directly.md @@ -0,0 +1,3 @@ +# Run Any Component Directly + +在HydroModels.jl中,任何组件(Flux,Bucket,Route,Model)都是可以直接通过数据和参数来进行计算的。这些组件的结构和使用方式如下: \ No newline at end of file diff --git a/docs/src/tutorials/run_exphydro_model.md b/docs/src/tutorials/run_exphydro_model.md new file mode 100644 index 0000000..9fec271 --- /dev/null +++ b/docs/src/tutorials/run_exphydro_model.md @@ -0,0 +1,188 @@ +# ExpHydro Model Tutorial + +## Overview + +This tutorial demonstrates how to use the ExpHydro model for hydrological calculations, including: + +- Lumped calculation (Single HRU) +- Distributed calculation (Multiple HRUs) + +## Dependencies + +First, import the required Julia packages: + +```julia +using CSV +using DataFrames +using ComponentArrays +using BenchmarkTools +using NamedTupleTools +using Plots +``` + +## Model Setup + +### Parameter Configuration + +The ExpHydro model contains 6 key parameters defined using ComponentVector: + +```julia +# Define model parameters +params = ComponentVector( + f = 0.01674478, # Infiltration parameter + Smax = 1709.461015, # Maximum soil water storage + Qmax = 18.46996175, # Maximum discharge + Df = 2.674548848, # Degree-day factor + Tmax = 0.175739196, # Maximum temperature threshold + Tmin = -2.092959084 # Minimum temperature threshold +) +``` + +### Initial States + +Set initial states for the two calculation modules: + +```julia +# Define initial states +inistates = ComponentVector( + snowpack = 0.0, # Snow accumulation + soilwater = 1303.004248 # Soil water content +) +``` + +## Data Preparation + +The model uses hydrometeorological data from "data/exphydro/01013500.csv": + +```julia +# Read input data +file_path = "data/exphydro/01013500.csv" +data = CSV.File(file_path) +df = DataFrame(data) +ts = collect(1:10000) # Time series length +``` + +Input variables include: + +- Day length (dayl) +- Mean temperature (tmean) +- Precipitation (prcp) + +## Import the ExpHydro Model + +```julia +include("../models/ExpHydro.jl") +println(exphydro_model) +``` + +```txt +HydroModel: exphydro + Components: surface, soil + Inputs: temp, lday, prcp + Outputs: pet, snowfall, rainfall, melt, evap, baseflow, surfaceflow, flow + Parameters: Tmin, Tmax, Df, Smax, Qmax, f + States: snowpack, soilwater + Components: + Fluxes: 0 fluxes + Buckets: 2 buckets + Route: nothing +``` + +## Model Testing + +### Single Node Testing + +```julia +# Configure solver +solver = HydroModels.ManualSolver{true}() +pas = ComponentVector(params=params, inistates=inistates) +# Run model with benchmarking +result = exphydro_model(input, pas, config=(solver=solver, timeidx=ts)) + +# convert to NamedTuple +model_total_output_names = vcat(HydroModels.get_state_names(exphydro_model), HydroModels.get_output_names(exphydro_model)) +result_ntp = NamedTuple{Tuple(model_total_output_names)}(eachslice(result, dims=1)) + +# Visualize results +plot(result_ntp.flow, label="Simulated") +plot!(df[ts, "flow(mm)"], label="Observed") +``` + +![exphydro predict](../assets/exphydro_predict.png) + +### The other hydrological fluxes are also available + +```julia +output_df = 
DataFrame(result) +``` + +```txt +10000×10 DataFrame + Row │ snowpack soilwater pet snowfall rainfall melt evap baseflow surfaceflow flow + │ Float64 Float64 Float64 Float64 Float64 Float64 Float64 Float64 Float64 Float64 +───────┼───────────────────────────────────────────────────────────────────────────────────────────────────────── + 1 │ 0.0 0.0 1.13779 0.0 3.1 0.0204477 0.0 1303.0 0.867263 0.0204477 + 2 │ 0.0 0.0 1.51019 0.0 4.24 0.0211591 0.0 1305.05 1.15292 0.0211591 + 3 │ 0.0 0.0 1.63204 0.0 8.02 0.0228657 0.0 1309.68 1.25036 0.0228657 + 4 │ 0.0 0.0 1.21771 0.0 15.27 0.0274505 0.0 1320.59 0.940706 0.0274505 + 5 │ 0.0 0.0 1.02779 0.0 8.48 0.033228 0.0 1332.0 0.800846 0.033228 + ⋮ │ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ + 9996 │ 0.0 261.111 0.221762 0.0 0.0 0.495441 -0.0 1493.37 0.193729 0.495441 + 9997 │ 0.0 266.638 0.238907 12.06 0.0 0.49106 -0.0 1492.84 0.208632 0.49106 + 9998 │ 0.0 278.143 0.288362 11.55 0.0 0.486664 -0.0 1492.3 0.25173 0.486664 + 9999 │ 0.0 285.026 0.311022 1.84 0.0 0.482119 -0.0 1491.74 0.271409 0.482119 + 10000 │ 0.0 286.074 0.176836 0.14 0.0 0.477275 -0.0 1491.14 0.154251 0.477275 +``` + +### trial another solver (using [HydroModelTools](https://github.com/chooron/HydroModelTools.jl)) + +```julia +using HydroModelTools: ODESolver, DiscreteSolver +using OrdinalDiffEq + +# using continuous solver +result1 = exphydro_model(input, pas, config=(solver=ODESolver(alg=Tsit5(), abstol=1e-3, reltol=1e-3), timeidx=ts)) +# using discrete solver +result2 = exphydro_model(input, pas, config=(solver=DiscreteSolver(), timeidx=ts)) +``` + +### Single Node Benchmarking + +```julia +# Performance testing using BenchmarkTools +@btime exphydro_model(input, params, config=(solver=solver, timeidx=ts), convert_to_ntp=true); +@btime exphydro_model(input, params, config=(solver=ODESolver(alg=Tsit5(), abstol=1e-3, reltol=1e-3), timeidx=ts), convert_to_ntp=true); +@btime exphydro_model(input, params, config=(solver=DiscreteSolver(), timeidx=ts), convert_to_ntp=true); +``` + +Efficiency of each solver + +```txt +16.612 ms (328670 allocations: 35.08 MiB) +16.748 ms (328670 allocations: 35.08 MiB) +7.844 ms (160412 allocations: 20.11 MiB) +``` + +### Multi-Node Testing + +Multi-node setup for distributed computing: + +```julia +# Setup multiple nodes +node_num = 10 # Number of nodes +inputs = repeat(reshape(input, size(input)[1], 1, size(input)[2]), 1, node_num, 1) +ptypes = [Symbol(:node, i) for i in 1:node_num] + +# Configure parameters and states for multiple nodes +params_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([params], node_num))) +init_states_multi = ComponentVector(NamedTuple{Tuple(ptypes)}(repeat([inistates], node_num))) +pas_multi = ComponentVector(params=params_multi, initstates=init_states_multi) +# Run multi-node simulation +results = exphydro_model(inputs, pas_multi, config=(solver=solver, timeidx=ts)) +``` + +### Multi-Node Benchmarking + +```julia +# Multi-node performance testing +@btime exphydro_model(inputs, pas_multi, config=(solver=solver, timeidx=ts)); \ No newline at end of file diff --git a/src/HydroModels.jl b/src/HydroModels.jl index 2693646..5ff3a13 100644 --- a/src/HydroModels.jl +++ b/src/HydroModels.jl @@ -40,7 +40,6 @@ using Lux using LuxCore using NNlib -struct HydroEquation end ## Abstract Component Types abstract type AbstractComponent end abstract type AbstractIOAdapter end @@ -64,14 +63,13 @@ export AbstractFlux, AbstractHydroFlux, AbstractNeuralFlux, AbstractStateFlux export AbstractElement, AbstractBucket, AbstractHydrograph, AbstractRoute, 
AbstractHydroRoute, AbstractModel # utils -include("utils/attr.jl") include("utils/name.jl") +include("utils/attr.jl") include("utils/show.jl") include("utils/build.jl") include("utils/sort.jl") include("utils/check.jl") -include("utils/io.jl") -export NamedTupleIOAdapter +# A discrete ODE solver, if want to use more efficient solver, please import HydroModelTools.jl include("utils/solver.jl") export ManualSolver @@ -86,6 +84,16 @@ include("uh.jl") export UHFunction, UnitHydrograph include("model.jl") export HydroModel -include("wrapper.jl") -export RecordComponentState, EstimateComponentParams, WeightSumComponentOutlet, ComputeComponentOutlet + +# include model wrappers +include("wappers/estimate_params.jl") +export EstimateComponentParams +include("wappers/record_states.jl") +export RecordComponentState +include("wappers/neural_wrapper.jl") +export NeuralWrapper +include("wappers/io_adapter.jl") +export NamedTupleIOAdapter +include("wappers/stats_outlet.jl") +export WeightSumComponentOutlet, ComputeComponentOutlet end # module HydroModels diff --git a/src/flux.jl b/src/flux.jl index c65ef06..024aef8 100644 --- a/src/flux.jl +++ b/src/flux.jl @@ -51,7 +51,7 @@ struct HydroFlux{T<:Num,F<:Function,M<:HydroMeta} <: AbstractHydroFlux inputs::Vector{T}, outputs::Vector{T}, params::Vector{T}; - exprs::Vector{T}=T[], + exprs::Vector{T}, ) where {T<:Num} #* name the flux output_names = Symbolics.tosymbol.(outputs, escape=false) @@ -62,12 +62,11 @@ struct HydroFlux{T<:Num,F<:Function,M<:HydroMeta} <: AbstractHydroFlux #* if no expression provided, use the hydrology formula library to build the flux #* build flux function flux_func = build_flux_func(inputs, outputs, params, exprs) - return new{T,typeof(flux_func),typeof(meta)}(inputs, outputs, params, exprs, flux_func, meta) end #* construct hydro flux with input fluxes and output fluxes - HydroFlux(fluxes::Pair{Vector{Num},Vector{Num}}, params::Vector{Num}=Num[]; exprs::Vector{Num}=Num[]) = HydroFlux(fluxes[1], fluxes[2], params, exprs=exprs) + HydroFlux(fluxes::Pair{Vector{Num},Vector{Num}}, params::Vector{Num}=Num[]; exprs::Vector{Num}) = HydroFlux(fluxes[1], fluxes[2], params, exprs=exprs) end """ diff --git a/src/model.jl b/src/model.jl index c7b8016..2baae1f 100644 --- a/src/model.jl +++ b/src/model.jl @@ -25,15 +25,15 @@ as inputs to later ones, effectively simulating the hydrological system over tim Each component's kwargs may be different, include solver, interp """ -struct HydroModel{M<:HydroMeta,C<:AbstractComponent,VI<:AbstractVector{<:AbstractVector{<:Integer}},VN<:AbstractVector{<:Symbol}} <: AbstractModel - "meta data of hydrological model" - meta::M +struct HydroModel{C<:AbstractComponent,M<:HydroMeta} <: AbstractModel "hydrological computation elements" components::Vector{C} "input variables index for each components" - varindices::VI - "all variables names" - varnames::VN + varindices::AbstractVector{<:AbstractVector{<:Integer}} + "output variables index for sort output variables" + outindices::AbstractVector{<:Integer} + "meta data of hydrological model" + meta::M function HydroModel(; name::Symbol, @@ -42,28 +42,33 @@ struct HydroModel{M<:HydroMeta,C<:AbstractComponent,VI<:AbstractVector{<:Abstrac ) where {C<:AbstractComponent} components = sort_components ? 
sort_components(components) : components input_names, output_names, state_names = get_var_names(components) + vcat_names = reduce(vcat, [input_names, output_names, state_names]) nn_names = reduce(union, get_nn_names.(components)) param_names = reduce(union, get_param_names.(components)) var_names = input_names input_idx = Vector{Int}[] + output_idx = Int[] for component in components tmp_input_idx = map((nm) -> findfirst(varnm -> varnm == nm, var_names), get_input_names(component)) - var_names = reduce(vcat, [var_names, get_state_names(component), get_output_names(component)]) + tmp_cpt_vcat_names = vcat(get_state_names(component), get_output_names(component)) + var_names = vcat(var_names, tmp_cpt_vcat_names) push!(input_idx, tmp_input_idx) + tmp_output_idx = map((nm) -> findfirst(varnm -> varnm == nm, vcat_names), tmp_cpt_vcat_names) + output_idx = vcat(output_idx, tmp_output_idx) end model_meta = HydroMeta(name, input_names, output_names, param_names, state_names, nn_names) - new{typeof(model_meta),C,typeof(input_idx),typeof(var_names)}( - model_meta, + new{C,typeof(model_meta)}( components, input_idx, - var_names, + output_idx, + model_meta, ) end end # 求解并计算 function (model::HydroModel)( - input::AbstractArray{T,2}, + input::AbstractArray{T,2}, pas::ComponentVector; config::Union{NamedTuple,Vector{<:NamedTuple}}=NamedTuple(), kwargs... @@ -75,7 +80,7 @@ function (model::HydroModel)( tmp_fluxes = comp_(fluxes[idx, :], pas; config=config_, convert_to_ntp=false) fluxes = cat(fluxes, tmp_fluxes, dims=1) end - return fluxes + return fluxes[model.outindices, :] end function (model::HydroModel)( @@ -86,11 +91,11 @@ function (model::HydroModel)( ) where {T<:Number} comp_configs = config isa NamedTuple ? fill(config, length(model.components)) : config @assert length(comp_configs) == length(model.components) "component configs length must be equal to components length" - + fluxes = input for (comp_, idx_, config_) in zip(model.components, model.varindices, comp_configs) tmp_fluxes = comp_(fluxes[idx_, :, :], pas; config=config_, convert_to_ntp=false) fluxes = cat(fluxes, tmp_fluxes, dims=1) end - return fluxes + return fluxes[model.outindices, :, :] end diff --git a/src/wappers/estimate_params.jl b/src/wappers/estimate_params.jl new file mode 100644 index 0000000..2be9d39 --- /dev/null +++ b/src/wappers/estimate_params.jl @@ -0,0 +1,81 @@ +""" + EstimateComponentParams <: AbstractHydroWrapper + +A wrapper component that estimates parameters for a hydrological component during simulation. + +# Fields +- `component::AbstractComponent`: The wrapped hydrological component +- `estfuncs::AbstractVector{<:AbstractHydroFlux}`: Vector of estimation functions +- `ptypes::AbstractVector{Symbol}`: Parameter types to be estimated +- `meta::HydroMeta`: Metadata about the component + +# Constructors + EstimateComponentParams(component::AbstractComponent, estfuncs::AbstractVector{<:AbstractHydroFlux}, ptypes::AbstractVector{Symbol}=Symbol[]) + +# Description +`EstimateComponentParams` wraps a hydrological component and dynamically estimates its parameters +using provided estimation functions during simulation. + +The wrapper requires at least one estimation function. Each estimation function should implement: +- `get_input_names`: Returns required input variable names +- `get_param_names`: Returns parameter names it can estimate +- `get_var_names`: Returns both input and output variable names + +When called, it: +1. Applies each estimation function to calculate parameters +2. 
Updates the parameter set with estimated values +3. Runs the wrapped component with updated parameters +4. Returns the component output + +The estimation process uses the parameter group keys specified in `pkeys` to organize +and update the correct parameter groups in the component. +""" +struct EstimateComponentParams{C<:AbstractComponent,E<:AbstractHydroFlux,P<:Union{AbstractVector{Symbol},Nothing},M<:HydroMeta} <: AbstractHydroWrapper + component::C + estfuncs::AbstractVector{E} + pkeys::P + meta::M + + function EstimateComponentParams( + component::C, estfuncs::AbstractVector{E}, pkeys::P=nothing + ) where {C<:AbstractComponent,E<:AbstractHydroFlux,P<:Union{AbstractVector{Symbol},Nothing}} + @assert length(estfuncs) != 0 "At least one estimation function is required" + comp_meta = component.meta + est_input_names, est_output_names = get_var_names(estfuncs) + est_param_names = reduce(union, get_param_names.(estfuncs)) + # todo: due to ComponentVector merging limitations, arbitrary values still need to be provided for estimated parameters + # new_param_names = setdiff(union(comp_meta.params, est_input_names, est_param_names), est_output_names) + new_param_names = union(comp_meta.params, est_input_names, est_param_names) + new_meta = HydroMeta(comp_meta.name, comp_meta.inputs, comp_meta.outputs, new_param_names, comp_meta.states, comp_meta.nns) + return new{C,E,P,typeof(new_meta)}(component, estfuncs, pkeys, new_meta) + end +end + +function (wrapper::EstimateComponentParams{C,E,P,M})(input::Any, pas::ComponentVector; kwargs...) where {C,E,P<:Nothing,M} + est_params_ntp = reduce(merge, map(wrapper.estfuncs) do estfunc + tmp_input = collect(pas[:params][get_input_names(estfunc)]) + tmp_pas = collect(pas[:params][get_param_names(estfunc)]) + NamedTuple{Tuple(get_output_names(estfunc))}(estfunc(tmp_input, pas)) + end) + new_pas = update_ca(pas, ComponentVector(params=est_params_ntp)) + output = wrapper.component(input, new_pas; kwargs...) + return output +end + +function (wrapper::EstimateComponentParams{C,E,P,M})(input::Any, pas::ComponentVector; kwargs...) where {C,E,P<:AbstractVector{Symbol},M} + est_config = (ptypes=wrapper.pkeys,) + est_params_mat = reduce(vcat, map(wrapper.estfuncs) do estfunc + tmp_input_mat = reduce(vcat, map(wrapper.pkeys) do pkey + tmp_input_vec = collect([pas[:params][pkey][nm] for nm in get_input_names(estfunc)]) + reshape(tmp_input_vec, 1, length(tmp_input_vec)) + end) + tmp_input_arr = permutedims(reshape(tmp_input_mat, 1, size(tmp_input_mat)...), (3, 2, 1)) + tmp_output_mat = estfunc(tmp_input_arr, pas, config=est_config)[:, :, 1] + tmp_output_mat + end) + est_param_names = reduce(union, get_output_names.(wrapper.estfuncs)) + est_params_ntp = NamedTuple{Tuple(wrapper.pkeys)}([NamedTuple{Tuple(est_param_names)}(est_params_mat[:, i]) for i in eachindex(wrapper.pkeys)]) + new_pas = update_ca(pas, ComponentVector(params=est_params_ntp)) + output = wrapper.component(input, new_pas; kwargs...)
+ return output +end \ No newline at end of file diff --git a/src/utils/io.jl b/src/wappers/io_adapter.jl similarity index 63% rename from src/utils/io.jl rename to src/wappers/io_adapter.jl index 42c446e..d720294 100644 --- a/src/utils/io.jl +++ b/src/wappers/io_adapter.jl @@ -3,6 +3,7 @@ struct NamedTupleIOAdapter{C,M} <: AbstractIOAdapter where {C<:AbstractComponent meta::M function NamedTupleIOAdapter(component::C) where {C<:AbstractComponent} + @assert !(component isa AbstractIOAdapter) "Component $component is already an IO adapter" new{C,typeof(component.meta)}(component, component.meta) end end @@ -13,11 +14,12 @@ function (adapter::NamedTupleIOAdapter)( config::Union{<:NamedTuple,Vector{<:NamedTuple}}=NamedTuple(), kwargs... )::NamedTuple - @assert((all(input_name in keys(input) for input_name in get_input_names(adapter))), - "Missing required inputs. Expected all of $(get_input_names(adapter)), but got $(keys(input)).") - input_matrix = Matrix(reduce(hcat, [input[k] for k in get_input_names(adapter)])') + @assert((all(input_name in keys(input) for input_name in get_input_names(adapter.component))), + "Missing required inputs. Expected all of $(get_input_names(adapter.component)), but got $(keys(input)).") + input_matrix = Matrix(reduce(hcat, [input[k] for k in get_input_names(adapter.component)])') output_matrix = adapter.component(input_matrix, pas; config=config, kwargs...) - return NamedTuple{Tuple(vcat(get_state_names(adapter), get_output_names(adapter)))}(eachslice(output_matrix, dims=1)) + output_names_tuple = Tuple(vcat(get_state_names(adapter.component), get_output_names(adapter.component))) + return NamedTuple{output_names_tuple}(eachslice(output_matrix, dims=1)) end function (adapter::NamedTupleIOAdapter)( @@ -27,12 +29,12 @@ function (adapter::NamedTupleIOAdapter)( kwargs... )::Vector{<:NamedTuple} for i in eachindex(input) - @assert(all(input_name in keys(input[i]) for input_name in get_input_names(adapter)), - "Missing required inputs. Expected all of $(get_input_names(adapter)), but got $(keys(input[i])) at $i input.") + @assert(all(input_name in keys(input[i]) for input_name in get_input_names(adapter.component)), + "Missing required inputs. Expected all of $(get_input_names(adapter.component)), but got $(keys(input[i])) at $i input.") end - input_mats = [reduce(hcat, collect(input[i][k] for k in get_input_names(adapter))) for i in eachindex(input)] - input_arr = reduce((m1, m2) -> cat(m1, m2, dims=3), input_mats) + input_mats = [reduce(hcat, collect(input[i][k] for k in get_input_names(adapter.component))) for i in eachindex(input)] + input_arr = permutedims(reduce((m1, m2) -> cat(m1, m2, dims=3), input_mats), (2, 3, 1)) output_arr = adapter.component(input_arr, pas; config=config, kwargs...)
- output_names_tuple = Tuple(vcat(get_state_names(adapter), get_output_names(adapter))) + output_names_tuple = Tuple(vcat(get_state_names(adapter.component), get_output_names(adapter.component))) return [NamedTuple{output_names_tuple}(eachslice(output_arr_, dims=1)) for output_arr_ in eachslice(output_arr, dims=2)] end \ No newline at end of file diff --git a/src/wappers/neural_wrapper.jl b/src/wappers/neural_wrapper.jl new file mode 100644 index 0000000..9cc919e --- /dev/null +++ b/src/wappers/neural_wrapper.jl @@ -0,0 +1,27 @@ +struct NeuralWrapper{N,F,M} <: AbstractNeuralWrapper + model::N + func::F + meta::M + + function NeuralWrapper(fluxes::Pair, model::N, model_name::Symbol; rng::R=StableRNG(42)) where {N,R} + #* assert the chain has a name + #* extract the parameter and state + ps, st = Lux.setup(rng, model) + ps_axes = getaxes(ComponentVector(ps)) + nn_func(x, p) = LuxCore.apply(model, x, ComponentVector(p, ps_axes), st)[1] + meta = HydroMeta(name=Symbol(model_name, :_nwrapper), inputs=fluxes.first, outputs=fluxes.second, nn_names=[model_name]) + return new{N,typeof(nn_func),typeof(meta)}(model, nn_func, meta) + end +end + +function (wrapper::NeuralWrapper)(input::Array{T,2}, pas::ComponentVector; kwargs...) where {T} + nn_ps = pas[:nn][wrapper.meta.name] + output = wrapper.func(input, nn_ps) + return output +end + +function (wrapper::NeuralWrapper)(input::Array{T,3}, pas::ComponentVector; kwargs...) where {T} + nn_ps = pas[:nn][wrapper.meta.name] + output = wrapper.func.(Matrix.(eachslice(input, dims=2)), Ref(nn_ps)) + return permutedims(reduce((m1, m2) -> cat(m1, m2, dims=3), output), (1, 3, 2)) +end \ No newline at end of file diff --git a/src/wappers/record_states.jl b/src/wappers/record_states.jl new file mode 100644 index 0000000..3588f3a --- /dev/null +++ b/src/wappers/record_states.jl @@ -0,0 +1,77 @@ +""" + RecordComponentState <: AbstractHydroWrapper + +A wrapper component that records the state values of a hydrological component during simulation. + +# Fields +- `component::AbstractComponent`: The wrapped hydrological component +- `states::ComponentVector`: Current state values of the component +- `meta::HydroMeta`: Metadata about the component + +# Constructors + RecordComponentState(component::AbstractComponent, initstates::Union{Nothing,ComponentVector}=nothing) + +# Description +`RecordComponentState` wraps a hydrological component and tracks its state values during simulation. +It updates and stores the latest state values after each simulation step. + +The wrapper requires that the wrapped component has state variables. If no initial states are provided, +it will use default states from the component. + +When called, it: +1. Updates the input parameters with current state values +2. Runs the wrapped component simulation +3. Extracts and stores the final state values +4. Returns the original component output + +The state values can be accessed through the `states` field at any time. +""" +struct RecordComponentState{C<:AbstractComponent,T<:Number,M<:HydroMeta} <: AbstractHydroWrapper + component::C + states::ComponentVector{T} + meta::M + + function RecordComponentState(component::C, initstates::Union{Nothing,ComponentVector}=nothing) where {C<:AbstractComponent} + @assert length(get_state_names(component)) != 0 "Component $component must have state flux" + initstates = isnothing(initstates) ? 
get_default_states(component) : initstates + new{C,eltype(initstates),typeof(component.meta)}(component, initstates, component.meta) + end +end + +function (wrapper::RecordComponentState{C,T,M})(input::Any, pas::ComponentVector; kwargs...) where {C,T,M} + reset_state = get(kwargs, :reset_state, false) + if reset_state + @reset wrapper.states = pas[:initstates] + end + pas_replace_states = update_ca(pas, ComponentVector(initstates=wrapper.states)) + output = wrapper.component(input, pas_replace_states; kwargs...) + state_names = get_state_names(wrapper.component) + if get(kwargs, :convert_to_ntp, false) + component_nodes = get_node_names(wrapper.component) + if isnothing(component_nodes) + states_ca = ComponentVector(NamedTuple{Tuple(state_names)}([output[nm][end] for nm in state_names])) + else + states_ca = ComponentVector(NamedTuple{Tuple(component_nodes)}(map(component_nodes) do node + NamedTuple{Tuple(state_names)}([output[node][nm][end] for nm in state_names]) + end)) + end + else + var_names = wrapper.component.varnames + state_indices = map(state_names) do sname + findfirst(vname -> vname == sname, var_names) + end + if output isa AbstractArray{<:Number,2} + latest_states = output[state_indices, end] + states_ca = ComponentVector(NamedTuple{Tuple(state_names)}(latest_states)) + elseif output isa AbstractArray{<:Number,3} + latest_states = output[state_indices, :, end] + component_nodes = get_node_names(wrapper.component) + states_ca = ComponentVector(NamedTuple{Tuple(component_nodes)}(map(enumerate(component_nodes)) do (node_idx, node) + NamedTuple{Tuple(state_names)}(latest_states[:, node_idx]) + end + )) + end + end + @reset wrapper.states = states_ca + return output +end diff --git a/src/wappers/stats_outlet.jl b/src/wappers/stats_outlet.jl new file mode 100644 index 0000000..01aae6a --- /dev/null +++ b/src/wappers/stats_outlet.jl @@ -0,0 +1,96 @@ +""" + ComputeComponentOutlet <: AbstractHydroWrapper + +A wrapper component that computes the outlet values of a hydrological component during simulation. + +# Fields +- `component::AbstractComponent`: The wrapped hydrological component +- `outlet::Integer`: The index of the outlet node to compute values for +- `meta::HydroMeta`: Metadata about the component + +# Constructors + ComputeComponentOutlet(component::AbstractComponent, outlet::Integer) + +# Description +`ComputeComponentOutlet` wraps a hydrological component and extracts the values at a specific outlet node +during simulation. + +The wrapper requires that the outlet index is valid for the component's nodes. When called, it: +1. Runs the wrapped component simulation +2. Extracts the values at the specified outlet node +3. Returns just the outlet values, either as a named tuple or array depending on configuration + +This is useful for focusing analysis on a particular outlet point in a hydrological network. +""" +struct ComputeComponentOutlet{C<:AbstractComponent,M<:HydroMeta} <: AbstractHydroWrapper + component::C + outlet::Integer + meta::M + + function ComputeComponentOutlet(component::C, outlet::I) where {C<:AbstractComponent,I<:Integer} + new{C,typeof(component.meta)}(component, outlet, component.meta) + end +end + +function (wrapper::ComputeComponentOutlet)(input::AbstractVector{<:NamedTuple}, pas::ComponentVector; kwargs...) + convert_to_ntp = get(kwargs, :convert_to_ntp, true) + output = wrapper.component(input, pas; kwargs...) + return convert_to_ntp ?
output[wrapper.outlet] : output[:, wrapper.outlet, :] +end + +""" + WeightSumComponentOutlet <: AbstractHydroWrapper + +A wrapper component that computes a weighted sum of selected output variables over all nodes of a hydrological component during simulation. + +# Fields +- `component::AbstractComponent`: The wrapped hydrological component +- `vars::AbstractVector{Num}`: The output variables to aggregate at the outlet +- `meta::HydroMeta`: Metadata about the component +- `weight_names::Symbol`: The name of the node weight parameter + +# Constructor + WeightSumComponentOutlet(component::AbstractComponent, vars::AbstractVector{Num}; weight_names::Symbol=:weight) + +# Description +`WeightSumComponentOutlet` wraps a multi-node hydrological component and aggregates the selected +variables into a single outlet series during simulation. + +When called, it: +1. Runs the wrapped component simulation +2. Multiplies each node's values of the selected variables by that node's weight parameter +3. Returns the weighted sums, either as a named tuple or array depending on configuration + +This is useful for lumping the response of a distributed component into a single outlet point. +""" +struct WeightSumComponentOutlet{C<:AbstractComponent,M<:HydroMeta,T<:Number} <: AbstractHydroWrapper + component::C + vars::AbstractVector{T} + meta::M + weight_names::Symbol + + function WeightSumComponentOutlet(component::C, vars::AbstractVector{T}; weight_names::Symbol=:weight) where {C<:AbstractComponent,T<:Num} + new_params = vcat(component.meta.param_names, [weight_names]) + var_names = Symbolics.tosymbol.(vars) + new_meta = HydroMeta(component.meta.name, component.meta.input_names, new_params, var_names, component.meta.state_names, component.meta.nn_names) + new{C,typeof(new_meta),T}(component, vars, new_meta, weight_names) + end +end + +function (wrapper::WeightSumComponentOutlet)(input::Union{AbstractArray{<:Number,3},AbstractVector{<:NamedTuple}}, pas::ComponentVector; kwargs...) + convert_to_ntp = get(kwargs, :convert_to_ntp, true) + output = wrapper.component(input, pas; kwargs...) + var_names = Symbolics.tosymbol.(wrapper.vars) + weight_vec = [pas[:params][ptype][wrapper.weight_names] for ptype in keys(pas[:params])] + if convert_to_ntp + var_vec = map(var_names) do var_nm + sum([output[i][var_nm] .* weight_vec[i] for i in eachindex(weight_vec)]) + end + return NamedTuple{Tuple(var_names)}(var_vec) + else + var_idx = [findfirst(nm -> nm == var_nm, wrapper.component.varnames) for var_nm in var_names] + var_vec = map(var_idx) do idx + sum([output[idx, i, :] .* weight_vec[i] for i in eachindex(weight_vec)]) + end + return reduce(hcat, var_vec) + end +end \ No newline at end of file diff --git a/src/wrapper.jl b/src/wrapper.jl deleted file mode 100644 index f95f180..0000000 --- a/src/wrapper.jl +++ /dev/null @@ -1,292 +0,0 @@ -""" - RecordComponentState <: AbstractHydroWrapper - -A wrapper component that records the state values of a hydrological component during simulation. - -# Fields -- `component::AbstractComponent`: The wrapped hydrological component -- `states::ComponentVector`: Current state values of the component -- `meta::HydroMeta`: Metadata about the component - -# Constructors - RecordComponentState(component::AbstractComponent, initstates::Union{Nothing,ComponentVector}=nothing) - -# Description -`RecordComponentState` wraps a hydrological component and tracks its state values during simulation. -It updates and stores the latest state values after each simulation step. - -The wrapper requires that the wrapped component has state variables. If no initial states are provided, -it will use default states from the component.
- -When called, it: -1. Updates the input parameters with current state values -2. Runs the wrapped component simulation -3. Extracts and stores the final state values -4. Returns the original component output - -The state values can be accessed through the `states` field at any time. -""" -struct RecordComponentState{C<:AbstractComponent,T<:Number,M<:HydroMeta} <: AbstractHydroWrapper - component::C - states::ComponentVector{T} - meta::M - - function RecordComponentState(component::C, initstates::Union{Nothing,ComponentVector}=nothing) where {C<:AbstractComponent} - @assert length(get_state_names(component)) != 0 "Component $component must have state flux" - initstates = isnothing(initstates) ? get_default_states(component) : initstates - new{C,eltype(initstates),typeof(component.meta)}(component, initstates, component.meta) - end -end - -function (wrapper::RecordComponentState{C,T,M})(input::Any, pas::ComponentVector; kwargs...) where {C,T,M} - reset_state = get(kwargs, :reset_state, false) - if reset_state - wrapper.states = pas[:initstates] - end - pas_replace_states = update_ca(pas, ComponentVector(initstates=wrapper.states)) - output = wrapper.component(input, pas_replace_states; kwargs...) - state_names = get_state_names(wrapper.component) - if get(kwargs, :convert_to_ntp, false) - component_nodes = get_node_names(wrapper.component) - if isnothing(component_nodes) - states_ca = ComponentVector(NamedTuple{Tuple(state_names)}([output[nm][end] for nm in state_names])) - else - states_ca = ComponentVector(NamedTuple{Tuple(component_nodes)}(map(component_nodes) do node - NamedTuple{Tuple(state_names)}([output[node][nm][end] for nm in state_names]) - end)) - end - else - if output isa AbstractArray{<:Number,2} - var_names = wrapper.component.varnames - state_indices = map(state_names) do sname - findfirst(vname -> vname == sname, var_names) - end - latest_states = output[state_indices, end] - states_ca = ComponentVector(NamedTuple{Tuple(state_names)}(latest_states)) - elseif output isa AbstractArray{<:Number,3} - latest_states = output[state_indices, :, end] - component_nodes = get_node_names(wrapper.component) - states_ca = ComponentVector(NamedTuple{Tuple(component_nodes)}(map(component_nodes) do node - NamedTuple{Tuple(state_names)}(latest_states[:, node]) - end - )) - end - end - @reset wrapper.states = states_ca - return output -end - -""" - EstimateComponentParams <: AbstractHydroWrapper - -A wrapper component that estimates parameters for a hydrological component during simulation. - -# Fields -- `component::AbstractComponent`: The wrapped hydrological component -- `estfuncs::AbstractVector{<:AbstractHydroFlux}`: Vector of estimation functions -- `ptypes::AbstractVector{Symbol}`: Parameter types to be estimated -- `meta::HydroMeta`: Metadata about the component - -# Constructors - EstimateComponentParams(component::AbstractComponent, estfuncs::AbstractVector{<:AbstractHydroFlux}, ptypes::AbstractVector{Symbol}=Symbol[]) - -# Description -`EstimateComponentParams` wraps a hydrological component and dynamically estimates its parameters -using provided estimation functions during simulation. - -The wrapper requires at least one estimation function. Each estimation function should implement: -- `get_input_names`: Returns required input variable names -- `get_param_names`: Returns parameter names it can estimate -- `get_var_names`: Returns both input and output variable names - -When called, it: -1. Applies each estimation function to calculate parameters -2. 
Updates the parameter set with estimated values -3. Runs the wrapped component with updated parameters -4. Returns the component output - -The estimation process uses the parameter types specified in `ptypes` to organize -and update the correct parameter groups in the component. -""" -struct EstimateComponentParams{C<:AbstractComponent,E<:AbstractHydroFlux,P<:Union{AbstractVector{Symbol},Nothing},M<:HydroMeta} <: AbstractHydroWrapper - component::C - estfuncs::AbstractVector{E} - pkeys::P - meta::M - - function EstimateComponentParams( - component::C, estfuncs::AbstractVector{E}, pkeys::P=nothing - ) where {C<:AbstractComponent,E<:AbstractHydroFlux,P<:Union{AbstractVector{Symbol},Nothing}} - @assert length(estfuncs) != 0 "At least one estimation function is required" - comp_meta = component.meta - est_input_names, est_output_names = get_var_names(estfuncs) - est_param_names = reduce(union, get_param_names.(estfuncs)) - # todo: due to ComponentVector merging limitations, arbitrary values still need to be provided for estimated parameters - # new_param_names = setdiff(union(comp_meta.params, est_input_names, est_param_names), est_output_names) - new_param_names = union(comp_meta.params, est_input_names, est_param_names) - new_meta = HydroMeta(comp_meta.name, comp_meta.inputs, comp_meta.outputs, new_param_names, comp_meta.states, comp_meta.nns) - return new{C,E,P,typeof(new_meta)}(component, estfuncs, pkeys, new_meta) - end -end - -function (wrapper::EstimateComponentParams{C,E,P,M})(input::Any, pas::ComponentVector; kwargs...) where {C,E,P<:Nothing,M} - est_params_ntp = reduce(merge, map(wrapper.estfuncs) do estfunc - tmp_input = collect(pas[:params][get_input_names(estfunc)]) - tmp_pas = collect(pas[:params][get_param_names(estfunc)]) - NamedTuple{Tuple(get_output_names(estfunc))}(estfunc(tmp_input, pas)) - end) - new_pas = update_ca(pas, ComponentVector(params=est_params_ntp)) - output = wrapper.component(input, new_pas; kwargs...) - return output -end - -function (wrapper::EstimateComponentParams{C,E,P,M})(input::Any, pas::ComponentVector; kwargs...) where {C,E,P<:AbstractVector{Symbol},M} - est_config = (ptypes=wrapper.pkeys,) - est_params_mat = reduce(vcat, map(wrapper.estfuncs) do estfunc - tmp_input_mat = reduce(vcat, map(wrapper.pkeys) do pkey - tmp_input_vec = collect([pas[:params][pkey][nm] for nm in get_input_names(estfunc)]) - reshape(tmp_input_vec, 1, length(tmp_input_vec)) - end) - tmp_input_arr = permutedims(reshape(tmp_input_mat, 1, size(tmp_input_mat)...), (3, 2, 1)) - tmp_output_mat = estfunc(tmp_input_arr, pas, config=est_config)[:, :, 1] - tmp_output_mat - end) - est_param_names = reduce(union, get_output_names.(wrapper.estfuncs)) - est_params_ntp = NamedTuple{Tuple(wrapper.pkeys)}([NamedTuple{Tuple(est_param_names)}(est_params_mat[:, i]) for i in eachindex(wrapper.pkeys)]) - new_pas = update_ca(pas, ComponentVector(params=est_params_ntp)) - output = wrapper.component(input, new_pas; kwargs...) - return output -end - -""" - ComputeComponentOutlet <: AbstractHydroWrapper - -A wrapper component that computes the outlet values of a hydrological component during simulation. 
- -# Fields -- `component::AbstractComponent`: The wrapped hydrological component -- `outlet::Symbol`: The name of the outlet node to compute values for -- `meta::HydroMeta`: Metadata about the component - -# Constructors - ComputeComponentOutlet(component::AbstractComponent, outlet::Symbol, meta::HydroMeta) - -# Description -`ComputeComponentOutlet` wraps a hydrological component and extracts the values at a specific outlet node -during simulation. - -The wrapper requires that the outlet name exists in the component's node names. When called, it: -1. Runs the wrapped component simulation -2. Extracts the values at the specified outlet node -3. Returns just the outlet values, either as a named tuple or array depending on configuration - -This is useful for focusing analysis on a particular outlet point in a hydrological network. -""" -struct ComputeComponentOutlet{C<:AbstractComponent,M<:HydroMeta} <: AbstractHydroWrapper - component::C - outlet::Integer - meta::M - - function ComputeComponentOutlet(component::C, outlet::I) where {C<:AbstractComponent,I<:Integer} - new{C,typeof(component.meta)}(component, outlet, component.meta) - end -end - -function (wrapper::ComputeComponentOutlet)(input::AbstractVector{<:NamedTuple}, pas::ComponentVector; kwargs...) - convert_to_ntp = get(kwargs, :convert_to_ntp, true) - output = wrapper.component(input, pas; kwargs...) - return convert_to_ntp ? output[wrapper.outlet] : output[:, wrapper.outlet, :] -end - -""" - ComputeComponentOutlet <: AbstractHydroWrapper - -A wrapper component that computes the outlet values of a hydrological component during simulation. - -# Fields -- `component::AbstractComponent`: The wrapped hydrological component -- `outlet::Symbol`: The name of the outlet node to compute values for -- `meta::HydroMeta`: Metadata about the component - -# Constructor - ComputeComponentOutlet(component::AbstractComponent, outlet::Symbol) - -# Description -`ComputeComponentOutlet` wraps a hydrological component and extracts the values at a specific outlet node -during simulation. - -The wrapper requires that the outlet name exists in the component's node names. When called, it: -1. Runs the wrapped component simulation -2. Extracts the values at the specified outlet node -3. Returns just the outlet values, either as a named tuple or array depending on configuration - -This is useful for focusing analysis on a particular outlet point in a hydrological network. -""" -struct WeightSumComponentOutlet{C<:AbstractComponent,M<:HydroMeta,T<:Number} <: AbstractHydroWrapper - component::C - vars::AbstractVector{T} - meta::M - weight_names::Symbol - - function WeightSumComponentOutlet(component::C, vars::AbstractVector{T}; weight_names::Symbol=:weight) where {C<:AbstractComponent,T<:Num} - new_params = vcat(component.meta.param_names, [weight_names]) - var_names = Symbolics.tosymbol.(vars) - new_meta = HydroMeta(component.meta.name, component.meta.input_names, new_params, var_names, component.meta.state_names, component.meta.nn_names) - new{C,typeof(meta),T}(component, vars, new_meta, weight_names) - end -end - -function (wrapper::WeightSumComponentOutlet)(input::Union{AbstractArray{<:Number,3},AbstractVector{<:NamedTuple}}, pas::ComponentVector; kwargs...) - convert_to_ntp = get(kwargs, :convert_to_ntp, true) - output = wrapper.component(input, pas; kwargs...) 
- var_names = Symbolics.tosymbol.(wrapper.vars) - weight_vec = [pas[:params][ptype][wrapper.weight_names] for ptype in wrapper.ptypes] - if convert_to_ntp - var_vec = map(var_names) do var_nm - sum(reduce(hcat, [out[var_nm] .* weight_vec[i] for (i, out) in enumerate(weight_vec)])) - end - return NamedTuple{Tuple(var_names)}(var_vec) - else - var_idx = [findfirst(nm -> nm == var_nm, wrapper.component.varnames) for var_nm in var_names] - var_vec = map(var_idx) do idx - sum([output[idx, i, :] .* weight_vec[i] for i in eachindex(weight_vec)]) - end - return reduce(hcat, var_vec) - end -end - -struct NeuralWrapper{N,F,M} <: AbstractNeuralWrapper - model::N - func::F - meta::M - - function NeuralWrapper(fluxes::Pair, model::N; name::Union{Nothing,Symbol}=nothing, rng::R=StableRNG(42)) where {N,R} - #* assert the chain has a name - # @assert model.name isa Symbol "Neural network chain should have a name with Symbol type" - #* extract the parameter and state - ps, st = Lux.setup(rng, model) - ps_axes = getaxes(ComponentVector(ps)) - nn_func(x, p) = LuxCore.apply(model, x, ComponentVector(p, ps_axes), st)[1] - wrapper_name = name === nothing ? Symbol(model.name, :_wrapper) : name - meta = HydroMeta(name=wrapper_name, inputs=fluxes.first, outputs=fluxes.second) # , nn_names=[model.name] - new{N,typeof(nn_func),typeof(meta)}(model, nn_func, meta) - end -end - -function (wrapper::NeuralWrapper)(input::Array{T,2}, pas::ComponentVector; kwargs...) where {T} - nn_ps = pas[:nn][wrapper.meta.name] - output = wrapper.func(input, nn_ps) - convert_to_ntp = get(kwargs, :convert_to_ntp, false) - return convert_to_ntp ? NamedTuple{Tuple(get_output_names(wrapper))}(eachslice(output, dims=1)) : output -end - -function (wrapper::NeuralWrapper)(input::Array{T,3}, pas::ComponentVector; kwargs...) 
where {T} - nn_ps = pas[:nn][wrapper.meta.name] - output = wrapper.func.(Matrix.(eachslice(input, dims=2)), Ref(nn_ps)) - convert_to_ntp = get(kwargs, :convert_to_ntp, false) - if convert_to_ntp - return [NamedTuple{Tuple(get_output_names(wrapper))}(eachslice(output_, dims=1)) for output_ in eachslice(output, dims=2)] - else - return permutedims(reduce((m1, m2) -> cat(m1, m2, dims=3), output), (1,3,2)) - end -end \ No newline at end of file diff --git a/test/run_aqua_test.jl b/test/run_aqua_test.jl deleted file mode 100644 index bcb45cc..0000000 --- a/test/run_aqua_test.jl +++ /dev/null @@ -1,4 +0,0 @@ -using Aqua -using HydroModels - -Aqua.test_all(HydroModels; ambiguities=false) \ No newline at end of file diff --git a/test/run_d8_grid.jl b/test/run_d8_grid.jl index 3c31365..3733154 100644 --- a/test/run_d8_grid.jl +++ b/test/run_d8_grid.jl @@ -1,18 +1,19 @@ -#* 测试d8 grid routing -@testset "d8 grid routing (regular grid)" begin - input = ones(9) - positions = [(1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3), (3, 1), (3, 2), (3, 3)] - flwdir = [1 4 8; 1 4 4; 1 1 2] - result = HydroModels.grid_routing(input, positions, flwdir) - target = [0.0, 1.0, 0.0, 0.0, 3.0, 0.0, 0.0, 2.0, 2.0] - @test result == target -end +@testset "d8 grid routing" begin + @testset "d8 grid routing (regular grid)" begin + input = ones(9) + positions = [(1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3), (3, 1), (3, 2), (3, 3)] + flwdir = [1 4 8; 1 4 4; 1 1 2] + result = HydroModels.grid_routing(input, positions, flwdir) + target = [0.0, 1.0, 0.0, 0.0, 3.0, 0.0, 0.0, 2.0, 2.0] + @test result == target + end -@testset "d8 grid routing (irregular grid)" begin - input = ones(10) - positions = [(1, 2), (1, 3), (2, 1), (2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (4, 3), (4, 4)] - flwdir = [0 4 4 0; 1 2 4 8; 0 1 4 0; 0 0 1 1] - result = HydroModels.grid_routing(input, positions, flwdir) - target = [0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 0.0, 4.0, 1.0, 1.0] - @test result == target -end \ No newline at end of file + @testset "d8 grid routing (irregular grid)" begin + input = ones(10) + positions = [(1, 2), (1, 3), (2, 1), (2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (4, 3), (4, 4)] + flwdir = [0 4 4 0; 1 2 4 8; 0 1 4 0; 0 0 1 1] + result = HydroModels.grid_routing(input, positions, flwdir) + target = [0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 0.0, 4.0, 1.0, 1.0] + @test result == target + end +end diff --git a/test/run_io.jl b/test/run_io.jl new file mode 100644 index 0000000..c2f2c8e --- /dev/null +++ b/test/run_io.jl @@ -0,0 +1,52 @@ +ts = collect(1:100) +df = DataFrame(CSV.File("../data/exphydro/01013500.csv")) +input_ntp = (lday=df[ts, "dayl(day)"], temp=df[ts, "tmean(C)"], prcp=df[ts, "prcp(mm/day)"]) + +initstates = ComponentVector(snowpack=0.0, soilwater=1303.00) +params = ComponentVector(f=0.0167, Smax=1709.46, Qmax=18.47, Df=2.674, Tmax=0.17, Tmin=-2.09) +pas = ComponentVector(initstates=initstates, params=params) + +step_func(x) = (tanh(5.0 * x) + 1.0) * 0.5 +@parameters Tmin Tmax Df Smax f Qmax +@variables prcp temp lday pet snowpack soilwater rainfall snowfall evap melt baseflow surfaceflow flow +#! 
define the snow pack reservoir +snow_funcs = [ + HydroFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) / (temp + 237.3)) / (temp + 273.2)]), + HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]), + HydroFlux([snowpack, temp] => [melt], [Tmax, Df], exprs=[step_func(temp - Tmax) * step_func(snowpack) * min(snowpack, Df * (temp - Tmax))]), +] +snow_dfuncs = [StateFlux([snowfall] => [melt], snowpack)] +snow_ele = HydroBucket(funcs=snow_funcs, dfuncs=snow_dfuncs) + +#! define the soil water reservoir +soil_funcs = [ + HydroFlux([soilwater, pet] => [evap], [Smax], exprs=[step_func(soilwater) * pet * min(1.0, soilwater / Smax)]), + HydroFlux([soilwater] => [baseflow], [Smax, Qmax, f], exprs=[step_func(soilwater) * Qmax * exp(-f * (max(0.0, Smax - soilwater)))]), + HydroFlux([soilwater] => [surfaceflow], [Smax], exprs=[max(0.0, soilwater - Smax)]), + HydroFlux([baseflow, surfaceflow] => [flow], exprs=[baseflow + surfaceflow]), +] +soil_dfuncs = [StateFlux([rainfall, melt] => [evap, flow], soilwater)] +soil_ele = HydroBucket(funcs=soil_funcs, dfuncs=soil_dfuncs) + +#! define the Exp-Hydro model +model = HydroModel(name=:exphydro, components=[snow_ele, soil_ele]) + + +@testset "test NamedTupleIOAdapter" begin + @testset "test single node input" begin + ioadapter = NamedTupleIOAdapter(model) + output_ntp = ioadapter(input_ntp, pas) + @test Set(keys(output_ntp)) == Set(vcat(HydroModels.get_state_names(model), HydroModels.get_output_names(model))) + end + + @testset "test multiple node input" begin + ioadapter = NamedTupleIOAdapter(model) + node_names = [Symbol(:node_, i) for i in 1:10] + node_params = NamedTuple{Tuple(node_names)}(repeat([params], 10)) + node_initstates = NamedTuple{Tuple(node_names)}(repeat([initstates], 10)) + node_pas = ComponentVector(params=node_params, initstates=node_initstates) + output_ntp = ioadapter(repeat([input_ntp], 10), node_pas, config=(timeidx=ts, ptypes=node_names)) + @test length(output_ntp) == 10 + @test Set(keys(output_ntp[1])) == Set(vcat(HydroModels.get_state_names(model), HydroModels.get_output_names(model))) + end +end \ No newline at end of file diff --git a/test/run_sort.jl b/test/run_sort.jl index e9e3586..92a1f42 100644 --- a/test/run_sort.jl +++ b/test/run_sort.jl @@ -2,9 +2,9 @@ @variables a b c d e f @parameters p1 p2 p3 p4 - flux_1 = HydroModels.SimpleFlux([a, b] => [c, d], [p1, p2], exprs=[a * p1 + p2, b * p2 + p1]) - flux_2 = HydroModels.SimpleFlux([a, c] => [e], [p3], exprs=[a * p3 + c]) - flux_3 = HydroModels.SimpleFlux([e, d] => [f], exprs=[e + d]) + flux_1 = HydroModels.HydroFlux([a, b] => [c, d], [p1, p2], exprs=[a * p1 + p2, b * p2 + p1]) + flux_2 = HydroModels.HydroFlux([a, c] => [e], [p3], exprs=[a * p3 + c]) + flux_3 = HydroModels.HydroFlux([e, d] => [f], exprs=[e + d]) sorted_fluxes = HydroModels.sort_fluxes([flux_2, flux_3, flux_1]) @test sorted_fluxes == [flux_1, flux_2, flux_3] end @@ -14,9 +14,9 @@ end @parameters Tmin Tmax Df name = :test snow_fluxes = [ - SimpleFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) / (temp + 237.3)) / (temp + 273.2)]), - SimpleFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]), - SimpleFlux([snowpack, temp] => [melt], [Tmax, Df], exprs=[step_func(temp - Tmax) * step_func(snowpack) * min(snowpack, Df * (temp - Tmax))]), + HydroFlux([temp, lday] => [pet], exprs=[29.8 * lday * 24 * 0.611 * exp((17.3 * temp) 
/ (temp + 237.3)) / (temp + 273.2)]), + HydroFlux([prcp, temp] => [snowfall, rainfall], [Tmin], exprs=[step_func(Tmin - temp) * prcp, step_func(temp - Tmin) * prcp]), + HydroFlux([snowpack, temp] => [melt], [Tmax, Df], exprs=[step_func(temp - Tmax) * step_func(snowpack) * min(snowpack, Df * (temp - Tmax))]), ] snow_dfluxes = [StateFlux([snowfall] => [melt], snowpack),] snow_bucket = HydroBucket(Symbol(name, :_surface), funcs=snow_fluxes, dfuncs=snow_dfluxes) @@ -24,12 +24,12 @@ end @variables soilwater pet evap baseflow surfaceflow flow rainfall melt @parameters Smax Qmax f soil_fluxes = [ - SimpleFlux([soilwater, pet] => [evap], [Smax], exprs=[step_func(soilwater) * pet * min(1.0, soilwater / Smax)]), - SimpleFlux([soilwater] => [baseflow], [Smax, Qmax, f], exprs=[step_func(soilwater) * Qmax * exp(-f * (max(0.0, Smax - soilwater)))]), - SimpleFlux([soilwater] => [surfaceflow], [Smax], exprs=[max(0.0, soilwater - Smax)]), + HydroFlux([soilwater, pet] => [evap], [Smax], exprs=[step_func(soilwater) * pet * min(1.0, soilwater / Smax)]), + HydroFlux([soilwater] => [baseflow], [Smax, Qmax, f], exprs=[step_func(soilwater) * Qmax * exp(-f * (max(0.0, Smax - soilwater)))]), + HydroFlux([soilwater] => [surfaceflow], [Smax], exprs=[max(0.0, soilwater - Smax)]), ] soil_dfluxes = [StateFlux([rainfall, melt] => [evap, baseflow, surfaceflow], soilwater)] soil_bucket = HydroBucket(Symbol(name, :_soil), funcs=soil_fluxes, dfuncs=soil_dfluxes) - flow_flux = SimpleFlux([baseflow, surfaceflow] => [flow], exprs=[baseflow + surfaceflow]) + flow_flux = HydroFlux([baseflow, surfaceflow] => [flow], exprs=[baseflow + surfaceflow]) sorted_fluxes = HydroModels.sort_components([flow_flux, soil_bucket, snow_bucket]) end \ No newline at end of file diff --git a/test/run_wrapper.jl b/test/run_wrapper.jl index 0b06c94..560a54c 100644 --- a/test/run_wrapper.jl +++ b/test/run_wrapper.jl @@ -1,11 +1,3 @@ -HydroFlux = HydroModels.HydroFlux -StateFlux = HydroModels.StateFlux -NeuralFlux = HydroModels.NeuralFlux -HydroBucket = HydroModels.HydroBucket -HydroModel = HydroModels.HydroModel -RecordComponentState = HydroModels.RecordComponentState -EstimateComponentParams = HydroModels.EstimateComponentParams - @parameters Tmin Tmax Df Smax f Qmax @variables prcp temp lday pet snowpack soilwater rainfall snowfall evap melt baseflow surfaceflow flow diff --git a/test/runtests.jl b/test/runtests.jl index b79313e..82911fb 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -21,6 +21,7 @@ using Test include("run_route.jl") include("run_lumped_model.jl") include("run_spatial_model.jl") + include("run_io.jl") end Aqua.test_all(HydroModels) \ No newline at end of file
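# ---------------------------------------------------------------------------
# Editor's note: the snippet below is a usage sketch added for review; it is
# not part of the diff. It only reuses constructs that already appear in
# test/run_io.jl and test/run_sort.jl to illustrate the two user-facing
# changes above: the `exprs` keyword of `HydroFlux` is now mandatory, and
# `NamedTupleIOAdapter` now lives in the wrapper layer
# (src/wappers/io_adapter.jl) instead of src/utils/io.jl. The names `model`,
# `input_ntp`, and `pas` refer to the objects defined in test/run_io.jl and,
# like the `@variables` macro, are assumed to be in scope as in that test.

using HydroModels

@variables baseflow surfaceflow flow

# `exprs` must now be passed explicitly; omitting it raises an UndefKeywordError
# instead of silently falling back to an empty expression list.
flow_flux = HydroFlux([baseflow, surfaceflow] => [flow], exprs=[baseflow + surfaceflow])

# Wrap a component (here the ExpHydro `model` from test/run_io.jl) so that it
# accepts and returns NamedTuples instead of raw (variable, time) matrices.
adapter = NamedTupleIOAdapter(model)
output_ntp = adapter(input_ntp, pas)   # keys are the model's state and output names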