From d3dca1568b11cf8c0a0db9d3492bb75e7e7cbb79 Mon Sep 17 00:00:00 2001 From: James Tomlinson Date: Sat, 25 Nov 2023 21:56:23 +0000 Subject: [PATCH 1/7] feat: Upgrade to pywr-v1-schema v0.8.0. (#66) Required implementing several new parameters: - InterpolatedParameter - DiscountFactorParameter - DataFrameParameter Conversion from Storage, Deficit and Flow parameters now returns an error as these should become metrics in pywr-next. Todos are left for HydropowerTarget, RollingMeanFlowNode and ScenarioWrapper parameter. Also the control curve interpolated parameter was renamed due to a naming conflict with InterpolatedParameter. --- Cargo.toml | 2 +- pywr-core/src/lib.rs | 4 +- pywr-core/src/model.rs | 4 +- pywr-core/src/parameters/array.rs | 26 +- .../parameters/control_curves/interpolated.rs | 8 +- .../src/parameters/control_curves/mod.rs | 2 +- .../parameters/control_curves/piecewise.rs | 4 +- pywr-core/src/parameters/delay.rs | 2 +- pywr-core/src/parameters/discount_factor.rs | 86 +++++ pywr-core/src/parameters/interpolate.rs | 110 +++++++ pywr-core/src/parameters/interpolated.rs | 63 ++++ pywr-core/src/parameters/mod.rs | 8 +- pywr-core/src/test_utils.rs | 4 +- pywr-schema/src/data_tables/mod.rs | 304 ++---------------- pywr-schema/src/data_tables/scalar.rs | 250 ++++++++++++++ pywr-schema/src/data_tables/vec.rs | 114 +++++++ pywr-schema/src/error.rs | 10 + pywr-schema/src/model.rs | 4 +- pywr-schema/src/parameters/control_curves.rs | 30 +- pywr-schema/src/parameters/core.rs | 2 +- pywr-schema/src/parameters/data_frame.rs | 58 +++- pywr-schema/src/parameters/discount_factor.rs | 61 ++++ pywr-schema/src/parameters/interpolated.rs | 218 +++++++++++++ pywr-schema/src/parameters/mod.rs | 109 +++++-- pywr-schema/src/parameters/profiles.rs | 8 +- pywr-schema/src/parameters/tables.rs | 11 +- 26 files changed, 1172 insertions(+), 330 deletions(-) create mode 100644 pywr-core/src/parameters/discount_factor.rs create mode 100644 pywr-core/src/parameters/interpolate.rs
create mode 100644 pywr-core/src/parameters/interpolated.rs create mode 100644 pywr-schema/src/data_tables/scalar.rs create mode 100644 pywr-schema/src/data_tables/vec.rs create mode 100644 pywr-schema/src/parameters/discount_factor.rs create mode 100644 pywr-schema/src/parameters/interpolated.rs diff --git a/Cargo.toml b/Cargo.toml index 5c2f0629..ac4fd74b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,4 +47,4 @@ tracing = "0.1" csv = "1.1" hdf5 = { version="0.8.1" } hdf5-sys = { version="0.8.1", features=["static"] } -pywr-v1-schema = { git = "https://github.com/pywr/pywr-schema/", tag="v0.7.0", package = "pywr-schema" } +pywr-v1-schema = { git = "https://github.com/pywr/pywr-schema/", tag="v0.8.0", package = "pywr-schema" } diff --git a/pywr-core/src/lib.rs b/pywr-core/src/lib.rs index 1eafe82b..02c79633 100644 --- a/pywr-core/src/lib.rs +++ b/pywr-core/src/lib.rs @@ -4,7 +4,7 @@ extern crate core; use crate::derived_metric::DerivedMetricIndex; use crate::node::NodeIndex; -use crate::parameters::{IndexParameterIndex, MultiValueParameterIndex, ParameterIndex}; +use crate::parameters::{IndexParameterIndex, InterpolationError, MultiValueParameterIndex, ParameterIndex}; use crate::recorders::RecorderIndex; use pyo3::exceptions::{PyException, PyRuntimeError}; use pyo3::{create_exception, PyErr}; @@ -133,6 +133,8 @@ pub enum PywrError { ParameterVariableValuesIncorrectLength, #[error("missing solver features")] MissingSolverFeatures, + #[error("interpolation error: {0}")] + Interpolation(#[from] InterpolationError), #[error("parameters do not provide an initial value")] ParameterNoInitialValue, } diff --git a/pywr-core/src/model.rs b/pywr-core/src/model.rs index da236409..4e5ae3f6 100644 --- a/pywr-core/src/model.rs +++ b/pywr-core/src/model.rs @@ -1535,7 +1535,7 @@ mod tests { use crate::metric::Metric; use crate::model::Model; use crate::node::{Constraint, ConstraintValue}; - use crate::parameters::{ActivationFunction, InterpolatedParameter, Parameter, 
VariableParameter}; + use crate::parameters::{ActivationFunction, ControlCurveInterpolatedParameter, Parameter, VariableParameter}; use crate::recorders::AssertionRecorder; use crate::scenario::{ScenarioGroupCollection, ScenarioIndex}; #[cfg(feature = "clipm")] @@ -1758,7 +1758,7 @@ mod tests { // Set-up a control curve that uses the proportional volume // This should be use the initial proportion (100%) on the first time-step, and then the previous day's end value - let cc = InterpolatedParameter::new( + let cc = ControlCurveInterpolatedParameter::new( "interp", Metric::DerivedMetric(dm_idx), vec![], diff --git a/pywr-core/src/parameters/array.rs b/pywr-core/src/parameters/array.rs index 14f2019c..e0d143ae 100644 --- a/pywr-core/src/parameters/array.rs +++ b/pywr-core/src/parameters/array.rs @@ -4,19 +4,21 @@ use crate::scenario::ScenarioIndex; use crate::state::State; use crate::timestep::Timestep; use crate::PywrError; -use ndarray::{Array1, Array2}; +use ndarray::{Array1, Array2, Axis}; use std::any::Any; pub struct Array1Parameter { meta: ParameterMeta, array: Array1, + timestep_offset: Option, } impl Array1Parameter { - pub fn new(name: &str, array: Array1) -> Self { + pub fn new(name: &str, array: Array1, timestep_offset: Option) -> Self { Self { meta: ParameterMeta::new(name), array, + timestep_offset, } } } @@ -36,8 +38,12 @@ impl Parameter for Array1Parameter { _state: &State, _internal_state: &mut Option>, ) -> Result { + let idx = match self.timestep_offset { + None => timestep.index, + Some(offset) => (timestep.index as i32 + offset).max(0).min(self.array.len() as i32 - 1) as usize, + }; // This panics if out-of-bounds - let value = self.array[[timestep.index]]; + let value = self.array[[idx]]; Ok(value) } } @@ -46,14 +52,16 @@ pub struct Array2Parameter { meta: ParameterMeta, array: Array2, scenario_group_index: usize, + timestep_offset: Option, } impl Array2Parameter { - pub fn new(name: &str, array: Array2, scenario_group_index: usize) -> Self { + 
pub fn new(name: &str, array: Array2, scenario_group_index: usize, timestep_offset: Option) -> Self { Self { meta: ParameterMeta::new(name), array, scenario_group_index, + timestep_offset, } } } @@ -75,8 +83,14 @@ impl Parameter for Array2Parameter { _internal_state: &mut Option>, ) -> Result { // This panics if out-of-bounds - let idx = scenario_index.indices[self.scenario_group_index]; + let t_idx = match self.timestep_offset { + None => timestep.index, + Some(offset) => (timestep.index as i32 + offset) + .max(0) + .min(self.array.len_of(Axis(0)) as i32 - 1) as usize, + }; + let s_idx = scenario_index.indices[self.scenario_group_index]; - Ok(self.array[[timestep.index, idx]]) + Ok(self.array[[t_idx, s_idx]]) } } diff --git a/pywr-core/src/parameters/control_curves/interpolated.rs b/pywr-core/src/parameters/control_curves/interpolated.rs index f4ce150d..42381d45 100644 --- a/pywr-core/src/parameters/control_curves/interpolated.rs +++ b/pywr-core/src/parameters/control_curves/interpolated.rs @@ -1,6 +1,6 @@ -use super::interpolate; use crate::metric::Metric; use crate::model::Model; +use crate::parameters::interpolate::interpolate; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -8,14 +8,14 @@ use crate::timestep::Timestep; use crate::PywrError; use std::any::Any; -pub struct InterpolatedParameter { +pub struct ControlCurveInterpolatedParameter { meta: ParameterMeta, metric: Metric, control_curves: Vec, values: Vec, } -impl InterpolatedParameter { +impl ControlCurveInterpolatedParameter { pub fn new(name: &str, metric: Metric, control_curves: Vec, values: Vec) -> Self { Self { meta: ParameterMeta::new(name), @@ -26,7 +26,7 @@ impl InterpolatedParameter { } } -impl Parameter for InterpolatedParameter { +impl Parameter for ControlCurveInterpolatedParameter { fn as_any_mut(&mut self) -> &mut dyn Any { self } diff --git a/pywr-core/src/parameters/control_curves/mod.rs 
b/pywr-core/src/parameters/control_curves/mod.rs index af4778b2..4a6eb1b5 100644 --- a/pywr-core/src/parameters/control_curves/mod.rs +++ b/pywr-core/src/parameters/control_curves/mod.rs @@ -7,7 +7,7 @@ mod volume_between; pub use apportion::ApportionParameter; pub use index::ControlCurveIndexParameter; -pub use interpolated::InterpolatedParameter; +pub use interpolated::ControlCurveInterpolatedParameter; pub use piecewise::PiecewiseInterpolatedParameter; pub use simple::ControlCurveParameter; pub use volume_between::VolumeBetweenControlCurvesParameter; diff --git a/pywr-core/src/parameters/control_curves/piecewise.rs b/pywr-core/src/parameters/control_curves/piecewise.rs index 59fe8802..1e83fc53 100644 --- a/pywr-core/src/parameters/control_curves/piecewise.rs +++ b/pywr-core/src/parameters/control_curves/piecewise.rs @@ -1,6 +1,6 @@ -use super::interpolate; use crate::metric::Metric; use crate::model::Model; +use crate::parameters::interpolate::interpolate; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -82,7 +82,7 @@ mod test { let mut model = simple_model(1); // Create an artificial volume series to use for the interpolation test - let volume = Array1Parameter::new("test-x", Array1::linspace(1.0, 0.0, 21)); + let volume = Array1Parameter::new("test-x", Array1::linspace(1.0, 0.0, 21), None); let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); diff --git a/pywr-core/src/parameters/delay.rs b/pywr-core/src/parameters/delay.rs index 8e04c3fe..d247e63a 100644 --- a/pywr-core/src/parameters/delay.rs +++ b/pywr-core/src/parameters/delay.rs @@ -97,7 +97,7 @@ mod test { // Create an artificial volume series to use for the delay test let volumes = Array1::linspace(1.0, 0.0, 21); - let volume = Array1Parameter::new("test-x", volumes.clone()); + let volume = Array1Parameter::new("test-x", volumes.clone(), None); let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); diff --git 
a/pywr-core/src/parameters/discount_factor.rs b/pywr-core/src/parameters/discount_factor.rs new file mode 100644 index 00000000..b06c024e --- /dev/null +++ b/pywr-core/src/parameters/discount_factor.rs @@ -0,0 +1,86 @@ +use crate::metric::Metric; +use crate::model::Model; +use crate::parameters::{Parameter, ParameterMeta}; +use crate::scenario::ScenarioIndex; +use crate::state::State; +use crate::timestep::Timestep; +use crate::PywrError; +use std::any::Any; + +pub struct DiscountFactorParameter { + meta: ParameterMeta, + discount_rate: Metric, + base_year: i32, +} + +impl DiscountFactorParameter { + pub fn new(name: &str, discount_rate: Metric, base_year: i32) -> Self { + Self { + meta: ParameterMeta::new(name), + discount_rate, + base_year, + } + } +} + +impl Parameter for DiscountFactorParameter { + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + fn meta(&self) -> &ParameterMeta { + &self.meta + } + fn compute( + &self, + timestep: &Timestep, + _scenario_index: &ScenarioIndex, + model: &Model, + state: &State, + _internal_state: &mut Option>, + ) -> Result { + let year = timestep.date.year() - self.base_year; + let rate = self.discount_rate.get_value(model, state)?; + + let factor = 1.0 / (1.0 + rate).powi(year); + Ok(factor) + } +} + +#[cfg(test)] +mod test { + use crate::metric::Metric; + use crate::parameters::{Array1Parameter, DiscountFactorParameter}; + use crate::test_utils::{run_and_assert_parameter, simple_model}; + use ndarray::{concatenate, s, Array1, Array2, Axis}; + + /// Basic functional test of the delay parameter. 
+ #[test] + fn test_basic() { + let mut model = simple_model(1); + + // Create an artificial volume series to use for the delay test + let volumes = Array1::linspace(1.0, 0.0, 21); + let volume = Array1Parameter::new("test-x", volumes.clone(), None); + + let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); + + const DELAY: usize = 3; // 3 time-step delay + let parameter = DiscountFactorParameter::new( + "test-parameter", + Metric::Constant(0.03), // Interpolate with the parameter based values + 2020, + ); + + // We should have DELAY number of initial values to start with, and then follow the + // values in the `volumes` array. + let expected_values: Array1 = [ + 1.0; 21 // initial values + ] + .to_vec() + .into(); + + let expected_values: Array2 = expected_values.insert_axis(Axis(1)); + + run_and_assert_parameter(&mut model, Box::new(parameter), expected_values, None, Some(1e-12)); + } +} diff --git a/pywr-core/src/parameters/interpolate.rs b/pywr-core/src/parameters/interpolate.rs new file mode 100644 index 00000000..b675f5a0 --- /dev/null +++ b/pywr-core/src/parameters/interpolate.rs @@ -0,0 +1,110 @@ +use thiserror::Error; + +/// Interpolate a value between two bounds. 
+pub fn interpolate(value: f64, lower_bound: f64, upper_bound: f64, lower_value: f64, upper_value: f64) -> f64 { + if value <= lower_bound { + lower_value + } else if value >= upper_bound { + upper_value + } else if (lower_bound - upper_bound).abs() < 1E-6 { + lower_value + } else { + lower_value + (upper_value - lower_value) * (value - lower_bound) / (upper_bound - lower_bound) + } +} + +#[derive(Error, Debug, PartialEq, Eq)] +pub enum InterpolationError { + #[error("At least 2 points are required for interpolation")] + InsufficientPoints, + #[error("The points are inconsistent")] + InconsistentPoints, + #[error("Value below lower bounds")] + BelowLowerBounds, + #[error("Value above upper bounds")] + AboveUpperBounds, + #[error("Points are not strictly monotonic")] + NotStrictlyMonotonic, +} + +pub fn linear_interpolation( + value: f64, + points: &[(f64, f64)], + error_on_bounds: bool, +) -> Result<f64, InterpolationError> { + if points.len() < 2 { + return Err(InterpolationError::InsufficientPoints); + } + + // Handle lower bounds checking + if value < points[0].0 { + return if error_on_bounds { + Err(InterpolationError::BelowLowerBounds) + } else { + Ok(points[0].1) + }; + } + + for pts in points.windows(2) { + let lp = pts[0]; + let up = pts[1]; + + if lp.0 >= up.0 { + return Err(InterpolationError::NotStrictlyMonotonic); + } + + if value < up.0 { + return Ok(interpolate(value, lp.0, up.0, lp.1, up.1)); + } + } + + return if error_on_bounds { + Err(InterpolationError::AboveUpperBounds) + } else { + Ok(points + .last() + .expect("This should be impossible because fp has been checked for a length of at least 2") + .1) + }; +} + +#[cfg(test)] +mod tests { + use super::*; + use float_cmp::assert_approx_eq; + + #[test] + fn test_interpolate() { + // Middle of the range + assert_approx_eq!(f64, interpolate(0.5, 0.0, 1.0, 0.0, 1.0), 0.5); + assert_approx_eq!(f64, interpolate(0.25, 0.0, 1.0, 0.0, 1.0), 0.25); + assert_approx_eq!(f64, interpolate(0.75, 0.0, 1.0, 0.0, 1.0), 0.75); + // Below bounds; returns lower value
+ assert_approx_eq!(f64, interpolate(-1.0, 0.0, 1.0, 0.0, 1.0), 0.0); + // Above bounds; returns upper value + assert_approx_eq!(f64, interpolate(2.0, 0.0, 1.0, 0.0, 1.0), 1.0); + // Equal bounds; returns lower value + assert_approx_eq!(f64, interpolate(0.0, 0.0, 0.0, 0.0, 1.0), 0.0); + } + + #[test] + fn test_linear_interpolation() { + let points = vec![(1.0, 3.0), (2.0, 4.5), (3.0, 6.0), (4.0, 7.5), (5.0, 9.0)]; + + assert_approx_eq!(f64, linear_interpolation(1.0, &points, true).unwrap(), 3.0); + assert_approx_eq!(f64, linear_interpolation(0.5, &points, false).unwrap(), 3.0); + assert_approx_eq!(f64, linear_interpolation(2.5, &points, false).unwrap(), 5.25); + assert_approx_eq!(f64, linear_interpolation(5.0, &points, false).unwrap(), 9.0); + assert_approx_eq!(f64, linear_interpolation(5.5, &points, false).unwrap(), 9.0); + + // Check errors + assert!(linear_interpolation(0.0, &points, true).is_err()); + assert!(linear_interpolation(6.0, &points, true).is_err()); + + let not_enough_points = vec![(1.0, 3.0)]; + assert!(linear_interpolation(1.0, &not_enough_points, true).is_err()); + + let non_monotonic_points = vec![(1.0, 3.0), (2.0, 4.5), (2.0, 6.0), (4.0, 7.5), (5.0, 9.0)]; + assert!(linear_interpolation(3.0, &non_monotonic_points, true).is_err()); + } +} diff --git a/pywr-core/src/parameters/interpolated.rs b/pywr-core/src/parameters/interpolated.rs new file mode 100644 index 00000000..909da024 --- /dev/null +++ b/pywr-core/src/parameters/interpolated.rs @@ -0,0 +1,63 @@ +use crate::metric::Metric; +use crate::model::Model; +use crate::parameters::interpolate::linear_interpolation; +use crate::parameters::{Parameter, ParameterMeta}; +use crate::scenario::ScenarioIndex; +use crate::state::State; +use crate::timestep::Timestep; +use crate::PywrError; +use std::any::Any; + +/// A parameter that interpolates a value to a function with given discrete data points.
+pub struct InterpolatedParameter { + meta: ParameterMeta, + x: Metric, + points: Vec<(Metric, Metric)>, + error_on_bounds: bool, +} + +impl InterpolatedParameter { + pub fn new(name: &str, x: Metric, points: Vec<(Metric, Metric)>, error_on_bounds: bool) -> Self { + Self { + meta: ParameterMeta::new(name), + x, + points, + error_on_bounds, + } + } +} + +impl Parameter for InterpolatedParameter { + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + fn meta(&self) -> &ParameterMeta { + &self.meta + } + fn compute( + &self, + _timestep: &Timestep, + _scenario_index: &ScenarioIndex, + model: &Model, + state: &State, + _internal_state: &mut Option>, + ) -> Result { + // Current value + let x = self.x.get_value(model, state)?; + + let points = self + .points + .iter() + .map(|(x, f)| { + let xp = x.get_value(model, state)?; + let fp = f.get_value(model, state)?; + + Ok::<(f64, f64), PywrError>((xp, fp)) + }) + .collect::, _>>()?; + + let f = linear_interpolation(x, &points, self.error_on_bounds)?; + + Ok(f) + } +} diff --git a/pywr-core/src/parameters/mod.rs b/pywr-core/src/parameters/mod.rs index 76d1986c..99978519 100644 --- a/pywr-core/src/parameters/mod.rs +++ b/pywr-core/src/parameters/mod.rs @@ -6,8 +6,11 @@ mod asymmetric; mod constant; mod control_curves; mod delay; +mod discount_factor; mod division; mod indexed_array; +mod interpolate; +mod interpolated; mod max; mod min; mod negative; @@ -35,12 +38,15 @@ pub use array::{Array1Parameter, Array2Parameter}; pub use asymmetric::AsymmetricSwitchIndexParameter; pub use constant::ConstantParameter; pub use control_curves::{ - ApportionParameter, ControlCurveIndexParameter, ControlCurveParameter, InterpolatedParameter, + ApportionParameter, ControlCurveIndexParameter, ControlCurveInterpolatedParameter, ControlCurveParameter, PiecewiseInterpolatedParameter, VolumeBetweenControlCurvesParameter, }; pub use delay::DelayParameter; +pub use discount_factor::DiscountFactorParameter; pub use division::DivisionParameter; 
pub use indexed_array::IndexedArrayParameter; +pub use interpolate::{interpolate, linear_interpolation, InterpolationError}; +pub use interpolated::InterpolatedParameter; pub use max::MaxParameter; pub use min::MinParameter; pub use negative::NegativeParameter; diff --git a/pywr-core/src/test_utils.rs b/pywr-core/src/test_utils.rs index 08f8ad6a..3fa7d348 100644 --- a/pywr-core/src/test_utils.rs +++ b/pywr-core/src/test_utils.rs @@ -38,7 +38,7 @@ pub fn simple_model(num_scenarios: usize) -> Model { model.connect_nodes(link_node, output_node).unwrap(); let inflow = Array::from_shape_fn((366, num_scenarios), |(i, j)| 1.0 + i as f64 + j as f64); - let inflow = Array2Parameter::new("inflow", inflow, scenario_idx); + let inflow = Array2Parameter::new("inflow", inflow, scenario_idx, None); let inflow = model.add_parameter(Box::new(inflow)).unwrap(); @@ -208,7 +208,7 @@ fn make_simple_system( for x in inflow.iter_mut() { *x = inflow_distr.sample(rng).max(0.0); } - let inflow = Array2Parameter::new(&format!("inflow-{suffix}"), inflow, scenario_group_index); + let inflow = Array2Parameter::new(&format!("inflow-{suffix}"), inflow, scenario_group_index, None); let idx = model.add_parameter(Box::new(inflow))?; model.set_node_max_flow( diff --git a/pywr-schema/src/data_tables/mod.rs b/pywr-schema/src/data_tables/mod.rs index 35a69e5c..b373b0b3 100644 --- a/pywr-schema/src/data_tables/mod.rs +++ b/pywr-schema/src/data_tables/mod.rs @@ -1,12 +1,19 @@ +mod scalar; +mod vec; + +use crate::data_tables::scalar::{ + load_csv_row2_scalar_table_one, load_csv_row_col_scalar_table_one, load_csv_row_scalar_table_one, +}; +use crate::data_tables::vec::{load_csv_row2_vec_table_one, load_csv_row_vec_table_one}; use crate::parameters::TableIndex; +use crate::ConversionError; use pywr_v1_schema::parameters::TableDataRef as TableDataRefV1; +pub use scalar::LoadedScalarTable; use std::collections::HashMap; -use std::fs::File; -use std::io::BufReader; use std::path::{Path, PathBuf}; -use 
std::str::FromStr; use thiserror::Error; use tracing::{debug, info}; +use vec::LoadedVecTable; #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] #[serde(rename_all = "lowercase")] @@ -117,192 +124,6 @@ pub fn make_path(table_path: &Path, data_path: Option<&Path>) -> PathBuf { } } -/// Load a CSV file with looks for each rows & columns -fn load_csv_row_col_scalar_table_one( - table_path: &Path, - data_path: Option<&Path>, -) -> Result, TableError> -where - T: FromStr + Copy, - TableError: From, -{ - let path = make_path(table_path, data_path); - - let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; - let buf_reader = BufReader::new(file); - let mut rdr = csv::Reader::from_reader(buf_reader); - - let headers: Vec = rdr - .headers() - .map_err(|e| TableError::Csv(e.to_string()))? - .iter() - .skip(1) - .map(|s| s.to_string()) - .collect(); - - let tbl: HashMap<(String, String), T> = rdr - .records() - .map(|result| { - // The iterator yields Result, so we check the - // error here. - let record = result.map_err(|e| TableError::Csv(e.to_string()))?; - - let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); - - let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; - - let values: Vec<((String, String), T)> = values - .into_iter() - .zip(&headers) - .map(|(v, col)| ((key.clone(), col.to_string()), v)) - .collect(); - - Ok(values) - }) - .collect::, TableError>>()? 
- .into_iter() - .flatten() - .collect(); - - Ok(LoadedScalarTable::Two(tbl)) -} - -fn load_csv_row_scalar_table_one( - table_path: &Path, - data_path: Option<&Path>, -) -> Result, TableError> -where - T: FromStr + Copy, - TableError: From, -{ - let path = make_path(table_path, data_path); - - let file = File::open(path.clone()).map_err(|e| TableError::IO(e.to_string()))?; - let buf_reader = BufReader::new(file); - let mut rdr = csv::Reader::from_reader(buf_reader); - - let tbl: HashMap = rdr - .records() - .map(|result| { - // The iterator yields Result, so we check the - // error here. - let record = result.map_err(|e| TableError::Csv(e.to_string()))?; - - let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); - - let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; - - if values.len() > 1 { - return Err(TableError::TooManyValues(path.clone())); - } - - Ok((key, values[0])) - }) - .collect::>()?; - - Ok(LoadedScalarTable::One(tbl)) -} - -fn load_csv_row2_scalar_table_one( - table_path: &Path, - data_path: Option<&Path>, -) -> Result, TableError> -where - T: FromStr + Copy, - TableError: From, -{ - let path = make_path(table_path, data_path); - - let file = File::open(path.clone()).map_err(|e| TableError::IO(e.to_string()))?; - let buf_reader = BufReader::new(file); - let mut rdr = csv::Reader::from_reader(buf_reader); - - let tbl: HashMap<(String, String), T> = rdr - .records() - .map(|result| { - // The iterator yields Result, so we check the - // error here. 
- let record = result.map_err(|e| TableError::Csv(e.to_string()))?; - - let key = ( - record.get(0).ok_or(TableError::KeyParse)?.to_string(), - record.get(1).ok_or(TableError::KeyParse)?.to_string(), - ); - - let values: Vec = record.iter().skip(2).map(|v| v.parse()).collect::>()?; - - if values.len() > 1 { - return Err(TableError::TooManyValues(path.clone())); - } - - Ok((key, values[0])) - }) - .collect::>()?; - - Ok(LoadedScalarTable::Two(tbl)) -} - -fn load_csv_row_vec_table_one(table_path: &Path, data_path: Option<&Path>) -> Result, TableError> -where - T: FromStr, - TableError: From, -{ - let path = make_path(table_path, data_path); - - let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; - let buf_reader = BufReader::new(file); - let mut rdr = csv::Reader::from_reader(buf_reader); - - let tbl: HashMap> = rdr - .records() - .map(|result| { - // The iterator yields Result, so we check the - // error here. - let record = result.map_err(|e| TableError::Csv(e.to_string()))?; - - let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); - - let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; - - Ok((key, values)) - }) - .collect::>()?; - - Ok(LoadedVecTable::One(tbl)) -} - -fn load_csv_row2_vec_table_one(table_path: &Path, data_path: Option<&Path>) -> Result, TableError> -where - T: FromStr, - TableError: From, -{ - let path = make_path(table_path, data_path); - - let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; - let buf_reader = BufReader::new(file); - let mut rdr = csv::Reader::from_reader(buf_reader); - - let tbl: HashMap<(String, String), Vec> = rdr - .records() - .map(|result| { - // The iterator yields Result, so we check the - // error here. 
- let record = result.map_err(|e| TableError::Csv(e.to_string()))?; - - let key = ( - record.get(0).ok_or(TableError::KeyParse)?.to_string(), - record.get(1).ok_or(TableError::KeyParse)?.to_string(), - ); - - let values: Vec = record.iter().skip(2).map(|v| v.parse()).collect::>()?; - - Ok((key, values)) - }) - .collect::>()?; - - Ok(LoadedVecTable::Two(tbl)) -} - #[derive(Error, Debug, PartialEq, Eq)] pub enum TableError { #[error("table not found: {0}")] @@ -325,6 +146,8 @@ pub enum TableError { WrongTableFormat(String), #[error("too many values for scalar table when loading data table from: {0}")] TooManyValues(PathBuf), + #[error("table index out of bounds: {0}")] + IndexOutOfBounds(usize), } pub enum LoadedTable { @@ -352,88 +175,6 @@ impl LoadedTable { } } -pub enum LoadedScalarTable { - One(HashMap), - Two(HashMap<(String, String), T>), - Three(HashMap<(String, String, String), T>), -} - -impl LoadedScalarTable -where - T: Copy, -{ - fn get_scalar(&self, key: &[&str]) -> Result { - match self { - LoadedScalarTable::One(tbl) => { - if key.len() == 1 { - tbl.get(key[0]).ok_or(TableError::EntryNotFound).copied() - } else { - Err(TableError::WrongKeySize(1, key.len())) - } - } - LoadedScalarTable::Two(tbl) => { - if key.len() == 2 { - // I think this copies the strings and is not very efficient. - let k = (key[0].to_string(), key[1].to_string()); - tbl.get(&k).ok_or(TableError::EntryNotFound).copied() - } else { - Err(TableError::WrongKeySize(2, key.len())) - } - } - LoadedScalarTable::Three(tbl) => { - if key.len() == 3 { - // I think this copies the strings and is not very efficient. 
- let k = (key[0].to_string(), key[1].to_string(), key[2].to_string()); - tbl.get(&k).ok_or(TableError::EntryNotFound).copied() - } else { - Err(TableError::WrongKeySize(3, key.len())) - } - } - } - } -} - -pub enum LoadedVecTable { - One(HashMap>), - Two(HashMap<(String, String), Vec>), - Three(HashMap<(String, String, String), Vec>), -} - -impl LoadedVecTable -where - T: Copy, -{ - fn get_vec(&self, key: &[&str]) -> Result<&Vec, TableError> { - match self { - LoadedVecTable::One(tbl) => { - if key.len() == 1 { - tbl.get(key[0]).ok_or(TableError::EntryNotFound) - } else { - Err(TableError::WrongKeySize(1, key.len())) - } - } - LoadedVecTable::Two(tbl) => { - if key.len() == 2 { - // I think this copies the strings and is not very efficient. - let k = (key[0].to_string(), key[1].to_string()); - tbl.get(&k).ok_or(TableError::EntryNotFound) - } else { - Err(TableError::WrongKeySize(2, key.len())) - } - } - LoadedVecTable::Three(tbl) => { - if key.len() == 3 { - // I think this copies the strings and is not very efficient. 
- let k = (key[0].to_string(), key[1].to_string(), key[2].to_string()); - tbl.get(&k).ok_or(TableError::EntryNotFound) - } else { - Err(TableError::WrongKeySize(3, key.len())) - } - } - } - } -} - pub struct LoadedTableCollection { tables: HashMap, } @@ -509,13 +250,22 @@ impl TableDataRef { } } -impl From for TableDataRef { - fn from(v1: TableDataRefV1) -> Self { - Self { +impl TryFrom for TableDataRef { + type Error = ConversionError; + fn try_from(v1: TableDataRefV1) -> Result { + let column = match v1.column { + None => None, + Some(c) => Some(c.try_into()?), + }; + let index = match v1.index { + None => None, + Some(i) => Some(i.try_into()?), + }; + Ok(Self { table: v1.table, - column: v1.column.map(|i| i.into()), - index: v1.index.map(|i| i.into()), - } + column, + index, + }) } } diff --git a/pywr-schema/src/data_tables/scalar.rs b/pywr-schema/src/data_tables/scalar.rs new file mode 100644 index 00000000..3a3a2358 --- /dev/null +++ b/pywr-schema/src/data_tables/scalar.rs @@ -0,0 +1,250 @@ +use crate::data_tables::{make_path, TableError}; +use std::collections::HashMap; +use std::fs::File; +use std::io::BufReader; +use std::path::Path; +use std::str::FromStr; + +fn table_key_to_position(key: &str, keys: &[String]) -> Result { + keys.iter().position(|k| k == key).ok_or(TableError::EntryNotFound) +} + +/// A simple table with a string based key for scalar values. +pub struct ScalarTableOne { + keys: Vec, + values: Vec, +} + +impl ScalarTableOne +where + T: Copy, +{ + fn get_scalar(&self, key: &str) -> Result { + let index = table_key_to_position(key, &self.keys)?; + self.values + .get(index) + .ok_or(TableError::IndexOutOfBounds(index)) + .copied() + } +} + +/// A simple table with two strings for a key to scalar values. +pub struct ScalarTableR1C1 { + index: (Vec, Vec), + // Could this be flattened for a small performance gain? 
+ values: Vec<Vec<T>>, +} + +impl<T> ScalarTableR1C1<T> +where + T: Copy, +{ + fn get_scalar(&self, index: &[&str]) -> Result<T, TableError> { + if index.len() == 2 { + let idx0 = table_key_to_position(index[0], &self.index.0)?; + let idx1 = table_key_to_position(index[1], &self.index.1)?; + + self.values + .get(idx0) + .ok_or(TableError::IndexOutOfBounds(idx0))? + .get(idx1) + .ok_or(TableError::IndexOutOfBounds(idx1)) + .copied() + } else { + Err(TableError::WrongKeySize(2, index.len())) + } + } +} + +/// A simple table with two strings for a key to scalar values. +pub struct ScalarTableR2<T> { + values: HashMap<(String, String), T>, +} + +impl<T> ScalarTableR2<T> +where + T: Copy, +{ + fn get_scalar(&self, index: &[&str]) -> Result<T, TableError> { + if index.len() == 2 { + // I think this copies the strings and is not very efficient. + let k = (index[0].to_string(), index[1].to_string()); + self.values.get(&k).ok_or(TableError::EntryNotFound).copied() + } else { + Err(TableError::WrongKeySize(2, index.len())) + } + } +} + +/// A simple table with three strings for a key to scalar values. +/// +/// This table cannot be indexed by position. +pub struct ScalarTableThree<T> { + values: HashMap<(String, String, String), T>, +} + +impl<T> ScalarTableThree<T> +where + T: Copy, +{ + fn get_scalar(&self, index: &[&str]) -> Result<T, TableError> { + if index.len() == 3 { + // I think this copies the strings and is not very efficient.
+ let k = (index[0].to_string(), index[1].to_string(), index[2].to_string()); + self.values.get(&k).ok_or(TableError::EntryNotFound).copied() + } else { + Err(TableError::WrongKeySize(3, index.len())) + } + } +} + +pub enum LoadedScalarTable { + One(ScalarTableOne), + Row1Col1(ScalarTableR1C1), + Row2(ScalarTableR2), + Three(ScalarTableThree), +} + +impl LoadedScalarTable +where + T: Copy, +{ + pub fn get_scalar(&self, key: &[&str]) -> Result { + match self { + LoadedScalarTable::One(tbl) => { + if key.len() == 1 { + tbl.get_scalar(key[0]) + } else { + Err(TableError::WrongKeySize(1, key.len())) + } + } + LoadedScalarTable::Row1Col1(tbl) => tbl.get_scalar(key), + LoadedScalarTable::Row2(tbl) => tbl.get_scalar(key), + LoadedScalarTable::Three(tbl) => tbl.get_scalar(key), + } + } +} + +/// Load a CSV file with looks for each rows & columns +pub fn load_csv_row_col_scalar_table_one( + table_path: &Path, + data_path: Option<&Path>, +) -> Result, TableError> +where + T: FromStr + Copy, + TableError: From, +{ + let path = make_path(table_path, data_path); + + let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; + let buf_reader = BufReader::new(file); + let mut rdr = csv::Reader::from_reader(buf_reader); + + let col_headers: Vec = rdr + .headers() + .map_err(|e| TableError::Csv(e.to_string()))? + .iter() + .skip(1) + .map(|s| s.to_string()) + .collect(); + + let mut row_headers: Vec = Vec::new(); + let values: Vec> = rdr + .records() + .map(|result| { + // The iterator yields Result, so we check the + // error here. 
+ let record = result.map_err(|e| TableError::Csv(e.to_string()))?; + + let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); + + let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; + + row_headers.push(key.clone()); + + Ok(values) + }) + .collect::, TableError>>()?; + + Ok(LoadedScalarTable::Row1Col1(ScalarTableR1C1 { + index: (row_headers, col_headers), + values, + })) +} + +pub fn load_csv_row_scalar_table_one( + table_path: &Path, + data_path: Option<&Path>, +) -> Result, TableError> +where + T: FromStr + Copy, + TableError: From, +{ + let path = make_path(table_path, data_path); + + let file = File::open(path.clone()).map_err(|e| TableError::IO(e.to_string()))?; + let buf_reader = BufReader::new(file); + let mut rdr = csv::Reader::from_reader(buf_reader); + + let (keys, values): (Vec, Vec) = rdr + .records() + .map(|result| { + // The iterator yields Result, so we check the + // error here. + let record = result.map_err(|e| TableError::Csv(e.to_string()))?; + + let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); + + let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; + + if values.len() > 1 { + return Err(TableError::TooManyValues(path.clone())); + } + + Ok((key, values[0])) + }) + .collect::, TableError>>()? + .into_iter() + .unzip(); + + Ok(LoadedScalarTable::One(ScalarTableOne { keys, values })) +} + +pub fn load_csv_row2_scalar_table_one( + table_path: &Path, + data_path: Option<&Path>, +) -> Result, TableError> +where + T: FromStr + Copy, + TableError: From, +{ + let path = make_path(table_path, data_path); + + let file = File::open(path.clone()).map_err(|e| TableError::IO(e.to_string()))?; + let buf_reader = BufReader::new(file); + let mut rdr = csv::Reader::from_reader(buf_reader); + + let values: HashMap<(String, String), T> = rdr + .records() + .map(|result| { + // The iterator yields Result, so we check the + // error here. 
+ let record = result.map_err(|e| TableError::Csv(e.to_string()))?; + + let key = ( + record.get(0).ok_or(TableError::KeyParse)?.to_string(), + record.get(1).ok_or(TableError::KeyParse)?.to_string(), + ); + + let values: Vec = record.iter().skip(2).map(|v| v.parse()).collect::>()?; + + if values.len() > 1 { + return Err(TableError::TooManyValues(path.clone())); + } + + Ok((key, values[0])) + }) + .collect::>()?; + + Ok(LoadedScalarTable::Row2(ScalarTableR2 { values })) +} diff --git a/pywr-schema/src/data_tables/vec.rs b/pywr-schema/src/data_tables/vec.rs new file mode 100644 index 00000000..0968e1ea --- /dev/null +++ b/pywr-schema/src/data_tables/vec.rs @@ -0,0 +1,114 @@ +use crate::data_tables::{make_path, TableError}; +use std::collections::HashMap; +use std::fs::File; +use std::io::BufReader; +use std::path::Path; +use std::str::FromStr; + +pub enum LoadedVecTable { + One(HashMap>), + Two(HashMap<(String, String), Vec>), + Three(HashMap<(String, String, String), Vec>), +} + +impl LoadedVecTable +where + T: Copy, +{ + pub fn get_vec(&self, key: &[&str]) -> Result<&Vec, TableError> { + match self { + LoadedVecTable::One(tbl) => { + if key.len() == 1 { + tbl.get(key[0]).ok_or(TableError::EntryNotFound) + } else { + Err(TableError::WrongKeySize(1, key.len())) + } + } + LoadedVecTable::Two(tbl) => { + if key.len() == 2 { + // I think this copies the strings and is not very efficient. + let k = (key[0].to_string(), key[1].to_string()); + tbl.get(&k).ok_or(TableError::EntryNotFound) + } else { + Err(TableError::WrongKeySize(2, key.len())) + } + } + LoadedVecTable::Three(tbl) => { + if key.len() == 3 { + // I think this copies the strings and is not very efficient. 
+ let k = (key[0].to_string(), key[1].to_string(), key[2].to_string()); + tbl.get(&k).ok_or(TableError::EntryNotFound) + } else { + Err(TableError::WrongKeySize(3, key.len())) + } + } + } + } +} + +pub fn load_csv_row_vec_table_one( + table_path: &Path, + data_path: Option<&Path>, +) -> Result, TableError> +where + T: FromStr, + TableError: From, +{ + let path = make_path(table_path, data_path); + + let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; + let buf_reader = BufReader::new(file); + let mut rdr = csv::Reader::from_reader(buf_reader); + + let tbl: HashMap> = rdr + .records() + .map(|result| { + // The iterator yields Result, so we check the + // error here. + let record = result.map_err(|e| TableError::Csv(e.to_string()))?; + + let key = record.get(0).ok_or(TableError::KeyParse)?.to_string(); + + let values: Vec = record.iter().skip(1).map(|v| v.parse()).collect::>()?; + + Ok((key, values)) + }) + .collect::>()?; + + Ok(LoadedVecTable::One(tbl)) +} + +pub fn load_csv_row2_vec_table_one( + table_path: &Path, + data_path: Option<&Path>, +) -> Result, TableError> +where + T: FromStr, + TableError: From, +{ + let path = make_path(table_path, data_path); + + let file = File::open(path).map_err(|e| TableError::IO(e.to_string()))?; + let buf_reader = BufReader::new(file); + let mut rdr = csv::Reader::from_reader(buf_reader); + + let tbl: HashMap<(String, String), Vec> = rdr + .records() + .map(|result| { + // The iterator yields Result, so we check the + // error here. 
+ let record = result.map_err(|e| TableError::Csv(e.to_string()))?; + + let key = ( + record.get(0).ok_or(TableError::KeyParse)?.to_string(), + record.get(1).ok_or(TableError::KeyParse)?.to_string(), + ); + + let values: Vec = record.iter().skip(2).map(|v| v.parse()).collect::>()?; + + Ok((key, values)) + }) + .collect::>()?; + + Ok(LoadedVecTable::Two(tbl)) +} diff --git a/pywr-schema/src/error.rs b/pywr-schema/src/error.rs index 61c72d85..37ba4a60 100644 --- a/pywr-schema/src/error.rs +++ b/pywr-schema/src/error.rs @@ -39,6 +39,8 @@ pub enum SchemaError { CSVError(String), #[error("unexpected parameter type: {0}")] UnexpectedParameterType(String), + #[error("mismatch in the length of data provided. expected: {expected}, found: {found}")] + DataLengthMismatch { expected: usize, found: usize }, } impl From for PyErr { @@ -69,4 +71,12 @@ pub enum ConversionError { ExtraNodeAttribute { attr: String, name: String }, #[error("Custom node of type {ty:?} on node {name:?} is not supported .")] CustomNodeNotSupported { ty: String, name: String }, + #[error("Integer table indices are not supported.")] + IntegerTableIndicesNotSupported, + #[error("Conversion of one of the following attributes {attrs:?} is not supported on parameter {name:?}.")] + UnsupportedAttribute { attrs: Vec, name: String }, + #[error("Conversion of one of the following feature is not supported on parameter {name:?}: {feature}")] + UnsupportedFeature { feature: String, name: String }, + #[error("Parameter {name:?} of type `{ty:?}` are not supported in Pywr v2. 
{instead:?}")] + DeprecatedParameter { ty: String, name: String, instead: String }, } diff --git a/pywr-schema/src/model.rs b/pywr-schema/src/model.rs index 95f8f1c2..aab67f15 100644 --- a/pywr-schema/src/model.rs +++ b/pywr-schema/src/model.rs @@ -22,7 +22,9 @@ impl TryFrom for Metadata { fn try_from(v1: pywr_v1_schema::model::Metadata) -> Result { Ok(Self { - title: v1.title, + title: v1 + .title + .unwrap_or("Model converted from Pywr v1.x with no title.".to_string()), description: v1.description, minimum_version: v1.minimum_version, }) diff --git a/pywr-schema/src/parameters/control_curves.rs b/pywr-schema/src/parameters/control_curves.rs index 424927d9..c5570f3d 100644 --- a/pywr-schema/src/parameters/control_curves.rs +++ b/pywr-schema/src/parameters/control_curves.rs @@ -56,7 +56,12 @@ impl ControlCurveInterpolatedParameter { .map(|val| val.load(model, tables, data_path)) .collect::>()?; - let p = pywr_core::parameters::InterpolatedParameter::new(&self.meta.name, metric, control_curves, values); + let p = pywr_core::parameters::ControlCurveInterpolatedParameter::new( + &self.meta.name, + metric, + control_curves, + values, + ); Ok(model.add_parameter(Box::new(p))?) } } @@ -85,7 +90,26 @@ impl TryFromV1Parameter for ControlCurveInt }); }; - let values = v1.values.into_iter().map(DynamicFloatValue::from_f64).collect(); + // Handle the case where neither or both "values" and "parameters" are defined. 
+ let values = match (v1.values, v1.parameters) { + (None, None) => { + return Err(ConversionError::MissingAttribute { + name: meta.name, + attrs: vec!["values".to_string(), "parameters".to_string()], + }); + } + (Some(_), Some(_)) => { + return Err(ConversionError::UnexpectedAttribute { + name: meta.name, + attrs: vec!["values".to_string(), "parameters".to_string()], + }); + } + (Some(values), None) => values.into_iter().map(DynamicFloatValue::from_f64).collect(), + (None, Some(parameters)) => parameters + .into_iter() + .map(|p| p.try_into_v2_parameter(Some(&meta.name), unnamed_count)) + .collect::, _>>()?, + }; let p = Self { meta, @@ -388,7 +412,7 @@ impl TryFromV1Parameter for Contro control_curves, storage_node: v1.storage_node, values: v1.values, - minimum: Some(v1.minimum), + minimum: v1.minimum, maximum: None, }; Ok(p) diff --git a/pywr-schema/src/parameters/core.rs b/pywr-schema/src/parameters/core.rs index 834d1b8e..2c4e4288 100644 --- a/pywr-schema/src/parameters/core.rs +++ b/pywr-schema/src/parameters/core.rs @@ -195,7 +195,7 @@ impl TryFromV1Parameter for ConstantParameter { let value = if let Some(v) = v1.value { ConstantValue::Literal(v) } else if let Some(tbl) = v1.table { - ConstantValue::Table(tbl.into()) + ConstantValue::Table(tbl.try_into()?) 
} else { ConstantValue::Literal(0.0) }; diff --git a/pywr-schema/src/parameters/data_frame.rs b/pywr-schema/src/parameters/data_frame.rs index 9a881e72..532f3b9a 100644 --- a/pywr-schema/src/parameters/data_frame.rs +++ b/pywr-schema/src/parameters/data_frame.rs @@ -1,6 +1,7 @@ use crate::error::SchemaError; use crate::parameters::python::try_json_value_into_py; -use crate::parameters::{DynamicFloatValueType, ParameterMeta}; +use crate::parameters::{DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter}; +use crate::ConversionError; use ndarray::Array2; use polars::prelude::DataType::Float64; use polars::prelude::{DataFrame, Float64Type, IndexOrder}; @@ -9,6 +10,7 @@ use pyo3::types::{PyDict, PyTuple}; use pyo3::{IntoPy, PyErr, PyObject, Python, ToPyObject}; use pyo3_polars::PyDataFrame; use pywr_core::parameters::{Array1Parameter, Array2Parameter, ParameterIndex}; +use pywr_v1_schema::parameters::DataFrameParameter as DataFrameParameterV1; use std::collections::HashMap; use std::path::{Path, PathBuf}; @@ -55,6 +57,7 @@ pub struct DataFrameParameter { pub meta: ParameterMeta, pub url: PathBuf, pub columns: DataFrameColumns, + pub timestep_offset: Option, pub pandas_kwargs: HashMap, } @@ -125,7 +128,7 @@ impl DataFrameParameter { DataFrameColumns::Scenario(scenario) => { let scenario_group = model.get_scenario_group_index_by_name(scenario)?; let array: Array2 = df.to_ndarray::(IndexOrder::default()).unwrap(); - let p = Array2Parameter::new(&self.meta.name, array, scenario_group); + let p = Array2Parameter::new(&self.meta.name, array, scenario_group, self.timestep_offset); Ok(model.add_parameter(Box::new(p))?) } DataFrameColumns::Column(column) => { @@ -139,9 +142,58 @@ impl DataFrameParameter { .unwrap() .to_owned(); - let p = Array1Parameter::new(&self.meta.name, array); + let p = Array1Parameter::new(&self.meta.name, array, self.timestep_offset); Ok(model.add_parameter(Box::new(p))?) 
} } } } + +impl TryFromV1Parameter for DataFrameParameter { + type Error = ConversionError; + + fn try_from_v1_parameter( + v1: DataFrameParameterV1, + parent_node: Option<&str>, + unnamed_count: &mut usize, + ) -> Result { + let meta: ParameterMeta = v1.meta.into_v2_parameter(parent_node, unnamed_count); + let url = v1.url.ok_or(ConversionError::MissingAttribute { + attrs: vec!["url".to_string()], + name: meta.name.clone(), + })?; + + // Here we can only handle a specific column or assume the columns map to a scenario group. + let columns = match (v1.column, v1.scenario) { + (None, None) => { + return Err(ConversionError::MissingAttribute { + attrs: vec!["column".to_string(), "scenario".to_string()], + name: meta.name.clone(), + }) + } + (Some(_), Some(_)) => { + return Err(ConversionError::UnexpectedAttribute { + attrs: vec!["column".to_string(), "scenario".to_string()], + name: meta.name.clone(), + }) + } + (Some(c), None) => DataFrameColumns::Column(c), + (None, Some(s)) => DataFrameColumns::Scenario(s), + }; + + if v1.index.is_some() || v1.indexes.is_some() || v1.table.is_some() { + return Err(ConversionError::UnsupportedAttribute { + attrs: vec!["index".to_string(), "indexes".to_string(), "table".to_string()], + name: meta.name.clone(), + }); + } + + Ok(Self { + meta, + url, + columns, + timestep_offset: v1.timestep_offset, + pandas_kwargs: v1.pandas_kwargs, + }) + } +} diff --git a/pywr-schema/src/parameters/discount_factor.rs b/pywr-schema/src/parameters/discount_factor.rs new file mode 100644 index 00000000..6b7b1ed1 --- /dev/null +++ b/pywr-schema/src/parameters/discount_factor.rs @@ -0,0 +1,61 @@ +use crate::data_tables::LoadedTableCollection; +use crate::error::SchemaError; +use crate::parameters::{DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter}; +use crate::ConversionError; +use pywr_core::parameters::ParameterIndex; +use pywr_v1_schema::parameters::DiscountFactorParameter as DiscountFactorParameterV1; +use 
std::collections::HashMap; +use std::path::Path; + +/// A parameter that returns the current discount factor for a given time-step. +#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] +pub struct DiscountFactorParameter { + #[serde(flatten)] + pub meta: ParameterMeta, + pub discount_rate: DynamicFloatValue, + pub base_year: i32, +} + +impl DiscountFactorParameter { + pub fn node_references(&self) -> HashMap<&str, &str> { + HashMap::new() + } + + pub fn parameters(&self) -> HashMap<&str, DynamicFloatValueType> { + let mut attributes = HashMap::new(); + + let metric = &self.discount_rate; + attributes.insert("discount_rate", metric.into()); + + attributes + } + + pub fn add_to_model( + &self, + model: &mut pywr_core::model::Model, + tables: &LoadedTableCollection, + data_path: Option<&Path>, + ) -> Result { + let discount_rate = self.discount_rate.load(model, tables, data_path)?; + let p = pywr_core::parameters::DiscountFactorParameter::new(&self.meta.name, discount_rate, self.base_year); + Ok(model.add_parameter(Box::new(p))?) 
+ } +} + +impl TryFromV1Parameter for DiscountFactorParameter { + type Error = ConversionError; + + fn try_from_v1_parameter( + v1: DiscountFactorParameterV1, + parent_node: Option<&str>, + unnamed_count: &mut usize, + ) -> Result { + let meta: ParameterMeta = v1.meta.into_v2_parameter(parent_node, unnamed_count); + let discount_rate = DynamicFloatValue::from_f64(v1.rate); + Ok(Self { + meta, + discount_rate, + base_year: v1.base_year as i32, + }) + } +} diff --git a/pywr-schema/src/parameters/interpolated.rs b/pywr-schema/src/parameters/interpolated.rs new file mode 100644 index 00000000..9b80de49 --- /dev/null +++ b/pywr-schema/src/parameters/interpolated.rs @@ -0,0 +1,218 @@ +use crate::data_tables::LoadedTableCollection; +use crate::error::SchemaError; +use crate::parameters::{ + DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, MetricFloatValue, NodeReference, ParameterMeta, + TryFromV1Parameter, TryIntoV2Parameter, +}; +use crate::ConversionError; +use pywr_core::parameters::ParameterIndex; +use pywr_v1_schema::parameters::{ + InterpolatedFlowParameter as InterpolatedFlowParameterV1, + InterpolatedVolumeParameter as InterpolatedVolumeParameterV1, +}; +use std::collections::HashMap; +use std::path::Path; + +/// A parameter that interpolates a value to a function with given discrete data points. +/// +/// Internally this is implemented as a piecewise linear interpolation via +/// [`pywr_core::parameters::InterpolatedParameter`]. +#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] +pub struct InterpolatedParameter { + #[serde(flatten)] + pub meta: ParameterMeta, + pub x: DynamicFloatValue, + pub xp: Vec, + pub fp: Vec, + /// If not given or true, raise an error if the x value is outside the range of the data points. 
+ pub error_on_bounds: Option, +} + +impl InterpolatedParameter { + pub fn node_references(&self) -> HashMap<&str, &str> { + HashMap::new() + } + + pub fn parameters(&self) -> HashMap<&str, DynamicFloatValueType> { + let mut attributes = HashMap::new(); + + let x = &self.x; + attributes.insert("x", x.into()); + + let xp = &self.xp; + attributes.insert("xp", xp.into()); + + let fp = &self.fp; + attributes.insert("fp", fp.into()); + + attributes + } + + pub fn add_to_model( + &self, + model: &mut pywr_core::model::Model, + tables: &LoadedTableCollection, + data_path: Option<&Path>, + ) -> Result { + let x = self.x.load(model, tables, data_path)?; + + // Sense check the points + if self.xp.len() != self.fp.len() { + return Err(SchemaError::DataLengthMismatch { + expected: self.xp.len(), + found: self.fp.len(), + }); + } + + let xp = self + .xp + .iter() + .map(|p| p.load(model, tables, data_path)) + .collect::, _>>()?; + let fp = self + .fp + .iter() + .map(|p| p.load(model, tables, data_path)) + .collect::, _>>()?; + + let points = xp + .into_iter() + .zip(fp.into_iter()) + .map(|(xp, fp)| (xp, fp)) + .collect::>(); + + let p = pywr_core::parameters::InterpolatedParameter::new( + &self.meta.name, + x, + points, + self.error_on_bounds.unwrap_or(true), + ); + Ok(model.add_parameter(Box::new(p))?) + } +} + +impl TryFromV1Parameter for InterpolatedParameter { + type Error = ConversionError; + + fn try_from_v1_parameter( + v1: InterpolatedFlowParameterV1, + parent_node: Option<&str>, + unnamed_count: &mut usize, + ) -> Result { + let meta: ParameterMeta = v1.meta.into_v2_parameter(parent_node, unnamed_count); + + // Convert the node reference to a metric + let node_ref = NodeReference { + name: v1.node, + sub_name: None, + }; + // This defaults to the node's inflow; not sure if we can do better than that. 
+ let x = DynamicFloatValue::Dynamic(MetricFloatValue::NodeInFlow(node_ref)); + + let xp = v1 + .flows + .into_iter() + .map(|p| p.try_into_v2_parameter(Some(&meta.name), unnamed_count)) + .collect::, _>>()?; + + let fp = v1 + .values + .into_iter() + .map(|p| p.try_into_v2_parameter(Some(&meta.name), unnamed_count)) + .collect::, _>>()?; + + // Default values + let mut error_on_bounds = None; + if let Some(interp_kwargs) = v1.interp_kwargs { + if let Some(error_on_bounds_value) = interp_kwargs.get("bounds_error") { + // Try to get the value as a boolean; + if let Some(eob) = error_on_bounds_value.as_bool() { + error_on_bounds = Some(eob); + } + } + + // Check if non-linear interpolation is requested; this is not supported at the moment. + if let Some(kind) = interp_kwargs.get("kind") { + if let Some(kind_str) = kind.as_str() { + if kind_str != "linear" { + return Err(ConversionError::UnsupportedFeature { + feature: "Interpolation with `kind` other than `linear` is not supported.".to_string(), + name: meta.name.clone(), + }); + } + } + } + } + + Ok(Self { + meta, + x, + xp, + fp, + error_on_bounds, + }) + } +} + +impl TryFromV1Parameter for InterpolatedParameter { + type Error = ConversionError; + + fn try_from_v1_parameter( + v1: InterpolatedVolumeParameterV1, + parent_node: Option<&str>, + unnamed_count: &mut usize, + ) -> Result { + let meta: ParameterMeta = v1.meta.into_v2_parameter(parent_node, unnamed_count); + + // Convert the node reference to a metric + let node_ref = NodeReference { + name: v1.node, + sub_name: None, + }; + // This defaults to the node's inflow; not sure if we can do better than that. 
+ let x = DynamicFloatValue::Dynamic(MetricFloatValue::NodeVolume(node_ref)); + + let xp = v1 + .volumes + .into_iter() + .map(|p| p.try_into_v2_parameter(Some(&meta.name), unnamed_count)) + .collect::, _>>()?; + + let fp = v1 + .values + .into_iter() + .map(|p| p.try_into_v2_parameter(Some(&meta.name), unnamed_count)) + .collect::, _>>()?; + + // Default values + let mut error_on_bounds = None; + if let Some(interp_kwargs) = v1.interp_kwargs { + if let Some(error_on_bounds_value) = interp_kwargs.get("bounds_error") { + // Try to get the value as a boolean; + if let Some(eob) = error_on_bounds_value.as_bool() { + error_on_bounds = Some(eob); + } + } + + // Check if non-linear interpolation is requested; this is not supported at the moment. + if let Some(kind) = interp_kwargs.get("kind") { + if let Some(kind_str) = kind.as_str() { + if kind_str != "linear" { + return Err(ConversionError::UnsupportedFeature { + feature: "Interpolation with `kind` other than `linear` is not supported.".to_string(), + name: meta.name.clone(), + }); + } + } + } + } + + Ok(Self { + meta, + x, + xp, + fp, + error_on_bounds, + }) + } +} diff --git a/pywr-schema/src/parameters/mod.rs b/pywr-schema/src/parameters/mod.rs index ff18c7a7..5c1eed0e 100644 --- a/pywr-schema/src/parameters/mod.rs +++ b/pywr-schema/src/parameters/mod.rs @@ -13,7 +13,9 @@ mod control_curves; mod core; mod data_frame; mod delay; +mod discount_factor; mod indexed_array; +mod interpolated; mod offset; mod polynomial; mod profiles; @@ -32,6 +34,7 @@ pub use super::parameters::core::{ ActivationFunction, ConstantParameter, MaxParameter, MinParameter, NegativeParameter, VariableSettings, }; pub use super::parameters::delay::DelayParameter; +pub use super::parameters::discount_factor::DiscountFactorParameter; pub use super::parameters::indexed_array::IndexedArrayParameter; pub use super::parameters::polynomial::Polynomial1DParameter; pub use super::parameters::profiles::{ @@ -43,6 +46,7 @@ pub use 
super::parameters::thresholds::ParameterThresholdParameter; use crate::error::{ConversionError, SchemaError}; use crate::parameters::core::DivisionParameter; pub use crate::parameters::data_frame::DataFrameParameter; +use crate::parameters::interpolated::InterpolatedParameter; pub use offset::OffsetParameter; use pywr_core::derived_metric::DerivedMetric; use pywr_core::metric::Metric; @@ -50,10 +54,10 @@ use pywr_core::node::NodeIndex; use pywr_core::parameters::{IndexParameterIndex, IndexValue, ParameterType}; use pywr_v1_schema::parameters::{ CoreParameter, ExternalDataRef as ExternalDataRefV1, Parameter as ParameterV1, ParameterMeta as ParameterMetaV1, - ParameterValue as ParameterValueV1, TableIndex as TableIndexV1, + ParameterValue as ParameterValueV1, TableIndex as TableIndexV1, TableIndexEntry as TableIndexEntryV1, }; use std::collections::HashMap; -use std::path::Path; +use std::path::{Path, PathBuf}; #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] pub struct ParameterMeta { @@ -160,6 +164,8 @@ pub enum Parameter { Delay(DelayParameter), Division(DivisionParameter), Offset(OffsetParameter), + DiscountFactor(DiscountFactorParameter), + Interpolated(InterpolatedParameter), } impl Parameter { @@ -188,6 +194,8 @@ impl Parameter { Self::Division(p) => p.meta.name.as_str(), Self::Delay(p) => p.meta.name.as_str(), Self::Offset(p) => p.meta.name.as_str(), + Self::DiscountFactor(p) => p.meta.name.as_str(), + Self::Interpolated(p) => p.meta.name.as_str(), } } @@ -218,6 +226,8 @@ impl Parameter { Self::Delay(p) => p.node_references(), Self::Division(p) => p.node_references(), Self::Offset(p) => p.node_references(), + Self::DiscountFactor(p) => p.node_references(), + Self::Interpolated(p) => p.node_references(), } } @@ -265,6 +275,8 @@ impl Parameter { Self::Delay(_) => "Delay", Self::Division(_) => "Division", Self::Offset(_) => "Offset", + Self::DiscountFactor(_) => "DiscountFactor", + Self::Interpolated(_) => "Interpolated", } } @@ -300,6 +312,8 @@ 
impl Parameter { Self::Delay(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), Self::Division(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), Self::Offset(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), + Self::DiscountFactor(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), + Self::Interpolated(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), }; Ok(ty) @@ -364,6 +378,42 @@ impl TryFromV1Parameter for Parameter { } CoreParameter::Min(p) => Parameter::Min(p.try_into_v2_parameter(parent_node, unnamed_count)?), CoreParameter::Division(p) => Parameter::Division(p.try_into_v2_parameter(parent_node, unnamed_count)?), + CoreParameter::DataFrame(p) => { + Parameter::DataFrame(p.try_into_v2_parameter(parent_node, unnamed_count)?) + } + CoreParameter::Deficit(p) => { + return Err(ConversionError::DeprecatedParameter { + ty: "DeficitParameter".to_string(), + name: p.meta.map(|m| m.name).flatten().unwrap_or("unnamed".to_string()), + instead: "Use a derived metric instead.".to_string(), + }) + } + CoreParameter::DiscountFactor(p) => { + Parameter::DiscountFactor(p.try_into_v2_parameter(parent_node, unnamed_count)?) + } + CoreParameter::InterpolatedVolume(p) => { + Parameter::Interpolated(p.try_into_v2_parameter(parent_node, unnamed_count)?) + } + CoreParameter::InterpolatedFlow(p) => { + Parameter::Interpolated(p.try_into_v2_parameter(parent_node, unnamed_count)?) 
+ } + CoreParameter::HydropowerTarget(_) => todo!("Implement HydropowerTargetParameter"), + CoreParameter::Storage(p) => { + return Err(ConversionError::DeprecatedParameter { + ty: "StorageParameter".to_string(), + name: p.meta.map(|m| m.name).flatten().unwrap_or("unnamed".to_string()), + instead: "Use a derived metric instead.".to_string(), + }) + } + CoreParameter::RollingMeanFlowNode(_) => todo!("Implement RollingMeanFlowNodeParameter"), + CoreParameter::ScenarioWrapper(_) => todo!("Implement ScenarioWrapperParameter"), + CoreParameter::Flow(p) => { + return Err(ConversionError::DeprecatedParameter { + ty: "FlowParameter".to_string(), + name: p.meta.map(|m| m.name).flatten().unwrap_or("unnamed".to_string()), + instead: "Use a derived metric instead.".to_string(), + }) + } }, ParameterV1::Custom(p) => { println!("Custom parameter: {:?} ({})", p.meta.name, p.ty); @@ -436,7 +486,7 @@ impl TryFrom for ConstantValue { match v1 { ParameterValueV1::Constant(v) => Ok(Self::Literal(v)), ParameterValueV1::Reference(_) => Err(ConversionError::ConstantFloatReferencesParameter), - ParameterValueV1::Table(tbl) => Ok(Self::Table(tbl.into())), + ParameterValueV1::Table(tbl) => Ok(Self::Table(tbl.try_into()?)), ParameterValueV1::Inline(_) => Err(ConversionError::ConstantFloatInlineParameter), } } @@ -444,8 +494,8 @@ impl TryFrom for ConstantValue { #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] pub struct NodeReference { - name: String, - sub_name: Option, + pub name: String, + pub sub_name: Option, } impl NodeReference { @@ -622,7 +672,7 @@ impl TryFromV1Parameter for DynamicFloatValue { name: p_name, key: None, }), - ParameterValueV1::Table(tbl) => Self::Constant(ConstantValue::Table(tbl.into())), + ParameterValueV1::Table(tbl) => Self::Constant(ConstantValue::Table(tbl.try_into()?)), ParameterValueV1::Inline(param) => Self::Dynamic(MetricFloatValue::InlineParameter { definition: Box::new((*param).try_into_v2_parameter(parent_node, unnamed_count)?), }), @@ 
-675,7 +725,7 @@ impl TryFromV1Parameter for DynamicIndexValue { // TODO this could print a warning and do a cast to usize instead. ParameterValueV1::Constant(_) => return Err(ConversionError::FloatToIndex), ParameterValueV1::Reference(p_name) => Self::Dynamic(ParameterIndexValue::Reference(p_name)), - ParameterValueV1::Table(tbl) => Self::Constant(ConstantValue::Table(tbl.into())), + ParameterValueV1::Table(tbl) => Self::Constant(ConstantValue::Table(tbl.try_into()?)), ParameterValueV1::Inline(param) => Self::Dynamic(ParameterIndexValue::Inline(Box::new( (*param).try_into_v2_parameter(parent_node, unnamed_count)?, ))), @@ -708,18 +758,27 @@ impl ConstantFloatVec { #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] pub struct ExternalDataRef { - url: String, + url: PathBuf, column: Option, index: Option, } -impl From for ExternalDataRef { - fn from(v1: ExternalDataRefV1) -> Self { - Self { +impl TryFrom for ExternalDataRef { + type Error = ConversionError; + fn try_from(v1: ExternalDataRefV1) -> Result { + let column = match v1.column { + None => None, + Some(c) => Some(c.try_into()?), + }; + let index = match v1.index { + None => None, + Some(i) => Some(i.try_into()?), + }; + Ok(Self { url: v1.url, - column: v1.column.map(|i| i.into()), - index: v1.index.map(|i| i.into()), - } + column, + index, + }) } } @@ -730,11 +789,25 @@ pub enum TableIndex { Multi(Vec), } -impl From for TableIndex { - fn from(v1: TableIndexV1) -> Self { +impl TryFrom for TableIndex { + type Error = ConversionError; + + fn try_from(v1: TableIndexV1) -> Result { match v1 { - TableIndexV1::Single(s) => Self::Single(s), - TableIndexV1::Multi(s) => Self::Multi(s), + TableIndexV1::Single(s) => match s { + TableIndexEntryV1::Name(s) => Ok(TableIndex::Single(s)), + TableIndexEntryV1::Index(_) => Err(ConversionError::IntegerTableIndicesNotSupported), + }, + TableIndexV1::Multi(s) => { + let names = s + .into_iter() + .map(|e| match e { + TableIndexEntryV1::Name(s) => Ok(s), + 
TableIndexEntryV1::Index(_) => Err(ConversionError::IntegerTableIndicesNotSupported), + }) + .collect::, _>>()?; + Ok(Self::Multi(names)) + } } } } diff --git a/pywr-schema/src/parameters/profiles.rs b/pywr-schema/src/parameters/profiles.rs index fbc78e8b..7294d1db 100644 --- a/pywr-schema/src/parameters/profiles.rs +++ b/pywr-schema/src/parameters/profiles.rs @@ -50,9 +50,9 @@ impl TryFromV1Parameter for DailyProfileParameter { let values: ConstantFloatVec = if let Some(values) = v1.values { ConstantFloatVec::Literal(values) } else if let Some(external) = v1.external { - ConstantFloatVec::External(external.into()) + ConstantFloatVec::External(external.try_into()?) } else if let Some(table_ref) = v1.table_ref { - ConstantFloatVec::Table(table_ref.into()) + ConstantFloatVec::Table(table_ref.try_into()?) } else { return Err(ConversionError::MissingAttribute { name: meta.name, @@ -134,9 +134,9 @@ impl TryFromV1Parameter for MonthlyProfileParameter { let values: ConstantFloatVec = if let Some(values) = v1.values { ConstantFloatVec::Literal(values.to_vec()) } else if let Some(external) = v1.external { - ConstantFloatVec::External(external.into()) + ConstantFloatVec::External(external.try_into()?) } else if let Some(table_ref) = v1.table_ref { - ConstantFloatVec::Table(table_ref.into()) + ConstantFloatVec::Table(table_ref.try_into()?) } else { return Err(ConversionError::MissingAttribute { name: meta.name, diff --git a/pywr-schema/src/parameters/tables.rs b/pywr-schema/src/parameters/tables.rs index 18494e63..f9dcca19 100644 --- a/pywr-schema/src/parameters/tables.rs +++ b/pywr-schema/src/parameters/tables.rs @@ -16,6 +16,7 @@ pub struct TablesArrayParameter { pub scenario: Option, pub checksum: Option>, pub url: PathBuf, + pub timestep_offset: Option, } impl TablesArrayParameter { @@ -60,11 +61,16 @@ impl TablesArrayParameter { // 3. Create an ArrayParameter using the loaded array. 
if let Some(scenario) = &self.scenario { let scenario_group = model.get_scenario_group_index_by_name(scenario)?; - let p = pywr_core::parameters::Array2Parameter::new(&self.meta.name, array, scenario_group); + let p = pywr_core::parameters::Array2Parameter::new( + &self.meta.name, + array, + scenario_group, + self.timestep_offset, + ); Ok(model.add_parameter(Box::new(p))?) } else { let array = array.slice_move(s![.., 0]); - let p = pywr_core::parameters::Array1Parameter::new(&self.meta.name, array); + let p = pywr_core::parameters::Array1Parameter::new(&self.meta.name, array, self.timestep_offset); Ok(model.add_parameter(Box::new(p))?) } } @@ -85,6 +91,7 @@ impl TryFromV1Parameter for TablesArrayParameter { scenario: v1.scenario, checksum: v1.checksum, url: v1.url, + timestep_offset: None, }; Ok(p) } From 5fcef40ec260473322316cb98056706fea60e2e8 Mon Sep 17 00:00:00 2001 From: James Tomlinson Date: Tue, 28 Nov 2023 16:18:47 +0000 Subject: [PATCH 2/7] feat: Initial implementation of virtual storage costs. (#71) Virtual storage nodes now add a reference to their linked flow nodes. Those nodes now include a function that aggregates over the local cost and the costs of any linked virtual storage nodes. There's currently no method to change the default aggregation function from "max" via the schema. 
--- pywr-core/src/lib.rs | 2 + pywr-core/src/model.rs | 15 +- pywr-core/src/node.rs | 137 +++++++++++++++--- pywr-core/src/virtual_storage.rs | 50 ++++++- .../src/nodes/annual_virtual_storage.rs | 6 + .../src/nodes/monthly_virtual_storage.rs | 6 + pywr-schema/src/nodes/virtual_storage.rs | 6 + 7 files changed, 193 insertions(+), 29 deletions(-) diff --git a/pywr-core/src/lib.rs b/pywr-core/src/lib.rs index 02c79633..40e45056 100644 --- a/pywr-core/src/lib.rs +++ b/pywr-core/src/lib.rs @@ -79,6 +79,8 @@ pub enum PywrError { FlowConstraintsUndefined, #[error("storage constraints are undefined for this node")] StorageConstraintsUndefined, + #[error("can not add virtual storage node to a storage node")] + NoVirtualStorageOnStorageNode, #[error("timestep index out of range")] TimestepIndexOutOfRange, #[error("solver not initialised")] diff --git a/pywr-core/src/model.rs b/pywr-core/src/model.rs index 4e5ae3f6..fc7dd932 100644 --- a/pywr-core/src/model.rs +++ b/pywr-core/src/model.rs @@ -1352,12 +1352,13 @@ impl Model { min_volume: ConstraintValue, max_volume: ConstraintValue, reset: VirtualStorageReset, + cost: ConstraintValue, ) -> Result { if let Ok(_agg_node) = self.get_virtual_storage_node_by_name(name, sub_name) { return Err(PywrError::NodeNameAlreadyExists(name.to_string())); } - let node_index = self.virtual_storage_nodes.push_new( + let vs_node_index = self.virtual_storage_nodes.push_new( name, sub_name, nodes, @@ -1366,12 +1367,20 @@ impl Model { min_volume, max_volume, reset, + cost, ); + // Link the virtual storage node to the nodes it is including + for node_idx in nodes { + let node = self.nodes.get_mut(node_idx)?; + node.add_virtual_storage(vs_node_index)?; + } + // Add to the resolve order. 
- self.resolve_order.push(ComponentType::VirtualStorageNode(node_index)); + self.resolve_order + .push(ComponentType::VirtualStorageNode(vs_node_index)); - Ok(node_index) + Ok(vs_node_index) } /// Add a `parameters::Parameter` to the model diff --git a/pywr-core/src/node.rs b/pywr-core/src/node.rs index 17dbeac1..3cce421f 100644 --- a/pywr-core/src/node.rs +++ b/pywr-core/src/node.rs @@ -3,6 +3,7 @@ use crate::metric::Metric; use crate::model::Model; use crate::state::{NodeState, State}; use crate::timestep::Timestep; +use crate::virtual_storage::VirtualStorageIndex; use crate::PywrError; use std::ops::{Deref, DerefMut}; @@ -123,6 +124,13 @@ impl From for ConstraintValue { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CostAggFunc { + Sum, + Max, + Min, +} + impl Node { /// Create a new input node pub fn new_input(node_index: &NodeIndex, name: &str, sub_name: Option<&str>) -> Self { @@ -286,6 +294,24 @@ impl Node { } } + pub fn add_virtual_storage(&mut self, virtual_storage_index: VirtualStorageIndex) -> Result<(), PywrError> { + match self { + Self::Input(n) => { + n.cost.virtual_storage_nodes.push(virtual_storage_index); + Ok(()) + } + Self::Output(n) => { + n.cost.virtual_storage_nodes.push(virtual_storage_index); + Ok(()) + } + Self::Link(n) => { + n.cost.virtual_storage_nodes.push(virtual_storage_index); + Ok(()) + } + Self::Storage(_) => Err(PywrError::NoVirtualStorageOnStorageNode), + } + } + // /// Return a reference to a node's flow constraints if they exist. 
// fn flow_constraints(&self) -> Option<&FlowConstraints> { // match self { @@ -502,6 +528,17 @@ impl Node { } } + pub fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) -> Result<(), PywrError> { + match self { + Self::Input(n) => n.set_cost_agg_func(agg_func), + Self::Link(n) => n.set_cost_agg_func(agg_func), + Self::Output(n) => n.set_cost_agg_func(agg_func), + Self::Storage(_) => return Err(PywrError::NoVirtualStorageOnStorageNode), + }; + + Ok(()) + } + pub fn get_outgoing_cost(&self, model: &Model, state: &State) -> Result { match self { Self::Input(n) => n.get_cost(model, state), @@ -628,10 +665,65 @@ impl StorageConstraints { } } +/// Generic cost data for a node. +#[derive(Debug, PartialEq)] +struct NodeCost { + local: ConstraintValue, + virtual_storage_nodes: Vec, + agg_func: CostAggFunc, +} + +impl Default for NodeCost { + fn default() -> Self { + Self { + local: ConstraintValue::None, + virtual_storage_nodes: Vec::new(), + agg_func: CostAggFunc::Max, + } + } +} + +impl NodeCost { + fn get_cost(&self, model: &Model, state: &State) -> Result { + let local_cost = match &self.local { + ConstraintValue::None => Ok(0.0), + ConstraintValue::Scalar(v) => Ok(*v), + ConstraintValue::Metric(m) => m.get_value(model, state), + }?; + + let vs_costs: Vec = self + .virtual_storage_nodes + .iter() + .map(|idx| { + let vs = model.get_virtual_storage_node(idx)?; + vs.get_cost(model, state) + }) + .collect::>()?; + + let cost = match self.agg_func { + CostAggFunc::Sum => local_cost + vs_costs.iter().sum::(), + CostAggFunc::Max => local_cost.max( + vs_costs + .into_iter() + .max_by(|a, b| a.total_cmp(b)) + .unwrap_or(f64::NEG_INFINITY), + ), + CostAggFunc::Min => local_cost.min( + vs_costs + .into_iter() + .min_by(|a, b| a.total_cmp(b)) + .unwrap_or(f64::INFINITY), + ), + }; + + Ok(cost) + } +} + #[derive(Debug, PartialEq)] pub struct InputNode { pub meta: NodeMeta, - pub cost: ConstraintValue, + cost: NodeCost, pub flow_constraints: FlowConstraints, pub outgoing_edges: 
Vec, } @@ -640,20 +732,19 @@ impl InputNode { fn new(index: &NodeIndex, name: &str, sub_name: Option<&str>) -> Self { Self { meta: NodeMeta::new(index, name, sub_name), - cost: ConstraintValue::None, + cost: NodeCost::default(), flow_constraints: FlowConstraints::new(), outgoing_edges: Vec::new(), } } fn set_cost(&mut self, value: ConstraintValue) { - self.cost = value + self.cost.local = value + } + fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { + self.cost.agg_func = agg_func } fn get_cost(&self, model: &Model, state: &State) -> Result { - match &self.cost { - ConstraintValue::None => Ok(0.0), - ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), - } + self.cost.get_cost(model, state) } fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; @@ -678,7 +769,7 @@ impl InputNode { #[derive(Debug, PartialEq)] pub struct OutputNode { pub meta: NodeMeta, - pub cost: ConstraintValue, + cost: NodeCost, pub flow_constraints: FlowConstraints, pub incoming_edges: Vec, } @@ -687,20 +778,19 @@ impl OutputNode { fn new(index: &NodeIndex, name: &str, sub_name: Option<&str>) -> Self { Self { meta: NodeMeta::new(index, name, sub_name), - cost: ConstraintValue::None, + cost: NodeCost::default(), flow_constraints: FlowConstraints::new(), incoming_edges: Vec::new(), } } fn set_cost(&mut self, value: ConstraintValue) { - self.cost = value + self.cost.local = value } fn get_cost(&self, model: &Model, state: &State) -> Result { - match &self.cost { - ConstraintValue::None => Ok(0.0), - ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), - } + self.cost.get_cost(model, state) + } + fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { + self.cost.agg_func = agg_func } fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; @@ -725,7 +815,7 @@ impl OutputNode { #[derive(Debug, PartialEq)] pub struct LinkNode { pub 
meta: NodeMeta, - pub cost: ConstraintValue, + cost: NodeCost, pub flow_constraints: FlowConstraints, pub incoming_edges: Vec, pub outgoing_edges: Vec, @@ -735,21 +825,20 @@ impl LinkNode { fn new(index: &NodeIndex, name: &str, sub_name: Option<&str>) -> Self { Self { meta: NodeMeta::new(index, name, sub_name), - cost: ConstraintValue::None, + cost: NodeCost::default(), flow_constraints: FlowConstraints::new(), incoming_edges: Vec::new(), outgoing_edges: Vec::new(), } } fn set_cost(&mut self, value: ConstraintValue) { - self.cost = value + self.cost.local = value + } + fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { + self.cost.agg_func = agg_func } fn get_cost(&self, model: &Model, state: &State) -> Result { - match &self.cost { - ConstraintValue::None => Ok(0.0), - ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), - } + self.cost.get_cost(model, state) } fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; diff --git a/pywr-core/src/virtual_storage.rs b/pywr-core/src/virtual_storage.rs index 2c5c99f3..d16b5063 100644 --- a/pywr-core/src/virtual_storage.rs +++ b/pywr-core/src/virtual_storage.rs @@ -55,6 +55,7 @@ impl VirtualStorageVec { min_volume: ConstraintValue, max_volume: ConstraintValue, reset: VirtualStorageReset, + cost: ConstraintValue, ) -> VirtualStorageIndex { let node_index = VirtualStorageIndex(self.nodes.len()); let node = VirtualStorage::new( @@ -67,6 +68,7 @@ impl VirtualStorageVec { min_volume, max_volume, reset, + cost, ); self.nodes.push(node); node_index @@ -88,6 +90,7 @@ pub struct VirtualStorage { pub initial_volume: StorageInitialVolume, pub storage_constraints: StorageConstraints, pub reset: VirtualStorageReset, + pub cost: ConstraintValue, } impl VirtualStorage { @@ -101,6 +104,7 @@ impl VirtualStorage { min_volume: ConstraintValue, max_volume: ConstraintValue, reset: VirtualStorageReset, + cost: ConstraintValue, ) -> Self { Self { meta: 
NodeMeta::new(index, name, sub_name), @@ -110,6 +114,7 @@ impl VirtualStorage { initial_volume, storage_constraints: StorageConstraints::new(min_volume, max_volume), reset, + cost, } } @@ -139,6 +144,14 @@ impl VirtualStorage { VirtualStorageState::new(0.0) } + pub fn get_cost(&self, model: &Model, state: &State) -> Result { + match &self.cost { + ConstraintValue::None => Ok(0.0), + ConstraintValue::Scalar(v) => Ok(*v), + ConstraintValue::Metric(m) => m.get_value(model, state), + } + } + pub fn before(&self, timestep: &Timestep, model: &Model, state: &mut State) -> Result<(), PywrError> { let do_reset = if timestep.is_first() { // Set the initial volume if it is the first timestep. @@ -219,12 +232,13 @@ mod tests { use crate::metric::Metric; use crate::model::Model; use crate::node::{ConstraintValue, StorageInitialVolume}; - use crate::recorders::AssertionFnRecorder; + use crate::recorders::{AssertionFnRecorder, AssertionRecorder}; use crate::scenario::ScenarioIndex; use crate::solvers::{ClpSolver, ClpSolverSettings}; - use crate::test_utils::{default_timestepper, run_all_solvers}; + use crate::test_utils::{default_timestepper, run_all_solvers, simple_model}; use crate::timestep::Timestep; use crate::virtual_storage::{months_since_last_reset, VirtualStorageReset}; + use ndarray::Array; use time::macros::date; /// Test the calculation of number of months since last reset @@ -278,6 +292,7 @@ mod tests { ConstraintValue::Scalar(0.0), ConstraintValue::Scalar(100.0), VirtualStorageReset::Never, + ConstraintValue::Scalar(0.0), ); // Setup a demand on output-0 and output-1 @@ -319,4 +334,35 @@ mod tests { // Test all solvers run_all_solvers(&model, ×tepper); } + + #[test] + /// Test virtual storage node costs + fn test_virtual_storage_node_costs() { + let mut model = simple_model(1); + let timestepper = default_timestepper(); + + let nodes = vec![model.get_node_index_by_name("input", None).unwrap()]; + // Virtual storage node cost is high enough to prevent any flow + 
model + .add_virtual_storage_node( + "vs", + None, + &nodes, + None, + StorageInitialVolume::Proportional(1.0), + ConstraintValue::Scalar(0.0), + ConstraintValue::Scalar(100.0), + VirtualStorageReset::Never, + ConstraintValue::Scalar(20.0), + ) + .unwrap(); + + let expected = Array::zeros((366, 1)); + let idx = model.get_node_by_name("output", None).unwrap().index(); + let recorder = AssertionRecorder::new("output-flow", Metric::NodeInFlow(idx), expected, None, None); + model.add_recorder(Box::new(recorder)).unwrap(); + + // Test all solvers + run_all_solvers(&model, ×tepper); + } } diff --git a/pywr-schema/src/nodes/annual_virtual_storage.rs b/pywr-schema/src/nodes/annual_virtual_storage.rs index 5a054db9..3b11b08b 100644 --- a/pywr-schema/src/nodes/annual_virtual_storage.rs +++ b/pywr-schema/src/nodes/annual_virtual_storage.rs @@ -54,6 +54,11 @@ impl AnnualVirtualStorageNode { return Err(SchemaError::MissingInitialVolume(self.meta.name.to_string())); }; + let cost = match &self.cost { + Some(v) => v.load(model, tables, data_path)?.into(), + None => ConstraintValue::Scalar(0.0), + }; + let min_volume = match &self.min_volume { Some(v) => v.load(model, tables, data_path)?.into(), None => ConstraintValue::Scalar(0.0), @@ -84,6 +89,7 @@ impl AnnualVirtualStorageNode { min_volume, max_volume, reset, + cost, )?; Ok(()) } diff --git a/pywr-schema/src/nodes/monthly_virtual_storage.rs b/pywr-schema/src/nodes/monthly_virtual_storage.rs index 0657372b..2c9bf47f 100644 --- a/pywr-schema/src/nodes/monthly_virtual_storage.rs +++ b/pywr-schema/src/nodes/monthly_virtual_storage.rs @@ -48,6 +48,11 @@ impl MonthlyVirtualStorageNode { return Err(SchemaError::MissingInitialVolume(self.meta.name.to_string())); }; + let cost = match &self.cost { + Some(v) => v.load(model, tables, data_path)?.into(), + None => ConstraintValue::Scalar(0.0), + }; + let min_volume = match &self.min_volume { Some(v) => v.load(model, tables, data_path)?.into(), None => ConstraintValue::Scalar(0.0), @@ -78,6 
+83,7 @@ impl MonthlyVirtualStorageNode { min_volume, max_volume, reset, + cost, )?; Ok(()) } diff --git a/pywr-schema/src/nodes/virtual_storage.rs b/pywr-schema/src/nodes/virtual_storage.rs index 671aadaf..30da54cb 100644 --- a/pywr-schema/src/nodes/virtual_storage.rs +++ b/pywr-schema/src/nodes/virtual_storage.rs @@ -36,6 +36,11 @@ impl VirtualStorageNode { return Err(SchemaError::MissingInitialVolume(self.meta.name.to_string())); }; + let cost = match &self.cost { + Some(v) => v.load(model, tables, data_path)?.into(), + None => ConstraintValue::Scalar(0.0), + }; + let min_volume = match &self.min_volume { Some(v) => v.load(model, tables, data_path)?.into(), None => ConstraintValue::Scalar(0.0), @@ -64,6 +69,7 @@ impl VirtualStorageNode { min_volume, max_volume, reset, + cost, )?; Ok(()) } From 8c8e57310945c5fd55e10ca01b5cbabb78f810bf Mon Sep 17 00:00:00 2001 From: James Tomlinson Date: Tue, 28 Nov 2023 23:57:48 +0000 Subject: [PATCH 3/7] chore: Make all Pywr crates to version 2.0.0-dev (#75) --- pywr-cli/Cargo.toml | 2 +- pywr-python/Cargo.toml | 2 +- pywr-schema/Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pywr-cli/Cargo.toml b/pywr-cli/Cargo.toml index 7aad221e..48fb7812 100644 --- a/pywr-cli/Cargo.toml +++ b/pywr-cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pywr-cli" -version = "0.1.0" +version = "2.0.0-dev" edition = "2021" rust-version = "1.60" description = "A generalised water resource allocation model." diff --git a/pywr-python/Cargo.toml b/pywr-python/Cargo.toml index c0206067..2f650844 100644 --- a/pywr-python/Cargo.toml +++ b/pywr-python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pywr-python" -version = "0.1.0" +version = "2.0.0-dev" edition = "2021" rust-version = "1.60" description = "A generalised water resource allocation model." 
diff --git a/pywr-schema/Cargo.toml b/pywr-schema/Cargo.toml index f5112477..b65a0737 100644 --- a/pywr-schema/Cargo.toml +++ b/pywr-schema/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pywr-schema" -version = "0.1.0" +version = "2.0.0-dev" authors = ["James Tomlinson "] edition = "2021" rust-version = "1.60" From 12e2328015c27e498227a983aea6226584460587 Mon Sep 17 00:00:00 2001 From: James Tomlinson Date: Tue, 12 Dec 2023 10:27:05 +0000 Subject: [PATCH 4/7] feat: Add RbfProfileParameter (#55) An initial implementation of RbfProfile parameter. It uses a basic RBF interpolation procedure. It also implements `VariableParameter` trait. This has required refactoring the `VariableParameter` trait into a generic and then providing an implementation for f64 and u32. This mimics the float and integer APIs from Pywr v1.x. `TryFromV1Parameter` is implemented for RbfProfileParameter following an update to v1 schema v0.9.0. --- Cargo.toml | 4 +- pywr-core/Cargo.toml | 1 + pywr-core/src/model.rs | 40 +- pywr-core/src/parameters/constant.rs | 23 +- pywr-core/src/parameters/mod.rs | 60 ++- pywr-core/src/parameters/offset.rs | 6 +- pywr-core/src/parameters/profiles/mod.rs | 2 + pywr-core/src/parameters/profiles/rbf.rs | 440 ++++++++++++++++++ pywr-schema/src/error.rs | 9 + .../src/parameters/doc_examples/rbf_1.json | 10 + .../src/parameters/doc_examples/rbf_2.json | 16 + pywr-schema/src/parameters/mod.rs | 11 +- pywr-schema/src/parameters/profiles.rs | 257 +++++++++- 13 files changed, 842 insertions(+), 37 deletions(-) create mode 100644 pywr-core/src/parameters/profiles/rbf.rs create mode 100644 pywr-schema/src/parameters/doc_examples/rbf_1.json create mode 100644 pywr-schema/src/parameters/doc_examples/rbf_2.json diff --git a/Cargo.toml b/Cargo.toml index ac4fd74b..853a9e71 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,3 @@ - - [workspace] resolver = "2" members = [ @@ -47,4 +45,4 @@ tracing = "0.1" csv = "1.1" hdf5 = { version="0.8.1" } hdf5-sys = { version="0.8.1", 
features=["static"] } -pywr-v1-schema = { git = "https://github.com/pywr/pywr-schema/", tag="v0.8.0", package = "pywr-schema" } +pywr-v1-schema = { git = "https://github.com/pywr/pywr-schema/", tag="v0.9.0", package = "pywr-schema" } diff --git a/pywr-core/Cargo.toml b/pywr-core/Cargo.toml index 00b15761..589f0162 100644 --- a/pywr-core/Cargo.toml +++ b/pywr-core/Cargo.toml @@ -31,6 +31,7 @@ tracing = { workspace = true } tracing-subscriber = { version ="0.3.17", features=["env-filter"] } highs-sys = { git = "https://github.com/jetuk/highs-sys", branch="fix-build-libz-linking", optional = true } # highs-sys = { path = "../../highs-sys" } +nalgebra = "0.32.3" pyo3 = { workspace = true } diff --git a/pywr-core/src/model.rs b/pywr-core/src/model.rs index fc7dd932..1dcfc1d1 100644 --- a/pywr-core/src/model.rs +++ b/pywr-core/src/model.rs @@ -1518,9 +1518,9 @@ impl Model { } /// Set the variable values on the parameter a index `['idx']. - pub fn set_parameter_variable_values(&mut self, idx: ParameterIndex, values: &[f64]) -> Result<(), PywrError> { + pub fn set_f64_parameter_variable_values(&mut self, idx: ParameterIndex, values: &[f64]) -> Result<(), PywrError> { match self.parameters.get_mut(*idx.deref()) { - Some(parameter) => match parameter.as_variable_mut() { + Some(parameter) => match parameter.as_f64_variable_mut() { Some(variable) => variable.set_variables(values), None => Err(PywrError::ParameterTypeNotVariable), }, @@ -1529,10 +1529,30 @@ impl Model { } /// Return a vector of the current values of active variable parameters. - pub fn get_parameter_variable_values(&self) -> Vec { + pub fn get_f64_parameter_variable_values(&self) -> Vec { self.parameters .iter() - .filter_map(|p| p.as_variable().filter(|v| v.is_active()).map(|v| v.get_variables())) + .filter_map(|p| p.as_f64_variable().filter(|v| v.is_active()).map(|v| v.get_variables())) + .flatten() + .collect() + } + + /// Set the variable values on the parameter a index `['idx']. 
+ pub fn set_u32_parameter_variable_values(&mut self, idx: ParameterIndex, values: &[u32]) -> Result<(), PywrError> { + match self.parameters.get_mut(*idx.deref()) { + Some(parameter) => match parameter.as_u32_variable_mut() { + Some(variable) => variable.set_variables(values), + None => Err(PywrError::ParameterTypeNotVariable), + }, + None => Err(PywrError::ParameterIndexNotFound(idx)), + } + } + + /// Return a vector of the current values of active variable parameters. + pub fn get_u32_parameter_variable_values(&self) -> Vec { + self.parameters + .iter() + .filter_map(|p| p.as_u32_variable().filter(|v| v.is_active()).map(|v| v.get_variables())) .flatten() .collect() } @@ -1892,8 +1912,8 @@ mod tests { let variable = ActivationFunction::Unit { min: 0.0, max: 10.0 }; let input_max_flow = parameters::ConstantParameter::new("my-constant", 10.0, Some(variable)); - assert!(input_max_flow.can_be_variable()); - assert!(input_max_flow.is_variable_active()); + assert!(input_max_flow.can_be_f64_variable()); + assert!(input_max_flow.is_f64_variable_active()); assert!(input_max_flow.is_active()); let input_max_flow_idx = model.add_parameter(Box::new(input_max_flow)).unwrap(); @@ -1906,13 +1926,15 @@ mod tests { ) .unwrap(); - let variable_values = model.get_parameter_variable_values(); + let variable_values = model.get_f64_parameter_variable_values(); assert_eq!(variable_values, vec![10.0]); // Update the variable values - model.set_parameter_variable_values(input_max_flow_idx, &[5.0]).unwrap(); + model + .set_f64_parameter_variable_values(input_max_flow_idx, &[5.0]) + .unwrap(); - let variable_values = model.get_parameter_variable_values(); + let variable_values = model.get_f64_parameter_variable_values(); assert_eq!(variable_values, vec![5.0]); } } diff --git a/pywr-core/src/parameters/constant.rs b/pywr-core/src/parameters/constant.rs index 3d5f77e1..5111992d 100644 --- a/pywr-core/src/parameters/constant.rs +++ b/pywr-core/src/parameters/constant.rs @@ -1,5 +1,5 @@ use 
crate::model::Model; -use crate::parameters::{ActivationFunction, Parameter, ParameterMeta, VariableParameter}; +use crate::parameters::{downcast_internal_state, ActivationFunction, Parameter, ParameterMeta, VariableParameter}; use crate::scenario::ScenarioIndex; use crate::state::State; use crate::timestep::Timestep; @@ -30,27 +30,38 @@ impl Parameter for ConstantParameter { fn meta(&self) -> &ParameterMeta { &self.meta } + + fn setup( + &self, + timesteps: &[Timestep], + scenario_index: &ScenarioIndex, + ) -> Result>, PywrError> { + Ok(Some(Box::new(self.value))) + } + fn compute( &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, _model: &Model, _state: &State, - _internal_state: &mut Option>, + internal_state: &mut Option>, ) -> Result { - Ok(self.value) + let value = downcast_internal_state::(internal_state); + + Ok(*value) } - fn as_variable(&self) -> Option<&dyn VariableParameter> { + fn as_f64_variable(&self) -> Option<&dyn VariableParameter> { Some(self) } - fn as_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + fn as_f64_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { Some(self) } } -impl VariableParameter for ConstantParameter { +impl VariableParameter for ConstantParameter { fn is_active(&self) -> bool { self.variable.is_some() } diff --git a/pywr-core/src/parameters/mod.rs b/pywr-core/src/parameters/mod.rs index 99978519..1c724963 100644 --- a/pywr-core/src/parameters/mod.rs +++ b/pywr-core/src/parameters/mod.rs @@ -52,7 +52,10 @@ pub use min::MinParameter; pub use negative::NegativeParameter; pub use offset::OffsetParameter; pub use polynomial::Polynomial1DParameter; -pub use profiles::{DailyProfileParameter, MonthlyInterpDay, MonthlyProfileParameter, UniformDrawdownProfileParameter}; +pub use profiles::{ + DailyProfileParameter, MonthlyInterpDay, MonthlyProfileParameter, RadialBasisFunction, RbfProfileParameter, + RbfProfileVariableConfig, UniformDrawdownProfileParameter, +}; pub use py::PyParameter; use 
std::fmt; use std::fmt::{Display, Formatter}; @@ -194,24 +197,47 @@ pub trait Parameter: Send + Sync { Ok(()) } - /// Return the parameter as a [`VariableParameter'] if it supports being a variable. - fn as_variable(&self) -> Option<&dyn VariableParameter> { + /// Return the parameter as a [`VariableParameter'] if it supports being a variable. + fn as_f64_variable(&self) -> Option<&dyn VariableParameter> { + None + } + + /// Return the parameter as a [`VariableParameter'] if it supports being a variable. + fn as_f64_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + None + } + + /// Can this parameter be a variable + fn can_be_f64_variable(&self) -> bool { + self.as_f64_variable().is_some() + } + + /// Is this parameter an active variable + fn is_f64_variable_active(&self) -> bool { + match self.as_f64_variable() { + Some(var) => var.is_active(), + None => false, + } + } + + /// Return the parameter as a [`VariableParameter'] if it supports being a variable. + fn as_u32_variable(&self) -> Option<&dyn VariableParameter> { None } - /// Return the parameter as a [`VariableParameter'] if it supports being a variable. - fn as_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + /// Return the parameter as a [`VariableParameter'] if it supports being a variable. + fn as_u32_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { None } /// Can this parameter be a variable - fn can_be_variable(&self) -> bool { - self.as_variable().is_some() + fn can_be_u32_variable(&self) -> bool { + self.as_u32_variable().is_some() } /// Is this parameter an active variable - fn is_variable_active(&self) -> bool { - match self.as_variable() { + fn is_u32_variable_active(&self) -> bool { + match self.as_u32_variable() { Some(var) => var.is_active(), None => false, } @@ -308,19 +334,25 @@ pub enum ParameterType { Multi(MultiValueParameterIndex), } -pub trait VariableParameter { +/// A parameter that can be optimised. 
+/// +/// This trait is used to allow parameter's internal values to be accessed and altered by +/// external algorithms. It is primarily designed to be used by the optimisation algorithms +/// such as multi-objective evolutionary algorithms. The trait is generic to the type of +/// the variable values being optimised but these will typically by `f64` and `u32`. +pub trait VariableParameter { /// Is this variable activated (i.e. should be used in optimisation) fn is_active(&self) -> bool; /// Return the number of variables required fn size(&self) -> usize; /// Apply new variable values to the parameter - fn set_variables(&mut self, values: &[f64]) -> Result<(), PywrError>; + fn set_variables(&mut self, values: &[T]) -> Result<(), PywrError>; /// Get the current variable values - fn get_variables(&self) -> Vec; + fn get_variables(&self) -> Vec; /// Get variable lower bounds - fn get_lower_bounds(&self) -> Result, PywrError>; + fn get_lower_bounds(&self) -> Result, PywrError>; /// Get variable upper bounds - fn get_upper_bounds(&self) -> Result, PywrError>; + fn get_upper_bounds(&self) -> Result, PywrError>; } #[cfg(test)] diff --git a/pywr-core/src/parameters/offset.rs b/pywr-core/src/parameters/offset.rs index 8b18517a..c0d13d86 100644 --- a/pywr-core/src/parameters/offset.rs +++ b/pywr-core/src/parameters/offset.rs @@ -45,16 +45,16 @@ impl Parameter for OffsetParameter { let x = self.metric.get_value(model, state)?; Ok(x + self.offset) } - fn as_variable(&self) -> Option<&dyn VariableParameter> { + fn as_f64_variable(&self) -> Option<&dyn VariableParameter> { Some(self) } - fn as_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + fn as_f64_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { Some(self) } } -impl VariableParameter for OffsetParameter { +impl VariableParameter for OffsetParameter { fn is_active(&self) -> bool { self.variable.is_some() } diff --git a/pywr-core/src/parameters/profiles/mod.rs 
b/pywr-core/src/parameters/profiles/mod.rs index 8b32b134..f138cbb7 100644 --- a/pywr-core/src/parameters/profiles/mod.rs +++ b/pywr-core/src/parameters/profiles/mod.rs @@ -1,7 +1,9 @@ mod daily; mod monthly; +mod rbf; mod uniform_drawdown; pub use daily::DailyProfileParameter; pub use monthly::{MonthlyInterpDay, MonthlyProfileParameter}; +pub use rbf::{RadialBasisFunction, RbfProfileParameter, RbfProfileVariableConfig}; pub use uniform_drawdown::UniformDrawdownProfileParameter; diff --git a/pywr-core/src/parameters/profiles/rbf.rs b/pywr-core/src/parameters/profiles/rbf.rs new file mode 100644 index 00000000..fa334cf2 --- /dev/null +++ b/pywr-core/src/parameters/profiles/rbf.rs @@ -0,0 +1,440 @@ +use crate::model::Model; +use crate::parameters::{downcast_internal_state, Parameter, ParameterMeta, VariableParameter}; +use crate::scenario::ScenarioIndex; +use crate::state::State; +use crate::timestep::Timestep; +use crate::PywrError; +use nalgebra::DMatrix; +use std::any::Any; + +pub struct RbfProfileVariableConfig { + days_of_year_range: Option, + value_upper_bounds: f64, + value_lower_bounds: f64, +} + +impl RbfProfileVariableConfig { + pub fn new(days_of_year_range: Option, value_upper_bounds: f64, value_lower_bounds: f64) -> Self { + Self { + days_of_year_range, + value_upper_bounds, + value_lower_bounds, + } + } +} + +/// A parameter that interpolates between a set of points using a radial basis function to +/// create a daily profile. 
+pub struct RbfProfileParameter { + meta: ParameterMeta, + points: Vec<(u32, f64)>, + function: RadialBasisFunction, + variable: Option, +} + +impl RbfProfileParameter { + pub fn new( + name: &str, + points: Vec<(u32, f64)>, + function: RadialBasisFunction, + variable: Option, + ) -> Self { + Self { + meta: ParameterMeta::new(name), + points, + function, + variable, + } + } +} + +impl Parameter for RbfProfileParameter { + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + + fn meta(&self) -> &ParameterMeta { + &self.meta + } + + fn setup( + &self, + _timesteps: &[Timestep], + _scenario_index: &ScenarioIndex, + ) -> Result>, PywrError> { + let profile = interpolate_rbf_profile(&self.points, &self.function); + Ok(Some(Box::new(profile))) + } + + fn compute( + &self, + timestep: &Timestep, + _scenario_index: &ScenarioIndex, + _model: &Model, + _state: &State, + internal_state: &mut Option>, + ) -> Result { + // Get the profile from the internal state + let profile = downcast_internal_state::<[f64; 366]>(internal_state); + // Return today's value from the profile + Ok(profile[timestep.date.ordinal() as usize - 1]) + } + + fn as_f64_variable(&self) -> Option<&dyn VariableParameter> { + Some(self) + } + + fn as_f64_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + Some(self) + } + + fn as_u32_variable(&self) -> Option<&dyn VariableParameter> { + Some(self) + } + + fn as_u32_variable_mut(&mut self) -> Option<&mut dyn VariableParameter> { + Some(self) + } +} + +impl VariableParameter for RbfProfileParameter { + fn is_active(&self) -> bool { + self.variable.is_some() + } + + /// The size is the number of points that define the profile. + fn size(&self) -> usize { + self.points.len() + } + + /// The f64 values update the profile value of each point. 
+ fn set_variables(&mut self, values: &[f64]) -> Result<(), PywrError> { + if values.len() == self.points.len() { + for (point, v) in self.points.iter_mut().zip(values) { + point.1 = *v; + } + Ok(()) + } else { + Err(PywrError::ParameterVariableValuesIncorrectLength) + } + } + + /// The f64 values are the profile values of each point. + fn get_variables(&self) -> Vec { + self.points.iter().map(|p| p.1).collect() + } + + fn get_lower_bounds(&self) -> Result, PywrError> { + if let Some(variable) = &self.variable { + let lb = (0..self.points.len()).map(|_| variable.value_lower_bounds).collect(); + Ok(lb) + } else { + Err(PywrError::ParameterVariableNotActive) + } + } + + fn get_upper_bounds(&self) -> Result, PywrError> { + if let Some(variable) = &self.variable { + let ub = (0..self.points.len()).map(|_| variable.value_upper_bounds).collect(); + Ok(ub) + } else { + Err(PywrError::ParameterVariableNotActive) + } + } +} + +impl VariableParameter for RbfProfileParameter { + fn is_active(&self) -> bool { + self.variable.as_ref().is_some_and(|v| v.days_of_year_range.is_some()) + } + + /// The size is the number of points that define the profile. + fn size(&self) -> usize { + self.points.len() + } + + /// Sets the day of year for each point. + fn set_variables(&mut self, values: &[u32]) -> Result<(), PywrError> { + if values.len() == self.points.len() { + for (point, v) in self.points.iter_mut().zip(values) { + point.0 = *v; + } + Ok(()) + } else { + Err(PywrError::ParameterVariableValuesIncorrectLength) + } + } + + /// Returns the day of year for each point. 
+ fn get_variables(&self) -> Vec { + self.points.iter().map(|p| p.0).collect() + } + + fn get_lower_bounds(&self) -> Result, PywrError> { + if let Some(variable) = &self.variable { + if let Some(days_of_year_range) = &variable.days_of_year_range { + // Make sure the lower bound is not less than 1 and handle integer underflow + let lb = self + .points + .iter() + .map(|p| p.0.checked_sub(*days_of_year_range).unwrap_or(1).max(1)) + .collect(); + + Ok(lb) + } else { + Err(PywrError::ParameterVariableNotActive) + } + } else { + Err(PywrError::ParameterVariableNotActive) + } + } + + fn get_upper_bounds(&self) -> Result, PywrError> { + if let Some(variable) = &self.variable { + if let Some(days_of_year_range) = &variable.days_of_year_range { + // Make sure the upper bound is not greater than 365 and handle integer overflow + let lb = self + .points + .iter() + .map(|p| p.0.checked_add(*days_of_year_range).unwrap_or(365).min(365)) + .collect(); + + Ok(lb) + } else { + Err(PywrError::ParameterVariableNotActive) + } + } else { + Err(PywrError::ParameterVariableNotActive) + } + } +} + +/// Radial basis functions for interpolation. +pub enum RadialBasisFunction { + Linear, + Cubic, + Quintic, + ThinPlateSpline, + Gaussian { epsilon: f64 }, + MultiQuadric { epsilon: f64 }, + InverseMultiQuadric { epsilon: f64 }, +} + +impl RadialBasisFunction { + fn compute(&self, r: f64) -> f64 { + match self { + RadialBasisFunction::Linear => r, + RadialBasisFunction::Cubic => r.powi(3), + RadialBasisFunction::Quintic => r.powi(5), + RadialBasisFunction::ThinPlateSpline => r.powi(2) * r.ln(), + RadialBasisFunction::Gaussian { epsilon } => (-(epsilon * r).powi(2)).exp(), + RadialBasisFunction::MultiQuadric { epsilon } => (1.0 + (epsilon * r).powi(2)).sqrt(), + RadialBasisFunction::InverseMultiQuadric { epsilon } => (1.0 + (epsilon * r).powi(2)).powf(-0.5), + } + } +} + +/// Perform radial-basis function interpolation from the given points. 
+/// +/// The provided points are a tuple of observed (x, y) values. +fn interpolate_rbf(points: &[(f64, f64)], function: &RadialBasisFunction, x: &[f64; N]) -> [f64; N] { + let n = points.len(); + + let matrix = DMatrix::from_fn(n, n, |r, c| { + let r = (points[c].0 - points[r].0).abs(); + function.compute(r) + }); + + let b = DMatrix::from_fn(n, 1, |r, _| points[r].1); + + let weights = matrix + .lu() + .solve(&b) + .expect("Failed to solve RBF system for interpolation weights."); + + let mut profile = [f64::default(); N]; + + for (profile, &doy) in profile.iter_mut().zip(x) { + *profile = points + .iter() + .enumerate() + .map(|(i, p)| { + let r = (doy - p.0).abs(); + let distance = function.compute(r); + distance * weights[(i, 0)] + }) + .sum(); + } + + profile +} + +/// Calculate the interpolation weights for the given points. +/// +/// This method repeats the point 365 days before and after the user provided points. This +/// helps create a cyclic interpolation suitable for a annual profile. It then repeats the +/// value for the 58th day to create a daily profile 366 days long. +fn interpolate_rbf_profile(points: &[(u32, f64)], function: &RadialBasisFunction) -> [f64; 366] { + // Replicate the points in the year before and after. 
+ let year_before = points.iter().map(|p| (p.0 as f64 - 365.0, p.1)); + let year_after = points.iter().map(|p| (p.0 as f64 + 365.0, p.1)); + let points: Vec<_> = year_before + .chain(points.iter().map(|p| (p.0 as f64, p.1))) + .chain(year_after) + .collect(); + + let mut x_out = [f64::default(); 365]; + for (i, v) in x_out.iter_mut().enumerate() { + *v = i as f64; + } + let short_profile = interpolate_rbf(&points, function, &x_out); + + let (start, end) = short_profile.split_at(58); + + let profile = [start, &[end[0]], end].concat(); + + profile.try_into().unwrap() +} + +#[cfg(test)] +mod tests { + use crate::parameters::profiles::rbf::{interpolate_rbf, interpolate_rbf_profile, RadialBasisFunction}; + use float_cmp::{assert_approx_eq, F64Margin}; + use std::f64::consts::PI; + + /// Test example from Wikipedia on Rbf interpolation + /// + /// This test compares values to those produced by Scipy's Rbf interpolation. + /// + /// For future reference, the Scipy code used to produce the expected values is as follows: + /// ```python + /// from scipy.interpolate import Rbf + /// import numpy as np + /// x = np.array([k / 14.0 for k in range(15)]) + /// f = np.exp(x * np.cos(3.0 * x * np.pi)) + /// + /// rbf = Rbf(x, f, function='gaussian', epsilon=1/3.0) + /// + /// x_out = np.array([k / 149.0 for k in range(150)]) + /// f_interp = rbf(x_out) + /// print(f_interp) + /// ``` + #[test] + fn test_rbf_interpolation() { + let points: Vec<(f64, f64)> = (0..15) + .map(|k| { + let x = k as f64 / 14.0; + let f = (x * (3.0 * x * PI).cos()).exp(); + (x, f) + }) + .collect(); + + let mut x_out = [f64::default(); 150]; + for (i, v) in x_out.iter_mut().enumerate() { + *v = i as f64 / 149.0; + } + + let rbf = RadialBasisFunction::Gaussian { epsilon: 3.0 }; + let f_interp = interpolate_rbf(&points, &rbf, &x_out); + + // Values computed from the Scipy RBF interpolation function for the same problem. 
+ let f_expected = [ + 0.99999999, 1.02215444, 1.03704224, 1.04658357, 1.05232959, 1.0555025, 1.05703598, 1.05761412, 1.05770977, + 1.05762023, 1.0575012, 1.05739784, 1.05727216, 1.0570282, 1.0565335, 1.05563715, 1.05418473, 1.05203042, + 1.04904584, 1.04512659, 1.04019611, 1.03420771, 1.02714462, 1.0190189, 1.00986897, 0.99975608, 0.98876095, + 0.97697989, 0.96451978, 0.9514951, 0.93802364, 0.92422356, 0.91021058, 0.89609542, 0.88198282, 0.86796961, + 0.85414519, 0.8405903, 0.82737825, 0.81457486, 0.80224023, 0.79042854, 0.77919009, 0.76857191, 0.75861923, + 0.74937591, 0.74088519, 0.73319047, 0.72633599, 0.72036607, 0.71532606, 0.71126198, 0.70821968, 0.7062455, + 0.70538494, 0.70568346, 0.7071849, 0.70993231, 0.71396743, 0.71933052, 0.72606058, 0.73419586, 0.74377345, + 0.75483021, 0.76740264, 0.78152758, 0.79724185, 0.81458285, 0.83358751, 0.85429299, 0.87673482, 0.90094656, + 0.926958, 0.95479321, 0.98446917, 1.01599247, 1.04935705, 1.08454095, 1.12150386, 1.16018313, 1.20049191, + 1.24231544, 1.28550918, 1.32989614, 1.37526651, 1.42137569, 1.46794495, 1.51466233, 1.56118419, 1.6071376, + 1.65212512, 1.69572785, 1.7375121, 1.77703531, 1.81385273, 1.84752542, 1.87762766, 1.90375533, 1.92553407, + 1.94262687, 1.95474147, 1.96163779, 1.96313291, 1.95910686, 1.94950578, 1.93434466, 1.91370844, 1.88775047, + 1.85669197, 1.82081727, 1.78046916, 1.73604268, 1.68797763, 1.63674943, 1.58286071, 1.52683076, 1.46918569, + 1.41044858, 1.35112887, 1.29171453, 1.23266261, 1.17439264, 1.11728046, 1.06165402, 1.00779065, 0.95591582, + 0.90620394, 0.8587805, 0.81372578, 0.77108031, 0.73085073, 0.69301704, 0.6575401, 0.62436898, 0.59344848, + 0.56472532, 0.53815332, 0.51369657, 0.49133094, 0.47104256, 0.45282388, 0.43666555, 0.42254569, 0.4104155, + 0.40018055, 0.39167888, 0.38465535, 0.37873281, 0.3733805, 0.36787943, + ]; + + for (i, e) in f_interp.iter().zip(f_expected) { + assert_approx_eq!(f64, *i, e, F64Margin { ulps: 2, epsilon: 1e-6 }); + } + } + + /// Test cyclical 
daily profile interpolation + /// + /// This test compares values to those produced by Scipy's Rbf interpolation. + /// + /// For future reference, the Scipy code used to produce the expected values is as follows: + /// ```python + /// from scipy.interpolate import Rbf + /// import numpy as np + /// x = np.array([90, 180, 270]) + /// f = np.array([0.5, 0.3, 0.7]) + /// + /// x = np.concatenate([x - 365, x, x + 365]) + /// f = np.concatenate([f, f, f]) + /// + /// rbf = Rbf(x, f, function='multiquadric', epsilon=50.0) + /// x_out = np.array([k for k in range(365)]) + /// f_interp = rbf(x_out) + /// print(f_interp) + /// ``` + #[test] + fn test_rbf_interpolation_profile() { + let points: Vec<(u32, f64)> = vec![(90, 0.5), (180, 0.3), (270, 0.7)]; + + let rbf = RadialBasisFunction::MultiQuadric { epsilon: 1.0 / 50.0 }; + let f_interp = interpolate_rbf_profile(&points, &rbf); + + let f_expected = [ + 0.69464463, 0.69308183, 0.69150736, 0.68992139, 0.68832406, 0.68671551, 0.68509589, 0.68346531, 0.68182389, + 0.68017171, 0.67850888, 0.67683548, 0.67515156, 0.6734572, 0.67175245, 0.67003733, 0.66831189, 0.66657615, + 0.66483011, 0.66307377, 0.66130712, 0.65953014, 0.65774281, 0.65594508, 0.6541369, 0.65231821, 0.65048893, + 0.64864899, 0.64679829, 0.64493672, 0.64306417, 0.64118051, 0.63928561, 0.63737931, 0.63546146, 0.63353187, + 0.63159038, 0.62963677, 0.62767084, 0.62569237, 0.62370112, 0.62169685, 0.61967931, 0.61764821, 0.61560328, + 0.61354422, 0.61147072, 0.60938246, 0.60727911, 0.60516031, 0.60302571, 0.60087495, 0.59870763, 0.59652337, + 0.59432175, 0.59210238, 0.58986482, 0.58760865, 0.58533341, 0.58533341, 0.58303867, 0.58072398, 0.57838887, + 0.57603288, 0.57365555, 0.57125641, 0.568835, 0.56639087, 0.56392355, 0.5614326, 0.55891758, 0.55637805, + 0.55381361, 0.55122386, 0.54860842, 0.54596693, 0.54329907, 0.54060452, 0.53788302, 0.53513433, 0.53235824, + 0.5295546, 0.52672327, 0.52386419, 0.52097732, 0.51806269, 0.51512038, 0.5121505, 0.50915325, 0.50612887, 
+ 0.50307767, 0.5, 0.4968963, 0.49376705, 0.4906128, 0.48743418, 0.48423185, 0.48100655, 0.47775909, + 0.47449034, 0.4712012, 0.46789267, 0.46456578, 0.46122162, 0.45786134, 0.45448613, 0.45109726, 0.44769602, + 0.44428374, 0.44086183, 0.43743171, 0.43399486, 0.4305528, 0.42710707, 0.42365927, 0.42021102, 0.416764, + 0.41331988, 0.4098804, 0.40644733, 0.40302245, 0.3996076, 0.39620462, 0.3928154, 0.38944187, 0.38608597, + 0.38274969, 0.37943505, 0.37614408, 0.37287886, 0.36964152, 0.3664342, 0.36325908, 0.36011837, 0.35701434, + 0.35394927, 0.35092549, 0.34794536, 0.34501129, 0.34212571, 0.33929111, 0.33650999, 0.33378492, 0.33111848, + 0.32851331, 0.32597206, 0.32349743, 0.32109215, 0.31875898, 0.31650072, 0.31432016, 0.31222016, 0.31020357, + 0.30827325, 0.30643209, 0.30468296, 0.30302876, 0.30147235, 0.30001661, 0.29866436, 0.29741843, 0.2962816, + 0.29525658, 0.29434606, 0.29355265, 0.29287889, 0.29232723, 0.29190003, 0.29159955, 0.29142793, 0.29138718, + 0.2914792, 0.29170571, 0.29206829, 0.29256837, 0.29320718, 0.29398581, 0.29490512, 0.29596581, 0.29716836, + 0.29851306, 0.3, 0.30162905, 0.30339988, 0.30531196, 0.30736453, 0.30955665, 0.31188717, 0.31435474, + 0.31695784, 0.31969475, 0.32256357, 0.32556225, 0.32868857, 0.33194015, 0.33531448, 0.33880892, 0.34242071, + 0.34614696, 0.34998469, 0.35393082, 0.35798222, 0.36213562, 0.36638776, 0.37073525, 0.37517472, 0.3797027, + 0.38431572, 0.38901027, 0.39378283, 0.39862985, 0.40354777, 0.40853303, 0.41358206, 0.4186913, 0.42385719, + 0.42907617, 0.43434469, 0.43965922, 0.44501624, 0.45041222, 0.45584367, 0.4613071, 0.46679904, 0.47231604, + 0.47785464, 0.48341142, 0.48898296, 0.49456585, 0.5001567, 0.50575214, 0.51134878, 0.51694328, 0.52253228, + 0.52811245, 0.53368045, 0.53923298, 0.54476672, 0.55027838, 0.55576468, 0.56122234, 0.56664811, 0.57203875, + 0.57739104, 0.58270178, 0.58796779, 0.59318592, 0.59835305, 0.60346609, 0.60852201, 0.61351779, 0.61845048, + 0.62331718, 0.62811505, 0.63284131, 0.63749327, 
0.64206831, 0.64656388, 0.65097754, 0.65530696, 0.65954989, + 0.66370421, 0.66776792, 0.67173914, 0.67561613, 0.67939727, 0.68308111, 0.68666633, 0.69015176, 0.6935364, + 0.69681937, 0.7, 0.70307774, 0.7060522, 0.70892317, 0.71169059, 0.71435453, 0.71691524, 0.7193731, + 0.72172864, 0.7239825, 0.72613549, 0.72818851, 0.73014259, 0.73199887, 0.73375858, 0.73542305, 0.7369937, + 0.73847202, 0.73985957, 0.74115796, 0.74236887, 0.74349402, 0.74453517, 0.7454941, 0.74637263, 0.74717258, + 0.7478958, 0.74854413, 0.74911943, 0.74962353, 0.75005827, 0.75042547, 0.75072693, 0.75096445, 0.75113978, + 0.75125466, 0.75131079, 0.75130986, 0.75125351, 0.75114335, 0.75098096, 0.75076789, 0.75050563, 0.75019565, + 0.74983939, 0.74943824, 0.74899356, 0.74850665, 0.74797881, 0.74741128, 0.74680526, 0.74616191, 0.74548238, + 0.74476776, 0.74401911, 0.74323746, 0.74242382, 0.74157913, 0.74070433, 0.73980031, 0.73886796, 0.7379081, + 0.73692155, 0.73590908, 0.73487145, 0.73380939, 0.7327236, 0.73161476, 0.73048351, 0.72933049, 0.7281563, + 0.72696153, 0.72574673, 0.72451244, 0.7232592, 0.72198749, 0.72069779, 0.71939058, 0.71806629, 0.71672535, + 0.71536817, 0.71399514, 0.71260665, 0.71120305, 0.70978469, 0.7083519, 0.706905, 0.7054443, 0.70397008, + 0.70248262, 0.70098218, 0.69946903, 0.69794338, 0.69640548, 0.69485553, + ]; + + for (i, e) in f_interp.iter().zip(f_expected) { + assert_approx_eq!(f64, *i, e, F64Margin { ulps: 2, epsilon: 1e-6 }); + } + } +} diff --git a/pywr-schema/src/error.rs b/pywr-schema/src/error.rs index 37ba4a60..5c99e541 100644 --- a/pywr-schema/src/error.rs +++ b/pywr-schema/src/error.rs @@ -41,6 +41,8 @@ pub enum SchemaError { UnexpectedParameterType(String), #[error("mismatch in the length of data provided. 
expected: {expected}, found: {found}")] DataLengthMismatch { expected: usize, found: usize }, + #[error("Failed to estimate epsilon for use in the radial basis function.")] + RbfEpsilonEstimation, } impl From for PyErr { @@ -79,4 +81,11 @@ pub enum ConversionError { UnsupportedFeature { feature: String, name: String }, #[error("Parameter {name:?} of type `{ty:?}` are not supported in Pywr v2. {instead:?}")] DeprecatedParameter { ty: String, name: String, instead: String }, + #[error("Unexpected type for attribute {attr} on parameter {name}. Expected `{expected}`, found `{actual}`")] + UnexpectedType { + attr: String, + name: String, + expected: String, + actual: String, + }, } diff --git a/pywr-schema/src/parameters/doc_examples/rbf_1.json b/pywr-schema/src/parameters/doc_examples/rbf_1.json new file mode 100644 index 00000000..add053de --- /dev/null +++ b/pywr-schema/src/parameters/doc_examples/rbf_1.json @@ -0,0 +1,10 @@ +{ + "name": "my-interpolated-profile", + "type": "RbfProfile", + "points": [ + [90, 0.5], + [180, 0.3], + [270, 0.7] + ], + "function": {"Gaussian": { "epsilon": 3.0 }} +} diff --git a/pywr-schema/src/parameters/doc_examples/rbf_2.json b/pywr-schema/src/parameters/doc_examples/rbf_2.json new file mode 100644 index 00000000..0f90a146 --- /dev/null +++ b/pywr-schema/src/parameters/doc_examples/rbf_2.json @@ -0,0 +1,16 @@ +{ + "name": "my-interpolated-profile", + "type": "RbfProfile", + "points": [ + [90, 0.5], + [180, 0.3], + [270, 0.7] + ], + "function": {"Gaussian": { "epsilon": 3.0 }}, + "variable": { + "is_active": true, + "days_of_year_range": 30, + "value_upper_bounds": 1.0, + "value_lower_bounds": 0.0 + } +} diff --git a/pywr-schema/src/parameters/mod.rs b/pywr-schema/src/parameters/mod.rs index 5c1eed0e..0795f8a8 100644 --- a/pywr-schema/src/parameters/mod.rs +++ b/pywr-schema/src/parameters/mod.rs @@ -38,7 +38,8 @@ pub use super::parameters::discount_factor::DiscountFactorParameter; pub use 
super::parameters::indexed_array::IndexedArrayParameter; pub use super::parameters::polynomial::Polynomial1DParameter; pub use super::parameters::profiles::{ - DailyProfileParameter, MonthlyProfileParameter, UniformDrawdownProfileParameter, + DailyProfileParameter, MonthlyProfileParameter, RadialBasisFunction, RbfProfileParameter, + RbfProfileVariableSettings, UniformDrawdownProfileParameter, }; pub use super::parameters::python::PythonParameter; pub use super::parameters::tables::TablesArrayParameter; @@ -166,6 +167,7 @@ pub enum Parameter { Offset(OffsetParameter), DiscountFactor(DiscountFactorParameter), Interpolated(InterpolatedParameter), + RbfProfile(RbfProfileParameter), } impl Parameter { @@ -196,6 +198,7 @@ impl Parameter { Self::Offset(p) => p.meta.name.as_str(), Self::DiscountFactor(p) => p.meta.name.as_str(), Self::Interpolated(p) => p.meta.name.as_str(), + Self::RbfProfile(p) => p.meta.name.as_str(), } } @@ -228,6 +231,7 @@ impl Parameter { Self::Offset(p) => p.node_references(), Self::DiscountFactor(p) => p.node_references(), Self::Interpolated(p) => p.node_references(), + Self::RbfProfile(p) => p.node_references(), } } @@ -277,6 +281,7 @@ impl Parameter { Self::Offset(_) => "Offset", Self::DiscountFactor(_) => "DiscountFactor", Self::Interpolated(_) => "Interpolated", + Self::RbfProfile(_) => "RbfProfile", } } @@ -314,6 +319,7 @@ impl Parameter { Self::Offset(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), Self::DiscountFactor(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), Self::Interpolated(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), + Self::RbfProfile(p) => ParameterType::Parameter(p.add_to_model(model)?), }; Ok(ty) @@ -414,6 +420,9 @@ impl TryFromV1Parameter for Parameter { instead: "Use a derived metric instead.".to_string(), }) } + CoreParameter::RbfProfile(p) => { + Parameter::RbfProfile(p.try_into_v2_parameter(parent_node, unnamed_count)?) 
+ } }, ParameterV1::Custom(p) => { println!("Custom parameter: {:?} ({})", p.meta.name, p.ty); diff --git a/pywr-schema/src/parameters/profiles.rs b/pywr-schema/src/parameters/profiles.rs index 7294d1db..6ece1d48 100644 --- a/pywr-schema/src/parameters/profiles.rs +++ b/pywr-schema/src/parameters/profiles.rs @@ -6,7 +6,7 @@ use crate::parameters::{ use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::{ DailyProfileParameter as DailyProfileParameterV1, MonthInterpDay as MonthInterpDayV1, - MonthlyProfileParameter as MonthlyProfileParameterV1, + MonthlyProfileParameter as MonthlyProfileParameterV1, RbfProfileParameter as RbfProfileParameterV1, UniformDrawdownProfileParameter as UniformDrawdownProfileParameterV1, }; use std::collections::HashMap; @@ -218,3 +218,258 @@ impl TryFromV1Parameter for UniformDrawdownPr Ok(p) } } + +/// Distance functions for radial basis function interpolation. +#[derive(serde::Deserialize, serde::Serialize, Debug, Copy, Clone)] +pub enum RadialBasisFunction { + Linear, + Cubic, + Quintic, + ThinPlateSpline, + Gaussian { epsilon: Option }, + MultiQuadric { epsilon: Option }, + InverseMultiQuadric { epsilon: Option }, +} + +impl RadialBasisFunction { + /// Convert the schema representation of the RBF into `pywr_core` type. + /// + /// If required this will estimate values of from the provided points. 
+ fn into_core_rbf(self, points: &[(u32, f64)]) -> Result { + let rbf = match self { + Self::Linear => pywr_core::parameters::RadialBasisFunction::Linear, + Self::Cubic => pywr_core::parameters::RadialBasisFunction::Cubic, + Self::Quintic => pywr_core::parameters::RadialBasisFunction::Cubic, + Self::ThinPlateSpline => pywr_core::parameters::RadialBasisFunction::ThinPlateSpline, + Self::Gaussian { epsilon } => { + let epsilon = match epsilon { + Some(e) => e, + None => estimate_epsilon(points).ok_or(SchemaError::RbfEpsilonEstimation)?, + }; + + pywr_core::parameters::RadialBasisFunction::Gaussian { epsilon } + } + Self::MultiQuadric { epsilon } => { + let epsilon = match epsilon { + Some(e) => e, + None => estimate_epsilon(points).ok_or(SchemaError::RbfEpsilonEstimation)?, + }; + + pywr_core::parameters::RadialBasisFunction::MultiQuadric { epsilon } + } + Self::InverseMultiQuadric { epsilon } => { + let epsilon = match epsilon { + Some(e) => e, + None => estimate_epsilon(points).ok_or(SchemaError::RbfEpsilonEstimation)?, + }; + + pywr_core::parameters::RadialBasisFunction::InverseMultiQuadric { epsilon } + } + }; + + Ok(rbf) + } +} + +/// Compute an estimate for epsilon. +/// +/// If there `points` is empty then `None` is returned. +fn estimate_epsilon(points: &[(u32, f64)]) -> Option { + if points.is_empty() { + return None; + } + + // SAFETY: Above check that points is non-empty should make these unwraps safe. + let x_min = points.iter().map(|(x, _)| *x).min().unwrap(); + let x_max = points.iter().map(|(x, _)| *x).max().unwrap(); + let y_min = points.iter().map(|(_, y)| *y).reduce(f64::min).unwrap(); + let y_max = points.iter().map(|(_, y)| *y).reduce(f64::max).unwrap(); + + let mut x_range = x_max - x_min; + if x_range == 0 { + x_range = 1; + } + let mut y_range = y_max - y_min; + if y_range == 0.0 { + y_range = 1.0; + } + + Some((x_range as f64 * y_range).powf(1.0 / points.len() as f64)) +} + +/// Settings for a variable RBF profile. 
+#[derive(serde::Deserialize, serde::Serialize, Debug, Clone, Copy)] +pub struct RbfProfileVariableSettings { + /// Is this parameter an active variable? + pub is_active: bool, + /// Optional maximum number of days that the interpolation points can be moved from their + /// original position. If this is `None` then the points can not be moved from their + /// original day of the year. + pub days_of_year_range: Option, + /// Optional upper bound for the value of each interpolation point. If this is `None` then + /// there is no upper bound. + pub value_upper_bounds: Option, + /// Optional lower bound for the value of each interpolation point. If this is `None` then + /// the lower bound is zero. + pub value_lower_bounds: Option, +} + +impl Into for RbfProfileVariableSettings { + fn into(self) -> pywr_core::parameters::RbfProfileVariableConfig { + pywr_core::parameters::RbfProfileVariableConfig::new( + self.days_of_year_range, + self.value_upper_bounds.unwrap_or(f64::INFINITY), + self.value_lower_bounds.unwrap_or(0.0), + ) + } +} + +/// A parameter that interpolates between a set of points using a radial basis function to +/// create a daily profile. +/// +/// # JSON Examples +/// +/// The example below shows the definition of a [`RbfProfileParameter`] in JSON. +/// +/// ```json +#[doc = include_str!("doc_examples/rbf_1.json")] +/// ``` +/// +/// The example below shows the definition of a [`RbfProfileParameter`] in JSON with variable +/// settings defined. This settings determine how the interpolation points be modified by +/// external algorithms. See [`RbfProfileVariableSettings`] for more information. +/// +/// ```json +#[doc = include_str!("doc_examples/rbf_2.json")] +/// ``` +/// +#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] +pub struct RbfProfileParameter { + #[serde(flatten)] + pub meta: ParameterMeta, + /// The points are the profile positions defined by an ordinal day of the year and a value. 
+ /// Radial basis function interpolation is used to create a daily profile from these points. + pub points: Vec<(u32, f64)>, + /// The distance function used for interpolation. + pub function: RadialBasisFunction, + /// Definition of optional variable settings. + pub variable: Option, +} + +impl RbfProfileParameter { + pub fn node_references(&self) -> HashMap<&str, &str> { + HashMap::new() + } + pub fn parameters(&self) -> HashMap<&str, DynamicFloatValueType> { + HashMap::new() + } + + pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result { + let variable = match self.variable { + None => None, + Some(v) => { + // Only set the variable data if the user has indicated the variable is active. + if v.is_active { + Some(v.into()) + } else { + None + } + } + }; + + let function = self.function.into_core_rbf(&self.points)?; + + let p = + pywr_core::parameters::RbfProfileParameter::new(&self.meta.name, self.points.clone(), function, variable); + Ok(model.add_parameter(Box::new(p))?) + } +} + +impl TryFromV1Parameter for RbfProfileParameter { + type Error = ConversionError; + + fn try_from_v1_parameter( + v1: RbfProfileParameterV1, + parent_node: Option<&str>, + unnamed_count: &mut usize, + ) -> Result { + let meta: ParameterMeta = v1.meta.into_v2_parameter(parent_node, unnamed_count); + + let points = v1 + .days_of_year + .into_iter() + .zip(v1.values.into_iter()) + .map(|(doy, v)| (doy, v)) + .collect(); + + if v1.rbf_kwargs.contains_key("smooth") { + return Err(ConversionError::UnsupportedFeature { + feature: "The RBF `smooth` keyword argument is not supported.".to_string(), + name: meta.name, + }); + } + + if v1.rbf_kwargs.contains_key("norm") { + return Err(ConversionError::UnsupportedFeature { + feature: "The RBF `norm` keyword argument is not supported.".to_string(), + name: meta.name, + }); + } + + // Parse any epsilon value; we expect a float here. 
+ let epsilon = if let Some(epsilon_value) = v1.rbf_kwargs.get("epsilon") { + if let Some(epsilon_f64) = epsilon_value.as_f64() { + Some(epsilon_f64) + } else { + return Err(ConversionError::UnexpectedType { + attr: "epsilon".to_string(), + name: meta.name, + expected: "float".to_string(), + actual: format!("{}", epsilon_value), + }); + } + } else { + None + }; + + let function = if let Some(function_value) = v1.rbf_kwargs.get("function") { + if let Some(function_str) = function_value.as_str() { + // Function kwarg is a string! + let f = match function_str { + "multiquadric" => RadialBasisFunction::MultiQuadric { epsilon }, + "inverse" => RadialBasisFunction::InverseMultiQuadric { epsilon }, + "gaussian" => RadialBasisFunction::Gaussian { epsilon }, + "linear" => RadialBasisFunction::Linear, + "cubic" => RadialBasisFunction::Cubic, + "thin_plate" => RadialBasisFunction::ThinPlateSpline, + _ => { + return Err(ConversionError::UnsupportedFeature { + feature: format!("Radial basis function `{}` not supported.", function_str), + name: meta.name.clone(), + }) + } + }; + f + } else { + return Err(ConversionError::UnexpectedType { + attr: "function".to_string(), + name: meta.name, + expected: "string".to_string(), + actual: format!("{}", function_value), + }); + } + } else { + // Default to multi-quadratic + RadialBasisFunction::MultiQuadric { epsilon } + }; + + let p = Self { + meta, + points, + function, + variable: None, + }; + + Ok(p) + } +} From 1f4c756da8318f2583a62aa24639b50bde72efbf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 10:27:29 +0000 Subject: [PATCH 5/7] chore(deps): Bump actions/setup-python from 4 to 5 (#78) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. 
- [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index a884b747..be714375 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -21,7 +21,7 @@ jobs: sudo apt-get update sudo apt-get install libhdf5-dev ocl-icd-opencl-dev zlib1g-dev - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install Dependencies @@ -47,7 +47,7 @@ jobs: sudo apt-get update sudo apt-get install libhdf5-dev ocl-icd-opencl-dev liblzma-dev zlib1g-dev - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Build and test From 31a65fa3733232fca75ccc6945adad7504e598cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Jan 2024 10:19:15 +0000 Subject: [PATCH 6/7] chore(deps): Update polars requirement from 0.35.4 to 0.36.2 (#80) Updates the requirements on [polars](https://github.com/pola-rs/polars) to permit the latest version. - [Release notes](https://github.com/pola-rs/polars/releases) - [Commits](https://github.com/pola-rs/polars/commits) --- updated-dependencies: - dependency-name: polars dependency-type: direct:production ... 
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: James Tomlinson --- Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 853a9e71..3bad60c0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,8 +38,8 @@ thiserror = "1.0.25" time = { version = "0.3", features = ["serde", "serde-well-known", "serde-human-readable", "macros"] } num = "0.4.0" ndarray = "0.15.3" -polars = { version = "0.35.4", features = ["lazy", "rows", "ndarray"] } -pyo3-polars = "0.9.0" +polars = { version = "0.36.2", features = ["lazy", "rows", "ndarray"] } +pyo3-polars = "0.10.0" pyo3 = { version = "0.20.0" } tracing = "0.1" csv = "1.1" From 651d7bb7af407ff69e7d35a109a72366db8de28c Mon Sep 17 00:00:00 2001 From: James Tomlinson Date: Fri, 12 Jan 2024 23:00:00 +0000 Subject: [PATCH 7/7] feat: Add support for multi-model simulation. (#60) Add `PywrMultiModel` schema and corresponding `MultiNetworkModel` to pywr-core. This update refactors the schema of the regular model to use a new "network" key. The schema then provides a `PywrNetwork` containing only the nodes, edges, parameters, metric sets and outputs. This is then reused by the regular model and the multi-model. A new metric is added that can receive a metric from another model. This is implemented via the transfers in the multi-model configuration. This is fairly generic because it uses the metric system to allowing passing of node or parameter data from one model to another. 
--- pywr-cli/src/main.rs | 95 ++- pywr-core/benches/random_models.rs | 182 +----- pywr-core/src/aggregated_node.rs | 62 +- pywr-core/src/derived_metric.rs | 20 +- pywr-core/src/edge.rs | 4 +- pywr-core/src/lib.rs | 20 +- pywr-core/src/metric.rs | 9 +- pywr-core/src/models/mod.rs | 55 ++ pywr-core/src/models/multi.rs | 394 ++++++++++++ pywr-core/src/models/simple.rs | 307 +++++++++ pywr-core/src/{model.rs => network.rs} | 584 ++++++------------ pywr-core/src/node.rs | 148 ++--- pywr-core/src/parameters/aggregated.rs | 4 +- pywr-core/src/parameters/aggregated_index.rs | 4 +- pywr-core/src/parameters/array.rs | 6 +- pywr-core/src/parameters/asymmetric.rs | 4 +- pywr-core/src/parameters/constant.rs | 4 +- .../parameters/control_curves/apportion.rs | 4 +- .../src/parameters/control_curves/index.rs | 4 +- .../parameters/control_curves/interpolated.rs | 4 +- .../parameters/control_curves/piecewise.rs | 6 +- .../src/parameters/control_curves/simple.rs | 4 +- .../control_curves/volume_between.rs | 10 +- pywr-core/src/parameters/delay.rs | 8 +- pywr-core/src/parameters/discount_factor.rs | 9 +- pywr-core/src/parameters/division.rs | 4 +- pywr-core/src/parameters/indexed_array.rs | 4 +- pywr-core/src/parameters/interpolated.rs | 10 +- pywr-core/src/parameters/max.rs | 4 +- pywr-core/src/parameters/min.rs | 4 +- pywr-core/src/parameters/mod.rs | 14 +- pywr-core/src/parameters/negative.rs | 4 +- pywr-core/src/parameters/offset.rs | 4 +- pywr-core/src/parameters/polynomial.rs | 4 +- pywr-core/src/parameters/profiles/daily.rs | 4 +- pywr-core/src/parameters/profiles/monthly.rs | 6 +- pywr-core/src/parameters/profiles/rbf.rs | 4 +- .../parameters/profiles/uniform_drawdown.rs | 4 +- pywr-core/src/parameters/py.rs | 31 +- pywr-core/src/parameters/rhai.rs | 14 +- pywr-core/src/parameters/simple_wasm.rs | 4 +- pywr-core/src/parameters/threshold.rs | 4 +- pywr-core/src/parameters/vector.rs | 4 +- pywr-core/src/recorders/csv.rs | 9 +- pywr-core/src/recorders/hdf.rs | 9 +- 
pywr-core/src/recorders/metric_set.rs | 4 +- pywr-core/src/recorders/mod.rs | 28 +- pywr-core/src/scenario.rs | 48 +- pywr-core/src/solvers/builder.rs | 108 ++-- pywr-core/src/solvers/clp/mod.rs | 8 +- pywr-core/src/solvers/highs/mod.rs | 14 +- pywr-core/src/solvers/ipm_ocl/mod.rs | 93 +-- pywr-core/src/solvers/ipm_simd/mod.rs | 76 +-- pywr-core/src/solvers/mod.rs | 11 +- pywr-core/src/state.rs | 33 +- pywr-core/src/test_utils.rs | 169 ++--- pywr-core/src/timestep.rs | 21 +- pywr-core/src/virtual_storage.rs | 68 +- pywr-python/src/lib.rs | 12 +- pywr-schema/src/error.rs | 6 + pywr-schema/src/metric_sets/mod.rs | 18 +- pywr-schema/src/model.rs | 440 +++++++++++-- .../src/nodes/annual_virtual_storage.rs | 26 +- pywr-schema/src/nodes/core.rs | 162 ++--- pywr-schema/src/nodes/delay.rs | 38 +- pywr-schema/src/nodes/loss_link.rs | 30 +- pywr-schema/src/nodes/mod.rs | 123 ++-- .../src/nodes/monthly_virtual_storage.rs | 26 +- pywr-schema/src/nodes/piecewise_link.rs | 48 +- pywr-schema/src/nodes/piecewise_storage.rs | 88 ++- pywr-schema/src/nodes/river.rs | 8 +- pywr-schema/src/nodes/river_gauge.rs | 116 ++-- .../src/nodes/river_split_with_gauge.rs | 56 +- pywr-schema/src/nodes/virtual_storage.rs | 26 +- .../src/nodes/water_treatment_works.rs | 137 ++-- pywr-schema/src/outputs/csv.rs | 18 +- pywr-schema/src/outputs/hdf.rs | 18 +- pywr-schema/src/outputs/mod.rs | 6 +- pywr-schema/src/parameters/aggregated.rs | 20 +- .../src/parameters/asymmetric_switch.rs | 16 +- pywr-schema/src/parameters/control_curves.rs | 46 +- pywr-schema/src/parameters/core.rs | 54 +- pywr-schema/src/parameters/data_frame.rs | 16 +- pywr-schema/src/parameters/delay.rs | 14 +- pywr-schema/src/parameters/discount_factor.rs | 12 +- pywr-schema/src/parameters/indexed_array.rs | 14 +- pywr-schema/src/parameters/interpolated.rs | 24 +- pywr-schema/src/parameters/mod.rs | 194 ++++-- pywr-schema/src/parameters/offset.rs | 14 +- pywr-schema/src/parameters/polynomial.rs | 6 +- 
pywr-schema/src/parameters/profiles.rs | 16 +- pywr-schema/src/parameters/python.rs | 39 +- pywr-schema/src/parameters/tables.rs | 16 +- pywr-schema/src/parameters/thresholds.rs | 16 +- pywr-schema/src/test_models/csv1.json | 100 +-- pywr-schema/src/test_models/delay1.json | 60 +- pywr-schema/src/test_models/hdf1.json | 100 +-- pywr-schema/src/test_models/multi1/model.json | 34 + .../src/test_models/multi1/network1.json | 39 ++ .../src/test_models/multi1/network2.json | 42 ++ pywr-schema/src/test_models/multi2/model.json | 44 ++ .../src/test_models/multi2/network1.json | 42 ++ .../src/test_models/multi2/network2.json | 42 ++ .../src/test_models/piecewise_link1.json | 82 +-- .../src/test_models/piecewise_storage1.json | 80 +-- .../src/test_models/piecewise_storage2.json | 139 +++-- .../test_models/river_split_with_gauge1.json | 76 +-- pywr-schema/src/test_models/simple1.json | 68 +- 108 files changed, 3608 insertions(+), 2036 deletions(-) create mode 100644 pywr-core/src/models/mod.rs create mode 100644 pywr-core/src/models/multi.rs create mode 100644 pywr-core/src/models/simple.rs rename pywr-core/src/{model.rs => network.rs} (79%) create mode 100644 pywr-schema/src/test_models/multi1/model.json create mode 100644 pywr-schema/src/test_models/multi1/network1.json create mode 100644 pywr-schema/src/test_models/multi1/network2.json create mode 100644 pywr-schema/src/test_models/multi2/model.json create mode 100644 pywr-schema/src/test_models/multi2/network1.json create mode 100644 pywr-schema/src/test_models/multi2/network2.json diff --git a/pywr-cli/src/main.rs b/pywr-cli/src/main.rs index 9536e32a..c6a4377d 100644 --- a/pywr-cli/src/main.rs +++ b/pywr-cli/src/main.rs @@ -1,6 +1,5 @@ use anyhow::{Context, Result}; use clap::{Parser, Subcommand, ValueEnum}; -use pywr_core::model::Model; #[cfg(feature = "ipm-ocl")] use pywr_core::solvers::{ClIpmF32Solver, ClIpmF64Solver, ClIpmSolverSettings}; use pywr_core::solvers::{ClpSolver, ClpSolverSettings}; @@ -9,16 +8,13 @@ use 
pywr_core::solvers::{HighsSolver, HighsSolverSettings}; #[cfg(feature = "ipm-simd")] use pywr_core::solvers::{SimdIpmF64Solver, SimdIpmSolverSettings}; use pywr_core::test_utils::make_random_model; -use pywr_core::timestep::Timestepper; use pywr_core::tracing::setup_tracing; -use pywr_core::PywrError; -use pywr_schema::model::PywrModel; +use pywr_schema::model::{PywrModel, PywrMultiNetworkModel}; use pywr_schema::ConversionError; use rand::SeedableRng; use rand_chacha::ChaCha8Rng; use std::fmt::{Display, Formatter}; use std::path::{Path, PathBuf}; -use time::macros::date; #[derive(Copy, Clone, ValueEnum)] enum Solver { @@ -92,6 +88,25 @@ enum Commands { #[arg(long, default_value_t = false)] debug: bool, }, + RunMulti { + /// Path to Pywr model JSON. + model: PathBuf, + /// Solver to use. + #[arg(short, long, default_value_t=Solver::Clp)] + solver: Solver, + #[arg(short, long)] + data_path: Option, + #[arg(short, long)] + output_path: Option, + /// Use multiple threads for simulation. + #[arg(short, long, default_value_t = false)] + parallel: bool, + /// The number of threads to use in parallel simulation. 
+ #[arg(short, long, default_value_t = 1)] + threads: usize, + #[arg(long, default_value_t = false)] + debug: bool, + }, RunRandom { num_systems: usize, density: usize, @@ -117,6 +132,15 @@ fn main() -> Result<()> { threads, debug, } => run(model, solver, data_path.as_deref(), output_path.as_deref(), *debug), + Commands::RunMulti { + model, + solver, + data_path, + output_path, + parallel, + threads, + debug, + } => run_multi(model, solver, data_path.as_deref(), output_path.as_deref(), *debug), Commands::RunRandom { num_systems, density, @@ -171,50 +195,63 @@ fn run(path: &Path, solver: &Solver, data_path: Option<&Path>, output_path: Opti setup_tracing(debug).unwrap(); let data = std::fs::read_to_string(path).unwrap(); + let data_path = data_path.or_else(|| path.parent()); let schema_v2: PywrModel = serde_json::from_str(data.as_str()).unwrap(); - let (model, timestepper): (Model, Timestepper) = schema_v2.build_model(data_path, output_path).unwrap(); + let model = schema_v2.build_model(data_path, output_path).unwrap(); match *solver { - Solver::Clp => model.run::(×tepper, &ClpSolverSettings::default()), + Solver::Clp => model.run::(&ClpSolverSettings::default()), #[cfg(feature = "highs")] - Solver::HIGHS => model.run::(×tepper, &HighsSolverSettings::default()), + Solver::HIGHS => model.run::(&HighsSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - Solver::CLIPMF32 => model.run_multi_scenario::(×tepper, &ClIpmSolverSettings::default()), + Solver::CLIPMF32 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - Solver::CLIPMF64 => model.run_multi_scenario::(×tepper, &ClIpmSolverSettings::default()), + Solver::CLIPMF64 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), #[cfg(feature = "ipm-simd")] - Solver::IpmSimd => { - model.run_multi_scenario::>(×tepper, &SimdIpmSolverSettings::default()) - } + Solver::IpmSimd => model.run_multi_scenario::>(&SimdIpmSolverSettings::default()), + } + .unwrap(); +} + +fn 
run_multi(path: &Path, solver: &Solver, data_path: Option<&Path>, output_path: Option<&Path>, debug: bool) { + setup_tracing(debug).unwrap(); + + let data = std::fs::read_to_string(path).unwrap(); + let data_path = data_path.or_else(|| path.parent()); + + let schema_v2: PywrMultiNetworkModel = serde_json::from_str(data.as_str()).unwrap(); + + let model = schema_v2.build_model(data_path, output_path).unwrap(); + + match *solver { + Solver::Clp => model.run::(&ClpSolverSettings::default()), + #[cfg(feature = "highs")] + Solver::HIGHS => model.run::(&HighsSolverSettings::default()), + #[cfg(feature = "ipm-ocl")] + Solver::CLIPMF32 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), + #[cfg(feature = "ipm-ocl")] + Solver::CLIPMF64 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), + #[cfg(feature = "ipm-simd")] + Solver::IpmSimd => model.run_multi_scenario::>(&SimdIpmSolverSettings::default()), } .unwrap(); } fn run_random(num_systems: usize, density: usize, num_scenarios: usize, solver: &Solver) { - let timestepper = Timestepper::new(date!(2020 - 01 - 01), date!(2020 - 01 - 10), 1); let mut rng = ChaCha8Rng::seed_from_u64(0); - let model = make_random_model( - num_systems, - density, - timestepper.timesteps().len(), - num_scenarios, - &mut rng, - ) - .unwrap(); + let model = make_random_model(num_systems, density, num_scenarios, &mut rng).unwrap(); match *solver { - Solver::Clp => model.run::(×tepper, &ClpSolverSettings::default()), + Solver::Clp => model.run::(&ClpSolverSettings::default()), #[cfg(feature = "highs")] - Solver::HIGHS => model.run::(×tepper, &HighsSolverSettings::default()), + Solver::HIGHS => model.run::(&HighsSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - Solver::CLIPMF32 => model.run_multi_scenario::(×tepper, &ClIpmSolverSettings::default()), + Solver::CLIPMF32 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - Solver::CLIPMF64 => model.run_multi_scenario::(×tepper, 
&ClIpmSolverSettings::default()), + Solver::CLIPMF64 => model.run_multi_scenario::(&ClIpmSolverSettings::default()), #[cfg(feature = "ipm-simd")] - Solver::IpmSimd => { - model.run_multi_scenario::>(×tepper, &SimdIpmSolverSettings::default()) - } + Solver::IpmSimd => model.run_multi_scenario::>(&SimdIpmSolverSettings::default()), } .unwrap(); } diff --git a/pywr-core/benches/random_models.rs b/pywr-core/benches/random_models.rs index 1062621a..a36be743 100644 --- a/pywr-core/benches/random_models.rs +++ b/pywr-core/benches/random_models.rs @@ -31,10 +31,6 @@ fn random_benchmark( solver_setups: &[SolverSetup], // TODO This should be an enum (see one also in main.rs; should incorporated into the crate). sample_size: Option, ) { - // Run 10 time-steps - let timestepper = Timestepper::new(date!(2020 - 01 - 01), date!(2020 - 01 - 10), 1); - let timesteps = timestepper.timesteps(); - let mut group = c.benchmark_group(group_name); // group.sampling_mode(SamplingMode::Flat); if let Some(n) = sample_size { @@ -48,10 +44,11 @@ fn random_benchmark( // Make a consistent random number generator // ChaCha8 should be consistent across builds and platforms let mut rng = ChaCha8Rng::seed_from_u64(0); - let model = make_random_model(n_sys, density, timesteps.len(), n_sc, &mut rng).unwrap(); + let model = make_random_model(n_sys, density, n_sc, &mut rng).unwrap(); + let num_timesteps = model.domain().time().timesteps().len(); // This is the number of time-steps - group.throughput(Throughput::Elements((timesteps.len() * n_sc) as u64)); + group.throughput(Throughput::Elements((num_timesteps * n_sc) as u64)); for setup in solver_setups { match &setup.setting { @@ -63,30 +60,10 @@ fn random_benchmark( &(n_sys, density, n_sc), |b, _n| { // Do the setup here outside of the time-step loop + let mut state = + model.setup::(&settings).expect("Failed to setup the model."); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = 
model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solvers = model - .setup_solver::(settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_with_state::( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solvers, - ) - }) + b.iter(|| model.run_with_state(&mut state, &settings)) }, ); } @@ -98,31 +75,11 @@ fn random_benchmark( BenchmarkId::new("random-model", parameter_string), &(n_sys, density, n_sc), |b, _n| { - // Do the setup here outside of the time-step loop - let timesteps = timestepper.timesteps(); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solvers = model - .setup_solver::(settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_with_state::( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solvers, - ) - }) + let mut state = model + .setup::(&settings) + .expect("Failed to setup the model."); + + b.iter(|| model.run_with_state(&mut state, &settings)) }, ); } @@ -135,31 +92,11 @@ fn random_benchmark( BenchmarkId::new("random-model", parameter_string), &(n_sys, density, n_sc), |b, _n| { - // Do the setup here outside of the time-step loop - let timesteps = timestepper.timesteps(); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solver = model - .setup_multi_scenario::>(&scenario_indices, settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_multi_scenario_with_state::>( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut 
recorder_internal_states, - &mut solver, - ) - }) + let mut state = model + .setup_multi_scenario::>(&settings) + .expect("Failed to setup the model."); + + b.iter(|| model.run_multi_scenario_with_state(&mut state, &settings)) }, ); } @@ -173,30 +110,11 @@ fn random_benchmark( &(n_sys, density, n_sc), |b, _n| { // Do the setup here outside of the time-step loop - let timesteps = timestepper.timesteps(); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solver = model - .setup_multi_scenario::>(&scenario_indices, settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_multi_scenario_with_state::>( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solver, - ) - }) + let mut state = model + .setup_multi_scenario::>(&settings) + .expect("Failed to setup the model."); + + b.iter(|| model.run_multi_scenario_with_state(&mut state, &settings)) }, ); } @@ -210,30 +128,11 @@ fn random_benchmark( &(n_sys, density, n_sc), |b, _n| { // Do the setup here outside of the time-step loop - let timesteps = timestepper.timesteps(); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solver = model - .setup_multi_scenario::>(&scenario_indices, settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_multi_scenario_with_state::>( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solver, - ) - }) + let mut state = model + .setup_multi_scenario::>(&settings) + .expect("Failed to setup the model."); + + b.iter(|| model.run_multi_scenario_with_state(&mut state, &settings)) }, ); 
} @@ -247,30 +146,11 @@ fn random_benchmark( &(n_sys, density, n_sc), |b, _n| { // Do the setup here outside of the time-step loop - let timesteps = timestepper.timesteps(); - let ( - scenario_indices, - mut states, - mut parameter_internal_states, - mut recorder_internal_states, - ) = model.setup(×teps).expect("Failed to setup the model."); - - // Setup the solver - let mut solver = model - .setup_multi_scenario::(&scenario_indices, settings) - .expect("Failed to setup the solver."); - - b.iter(|| { - model.run_multi_scenario_with_state::( - ×tepper, - &settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solver, - ) - }) + let mut state = model + .setup_multi_scenario::(&settings) + .expect("Failed to setup the model."); + + b.iter(|| model.run_multi_scenario_with_state(&mut state, &settings)) }, ); } diff --git a/pywr-core/src/aggregated_node.rs b/pywr-core/src/aggregated_node.rs index a689e92e..b2a2166d 100644 --- a/pywr-core/src/aggregated_node.rs +++ b/pywr-core/src/aggregated_node.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::node::{Constraint, ConstraintValue, FlowConstraints, NodeMeta}; use crate::state::State; use crate::{NodeIndex, PywrError}; @@ -132,7 +132,7 @@ impl AggregatedNode { /// pub fn get_norm_factor_pairs( &self, - model: &Model, + model: &Network, state: &State, ) -> Option> { if let Some(factors) = &self.factors { @@ -151,13 +151,13 @@ impl AggregatedNode { pub fn set_min_flow_constraint(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; } - pub fn get_min_flow_constraint(&self, model: &Model, state: &State) -> Result { + pub fn get_min_flow_constraint(&self, model: &Network, state: &State) -> Result { self.flow_constraints.get_min_flow(model, state) } pub fn set_max_flow_constraint(&mut self, value: ConstraintValue) { self.flow_constraints.max_flow = value; } - pub fn 
get_max_flow_constraint(&self, model: &Model, state: &State) -> Result { + pub fn get_max_flow_constraint(&self, model: &Network, state: &State) -> Result { self.flow_constraints.get_max_flow(model, state) } @@ -176,15 +176,15 @@ impl AggregatedNode { Ok(()) } - pub fn get_current_min_flow(&self, model: &Model, state: &State) -> Result { + pub fn get_current_min_flow(&self, model: &Network, state: &State) -> Result { self.flow_constraints.get_min_flow(model, state) } - pub fn get_current_max_flow(&self, model: &Model, state: &State) -> Result { + pub fn get_current_max_flow(&self, model: &Network, state: &State) -> Result { self.flow_constraints.get_max_flow(model, state) } - pub fn get_current_flow_bounds(&self, model: &Model, state: &State) -> Result<(f64, f64), PywrError> { + pub fn get_current_flow_bounds(&self, model: &Network, state: &State) -> Result<(f64, f64), PywrError> { match ( self.get_current_min_flow(model, state), self.get_current_max_flow(model, state), @@ -203,7 +203,7 @@ impl AggregatedNode { fn get_norm_proportional_factor_pairs( factors: &[Metric], nodes: &[NodeIndex], - model: &Model, + model: &Network, state: &State, ) -> Vec<((NodeIndex, f64), (NodeIndex, f64))> { if factors.len() != nodes.len() - 1 { @@ -241,7 +241,7 @@ fn get_norm_proportional_factor_pairs( fn get_norm_ratio_factor_pairs( factors: &[Metric], nodes: &[NodeIndex], - model: &Model, + model: &Network, state: &State, ) -> Vec<((NodeIndex, f64), (NodeIndex, f64))> { if factors.len() != nodes.len() { @@ -263,10 +263,11 @@ fn get_norm_ratio_factor_pairs( mod tests { use crate::aggregated_node::Factors; use crate::metric::Metric; - use crate::model::Model; + use crate::models::Model; + use crate::network::Network; use crate::node::ConstraintValue; use crate::recorders::AssertionRecorder; - use crate::test_utils::{default_timestepper, run_all_solvers}; + use crate::test_utils::{default_time_domain, run_all_solvers}; use ndarray::Array2; /// Test the factors forcing a simple ratio of 
flow @@ -274,30 +275,27 @@ mod tests { /// The model has a single input that diverges to two links and respective output nodes. #[test] fn test_simple_factors() { - let mut model = Model::default(); - let timestepper = default_timestepper(); + let mut network = Network::default(); - model.add_scenario_group("test-scenario", 2).unwrap(); + let input_node = network.add_input_node("input", None).unwrap(); + let link_node0 = network.add_link_node("link", Some("0")).unwrap(); + let output_node0 = network.add_output_node("output", Some("0")).unwrap(); - let input_node = model.add_input_node("input", None).unwrap(); - let link_node0 = model.add_link_node("link", Some("0")).unwrap(); - let output_node0 = model.add_output_node("output", Some("0")).unwrap(); + network.connect_nodes(input_node, link_node0).unwrap(); + network.connect_nodes(link_node0, output_node0).unwrap(); - model.connect_nodes(input_node, link_node0).unwrap(); - model.connect_nodes(link_node0, output_node0).unwrap(); + let link_node1 = network.add_link_node("link", Some("1")).unwrap(); + let output_node1 = network.add_output_node("output", Some("1")).unwrap(); - let link_node1 = model.add_link_node("link", Some("1")).unwrap(); - let output_node1 = model.add_output_node("output", Some("1")).unwrap(); - - model.connect_nodes(input_node, link_node1).unwrap(); - model.connect_nodes(link_node1, output_node1).unwrap(); + network.connect_nodes(input_node, link_node1).unwrap(); + network.connect_nodes(link_node1, output_node1).unwrap(); let factors = Some(Factors::Ratio(vec![Metric::Constant(2.0), Metric::Constant(1.0)])); - let _agg_node = model.add_aggregated_node("agg-node", None, &[link_node0, link_node1], factors); + let _agg_node = network.add_aggregated_node("agg-node", None, &[link_node0, link_node1], factors); // Setup a demand on output-0 - let output_node = model.get_mut_node_by_name("output", Some("0")).unwrap(); + let output_node = network.get_mut_node_by_name("output", Some("0")).unwrap(); 
output_node .set_max_flow_constraint(ConstraintValue::Scalar(100.0)) .unwrap(); @@ -305,17 +303,19 @@ mod tests { output_node.set_cost(ConstraintValue::Scalar(-10.0)); // Set-up assertion for "input" node - let idx = model.get_node_by_name("link", Some("0")).unwrap().index(); + let idx = network.get_node_by_name("link", Some("0")).unwrap().index(); let expected = Array2::from_elem((366, 10), 100.0); let recorder = AssertionRecorder::new("link-0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Set-up assertion for "input" node - let idx = model.get_node_by_name("link", Some("1")).unwrap().index(); + let idx = network.get_node_by_name("link", Some("1")).unwrap().index(); let expected = Array2::from_elem((366, 10), 50.0); let recorder = AssertionRecorder::new("link-0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); + + let model = Model::new(default_time_domain().into(), network); - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-core/src/derived_metric.rs b/pywr-core/src/derived_metric.rs index 13bd1763..d90f4ec4 100644 --- a/pywr-core/src/derived_metric.rs +++ b/pywr-core/src/derived_metric.rs @@ -1,5 +1,5 @@ use crate::aggregated_storage_node::AggregatedStorageNodeIndex; -use crate::model::Model; +use crate::network::Network; use crate::node::NodeIndex; use crate::state::State; use crate::timestep::Timestep; @@ -46,31 +46,31 @@ pub enum DerivedMetric { } impl DerivedMetric { - pub fn before(&self, timestep: &Timestep, model: &Model, state: &State) -> Result, PywrError> { + pub fn before(&self, timestep: &Timestep, network: &Network, state: &State) -> Result, PywrError> { // On the first time-step set the initial value if timestep.is_first() { - self.compute(model, state).map(|v| Some(v)) + 
self.compute(network, state).map(|v| Some(v)) } else { Ok(None) } } - pub fn compute(&self, model: &Model, state: &State) -> Result { + pub fn compute(&self, network: &Network, state: &State) -> Result { match self { Self::NodeProportionalVolume(idx) => { - let max_volume = model.get_node(idx)?.get_current_max_volume(model, state)?; + let max_volume = network.get_node(idx)?.get_current_max_volume(network, state)?; Ok(state .get_network_state() .get_node_proportional_volume(idx, max_volume)?) } Self::VirtualStorageProportionalVolume(idx) => { - let max_volume = model.get_virtual_storage_node(idx)?.get_max_volume(model, state)?; + let max_volume = network.get_virtual_storage_node(idx)?.get_max_volume(network, state)?; Ok(state .get_network_state() .get_virtual_storage_proportional_volume(idx, max_volume)?) } Self::AggregatedNodeProportionalVolume(idx) => { - let node = model.get_aggregated_storage_node(idx)?; + let node = network.get_aggregated_storage_node(idx)?; let volume: f64 = node .nodes .iter() @@ -80,15 +80,15 @@ impl DerivedMetric { let max_volume: f64 = node .nodes .iter() - .map(|idx| model.get_node(idx)?.get_current_max_volume(model, state)) + .map(|idx| network.get_node(idx)?.get_current_max_volume(network, state)) .sum::>()?; // TODO handle divide by zero Ok(volume / max_volume) } Self::NodeInFlowDeficit(idx) => { - let node = model.get_node(idx)?; + let node = network.get_node(idx)?; let flow = state.get_network_state().get_node_in_flow(idx)?; - let max_flow = node.get_current_max_flow(model, state)?; + let max_flow = node.get_current_max_flow(network, state)?; Ok(max_flow - flow) } } diff --git a/pywr-core/src/edge.rs b/pywr-core/src/edge.rs index b24386b7..258252c6 100644 --- a/pywr-core/src/edge.rs +++ b/pywr-core/src/edge.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::node::NodeVec; use crate::state::State; use crate::{NodeIndex, PywrError}; @@ -43,7 +43,7 @@ impl Edge { self.to_node_index } - pub(crate) fn 
cost(&self, nodes: &NodeVec, model: &Model, state: &State) -> Result { + pub(crate) fn cost(&self, nodes: &NodeVec, model: &Network, state: &State) -> Result { let from_node = nodes.get(&self.from_node_index)?; let to_node = nodes.get(&self.to_node_index)?; diff --git a/pywr-core/src/lib.rs b/pywr-core/src/lib.rs index 40e45056..0d6eeb75 100644 --- a/pywr-core/src/lib.rs +++ b/pywr-core/src/lib.rs @@ -3,6 +3,7 @@ extern crate core; use crate::derived_metric::DerivedMetricIndex; +use crate::models::MultiNetworkTransferIndex; use crate::node::NodeIndex; use crate::parameters::{IndexParameterIndex, InterpolationError, MultiValueParameterIndex, ParameterIndex}; use crate::recorders::RecorderIndex; @@ -15,11 +16,12 @@ mod aggregated_storage_node; pub mod derived_metric; pub mod edge; pub mod metric; -pub mod model; +pub mod models; +pub mod network; pub mod node; pub mod parameters; pub mod recorders; -mod scenario; +pub mod scenario; pub mod solvers; pub mod state; pub mod test_utils; @@ -43,10 +45,14 @@ pub enum PywrError { ParameterIndexNotFound(ParameterIndex), #[error("index parameter index {0} not found")] IndexParameterIndexNotFound(IndexParameterIndex), - #[error("multi value parameter index {0} not found")] + #[error("multi1 value parameter index {0} not found")] MultiValueParameterIndexNotFound(MultiValueParameterIndex), - #[error("multi value parameter key {0} not found")] + #[error("multi1 value parameter key {0} not found")] MultiValueParameterKeyNotFound(String), + #[error("inter-network parameter state not initialised")] + InterNetworkParameterStateNotInitialised, + #[error("inter-network parameter index {0} not found")] + MultiNetworkTransferIndexNotFound(MultiNetworkTransferIndex), #[error("parameter {0} not found")] ParameterNotFound(String), #[error("metric set index not found")] @@ -79,6 +85,8 @@ pub enum PywrError { FlowConstraintsUndefined, #[error("storage constraints are undefined for this node")] StorageConstraintsUndefined, + #[error("No more 
timesteps")] + EndOfTimesteps, #[error("can not add virtual storage node to a storage node")] NoVirtualStorageOnStorageNode, #[error("timestep index out of range")] @@ -137,6 +145,10 @@ pub enum PywrError { MissingSolverFeatures, #[error("interpolation error: {0}")] Interpolation(#[from] InterpolationError), + #[error("network {0} not found")] + NetworkNotFound(String), + #[error("network index ({0}) not found")] + NetworkIndexNotFound(usize), #[error("parameters do not provide an initial value")] ParameterNoInitialValue, } diff --git a/pywr-core/src/metric.rs b/pywr-core/src/metric.rs index 6431bff3..7637e38d 100644 --- a/pywr-core/src/metric.rs +++ b/pywr-core/src/metric.rs @@ -2,7 +2,8 @@ use crate::aggregated_node::AggregatedNodeIndex; use crate::aggregated_storage_node::AggregatedStorageNodeIndex; use crate::derived_metric::DerivedMetricIndex; use crate::edge::EdgeIndex; -use crate::model::Model; +use crate::models::MultiNetworkTransferIndex; +use crate::network::Network; use crate::node::NodeIndex; use crate::parameters::{IndexParameterIndex, MultiValueParameterIndex, ParameterIndex}; use crate::state::State; @@ -28,10 +29,11 @@ pub enum Metric { // TODO implement other MultiNodeXXX variants Constant(f64), DerivedMetric(DerivedMetricIndex), + InterNetworkTransfer(MultiNetworkTransferIndex), } impl Metric { - pub fn get_value(&self, model: &Model, state: &State) -> Result { + pub fn get_value(&self, model: &Network, state: &State) -> Result { match self { Metric::NodeInFlow(idx) => Ok(state.get_network_state().get_node_in_flow(idx)?), Metric::NodeOutFlow(idx) => Ok(state.get_network_state().get_node_out_flow(idx)?), @@ -74,6 +76,7 @@ impl Metric { .sum::>()?; Ok(flow) } + Metric::InterNetworkTransfer(idx) => state.get_inter_network_transfer_value(*idx), } } } @@ -85,7 +88,7 @@ pub enum IndexMetric { } impl IndexMetric { - pub fn get_value(&self, model: &Model, state: &State) -> Result { + pub fn get_value(&self, _network: &Network, state: &State) -> Result { 
match self { Self::IndexParameterValue(idx) => state.get_parameter_index(*idx), Self::Constant(i) => Ok(*i), diff --git a/pywr-core/src/models/mod.rs b/pywr-core/src/models/mod.rs new file mode 100644 index 00000000..36cd5999 --- /dev/null +++ b/pywr-core/src/models/mod.rs @@ -0,0 +1,55 @@ +mod multi; +mod simple; + +use crate::scenario::{ScenarioDomain, ScenarioGroupCollection}; +use crate::timestep::{TimeDomain, Timestepper}; +pub use multi::{MultiNetworkModel, MultiNetworkTransferIndex}; +pub use simple::Model; + +pub struct ModelDomain { + time: TimeDomain, + scenarios: ScenarioDomain, +} + +impl ModelDomain { + pub fn new(time: TimeDomain, scenarios: ScenarioDomain) -> Self { + Self { time, scenarios } + } + + pub fn from(timestepper: Timestepper, scenario_collection: ScenarioGroupCollection) -> Self { + Self { + time: timestepper.into(), + scenarios: scenario_collection.into(), + } + } + + pub fn time(&self) -> &TimeDomain { + &self.time + } + + pub fn scenarios(&self) -> &ScenarioDomain { + &self.scenarios + } + + pub fn shape(&self) -> (usize, usize) { + (self.time.timesteps().len(), self.scenarios.indices().len()) + } +} + +impl From for ModelDomain { + fn from(value: Timestepper) -> Self { + Self { + time: value.into(), + scenarios: ScenarioGroupCollection::default().into(), + } + } +} + +impl From for ModelDomain { + fn from(value: TimeDomain) -> Self { + Self { + time: value, + scenarios: ScenarioGroupCollection::default().into(), + } + } +} diff --git a/pywr-core/src/models/multi.rs b/pywr-core/src/models/multi.rs new file mode 100644 index 00000000..933dc4a0 --- /dev/null +++ b/pywr-core/src/models/multi.rs @@ -0,0 +1,394 @@ +use crate::metric::Metric; +use crate::models::ModelDomain; +use crate::network::{Network, NetworkState, RunTimings}; +use crate::scenario::ScenarioIndex; +use crate::solvers::{Solver, SolverSettings}; +use crate::timestep::Timestep; +use crate::PywrError; +use std::any::Any; +use std::fmt; +use std::fmt::{Display, Formatter}; 
+use std::num::NonZeroUsize; +use std::ops::Deref; +use std::time::Instant; + +/// An index to another model +/// +/// The index is to either a model evaluated before this model, or after this model. +enum OtherNetworkIndex { + Before(NonZeroUsize), + After(NonZeroUsize), +} + +impl OtherNetworkIndex { + fn new(from_idx: usize, to_idx: usize) -> Self { + if from_idx == to_idx { + panic!("Cannot create OtherNetworkIndex to self.") + } else if from_idx < to_idx { + Self::Before(NonZeroUsize::new(to_idx - from_idx).unwrap()) + } else { + Self::After(NonZeroUsize::new(from_idx - to_idx).unwrap()) + } + } +} + +#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)] +pub struct MultiNetworkTransferIndex(pub usize); + +impl Deref for MultiNetworkTransferIndex { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Display for MultiNetworkTransferIndex { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// A special parameter that retrieves a value from a metric in another model. +struct MultiNetworkTransfer { + /// The model to get the value from. + from_model_idx: OtherNetworkIndex, + /// The metric to get the value from. + from_metric: Metric, + /// Optional initial value to use on the first time-step + initial_value: Option, +} + +struct MultiNetworkEntry { + name: String, + network: Network, + parameters: Vec, +} + +pub struct MultiNetworkModelState { + current_time_step_idx: usize, + states: Vec, + recorder_states: Vec>>>, + solvers: Vec, +} + +/// A MultiNetwork is a collection of models that can be run together. +pub struct MultiNetworkModel { + domain: ModelDomain, + networks: Vec, +} + +impl MultiNetworkModel { + pub fn new(domain: ModelDomain) -> Self { + Self { + domain, + networks: Vec::new(), + } + } + + /// Get a reference to the [`ModelDomain`] + pub fn domain(&self) -> &ModelDomain { + &self.domain + } + + /// Get a reference to a network by index. 
+ pub fn network(&self, idx: usize) -> Result<&Network, PywrError> { + self.networks + .get(idx) + .map(|n| &n.network) + .ok_or(PywrError::NetworkIndexNotFound(idx)) + } + + /// Get a mutable reference to a network by index. + pub fn network_mut(&mut self, idx: usize) -> Result<&mut Network, PywrError> { + self.networks + .get_mut(idx) + .map(|n| &mut n.network) + .ok_or(PywrError::NetworkIndexNotFound(idx)) + } + + /// Get the index of a network by name. + pub fn get_network_index_by_name(&self, name: &str) -> Result { + self.networks + .iter() + .position(|n| n.name == name) + .ok_or(PywrError::NetworkNotFound(name.to_string())) + } + + pub fn add_network(&mut self, name: &str, network: Network) -> usize { + // TODO check for duplicate names + let idx = self.networks.len(); + self.networks.push(MultiNetworkEntry { + name: name.to_string(), + network, + parameters: Vec::new(), + }); + + idx + } + + /// Add a transfer of data from one network to another. + pub fn add_inter_network_transfer( + &mut self, + from_network_idx: usize, + from_metric: Metric, + to_network_idx: usize, + initial_value: Option, + ) { + let parameter = MultiNetworkTransfer { + from_model_idx: OtherNetworkIndex::new(from_network_idx, to_network_idx), + from_metric, + initial_value, + }; + + self.networks[to_network_idx].parameters.push(parameter); + } + + pub fn setup(&self, settings: &S::Settings) -> Result>>, PywrError> + where + S: Solver, + { + let timesteps = self.domain.time.timesteps(); + let scenario_indices = self.domain.scenarios.indices(); + + let mut states = Vec::with_capacity(self.networks.len()); + let mut recorder_states = Vec::with_capacity(self.networks.len()); + let mut solvers = Vec::with_capacity(self.networks.len()); + + for entry in &self.networks { + let state = entry + .network + .setup_network(×teps, &scenario_indices, entry.parameters.len())?; + let recorder_state = entry.network.setup_recorders(×teps, &scenario_indices)?; + let solver = 
entry.network.setup_solver::(&scenario_indices, settings)?; + + states.push(state); + recorder_states.push(recorder_state); + solvers.push(solver); + } + + Ok(MultiNetworkModelState { + current_time_step_idx: 0, + states, + recorder_states, + solvers, + }) + } + + /// Compute inter model transfers + fn compute_inter_network_transfers( + &self, + model_idx: usize, + timestep: &Timestep, + scenario_indices: &[ScenarioIndex], + states: &mut [NetworkState], + ) -> Result<(), PywrError> { + // Get references to the models before and after this model + let (before_models, after_models) = self.networks.split_at(model_idx); + let (this_model, after_models) = after_models.split_first().unwrap(); + // Get references to the states before and after this model + let (before, after) = states.split_at_mut(model_idx); + let (this_models_state, after) = after.split_first_mut().unwrap(); + + // Compute inter-model transfers for all scenarios + for scenario_index in scenario_indices.iter() { + compute_inter_network_transfers( + timestep, + scenario_index, + &this_model.parameters, + this_models_state, + &before_models, + &before, + &after_models, + &after, + )?; + } + + Ok(()) + } + + /// Perform a single time-step of the multi1-model. 
+ pub fn step(&self, state: &mut MultiNetworkModelState>>) -> Result<(), PywrError> + where + S: Solver, + { + let mut timings = RunTimings::default(); + + let timestep = self + .domain + .time + .timesteps() + .get(state.current_time_step_idx) + .ok_or(PywrError::EndOfTimesteps)?; + + let scenario_indices = self.domain.scenarios.indices(); + + for (idx, entry) in self.networks.iter().enumerate() { + // Perform inter-model state updates + self.compute_inter_network_transfers(idx, timestep, scenario_indices, &mut state.states)?; + + let sub_model_solvers = state.solvers.get_mut(idx).unwrap(); + let sub_model_states = state.states.get_mut(idx).unwrap(); + + // Perform sub-model step + entry + .network + .step( + timestep, + scenario_indices, + sub_model_solvers, + sub_model_states, + &mut timings, + ) + .unwrap(); + + let start_r_save = Instant::now(); + + let sub_model_recorder_states = state.recorder_states.get_mut(idx).unwrap(); + + entry + .network + .save_recorders(timestep, scenario_indices, sub_model_states, sub_model_recorder_states)?; + timings.recorder_saving += start_r_save.elapsed(); + } + + // Finally increment the time-step index + state.current_time_step_idx += 1; + + Ok(()) + } + + /// Run the model through the given time-steps. + /// + /// This method will setup state and solvers, and then run the model through the time-steps. + pub fn run(&self, settings: &S::Settings) -> Result<(), PywrError> + where + S: Solver, + ::Settings: SolverSettings, + { + let mut state = self.setup::(settings)?; + + self.run_with_state::(&mut state, settings)?; + + Ok(()) + } + + /// Run the model with the provided states and solvers. 
+ pub fn run_with_state( + &self, + state: &mut MultiNetworkModelState>>, + _settings: &S::Settings, + ) -> Result<(), PywrError> + where + S: Solver, + ::Settings: SolverSettings, + { + let mut timings = RunTimings::default(); + let mut count = 0; + + // TODO: Setup thread pool if running in parallel + + loop { + match self.step::(state) { + Ok(_) => {} + Err(PywrError::EndOfTimesteps) => break, + Err(e) => return Err(e), + } + + count += self.domain.scenarios.indices().len(); + } + + for (idx, entry) in self.networks.iter().enumerate() { + let sub_model_recorder_states = state.recorder_states.get_mut(idx).unwrap(); + entry.network.finalise(sub_model_recorder_states)?; + } + // End the global timer and print the run statistics + timings.finish(count); + timings.print_table(); + + Ok(()) + } +} + +/// Calculate inter-model parameters for the given scenario index. +/// +/// +fn compute_inter_network_transfers( + timestep: &Timestep, + scenario_index: &ScenarioIndex, + inter_network_transfers: &[MultiNetworkTransfer], + state: &mut NetworkState, + before_models: &[MultiNetworkEntry], + before_states: &[NetworkState], + after_models: &[MultiNetworkEntry], + after_states: &[NetworkState], +) -> Result<(), PywrError> { + // Iterate through all of the inter-model transfers + for (idx, parameter) in inter_network_transfers.iter().enumerate() { + // Determine which model and state we are getting the value from + let (other_model, other_model_state) = match parameter.from_model_idx { + OtherNetworkIndex::Before(i) => { + let rev_i = before_states.len() - i.get(); + (&before_models[rev_i], &before_states[rev_i]) + } + OtherNetworkIndex::After(i) => (&after_models[i.get() - 1], &after_states[i.get() - 1]), + }; + + let value = match timestep.is_first().then(|| parameter.initial_value).flatten() { + // Use the initial value if it is given and it is the first time-step. 
+ Some(initial_value) => initial_value, + // Otherwise, get the value from the other model's state/metric + None => parameter + .from_metric + .get_value(&other_model.network, other_model_state.state(scenario_index))?, + }; + + state + .state_mut(scenario_index) + .set_inter_network_transfer_value(MultiNetworkTransferIndex(idx), value)?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::MultiNetworkModel; + use crate::models::ModelDomain; + use crate::network::Network; + use crate::scenario::ScenarioGroupCollection; + use crate::solvers::ClpSolver; + use crate::test_utils::{default_timestepper, simple_network}; + + /// Test basic [`MultiNetworkModel`] functionality by running two independent models. + #[test] + fn test_multi_model_step() { + // Create two simple models + let timestepper = default_timestepper(); + let mut scenario_collection = ScenarioGroupCollection::default(); + scenario_collection.add_group("test-scenario", 2); + + let mut multi_model = MultiNetworkModel::new(ModelDomain::from(timestepper, scenario_collection)); + + let test_scenario_group_idx = multi_model + .domain() + .scenarios + .group_index("test-scenario") + .expect("Scenario group not found."); + + let mut network1 = Network::default(); + simple_network(&mut network1, test_scenario_group_idx, 2); + + let mut network2 = Network::default(); + simple_network(&mut network2, test_scenario_group_idx, 2); + + let network1_idx = multi_model.add_network("network1", network1); + let network2_idx = multi_model.add_network("network2", network2); + + let mut state = multi_model + .setup::(&Default::default()) + .expect("Failed to setup multi-model."); + + multi_model.step(&mut state).expect("Failed to step multi-model.") + } +} diff --git a/pywr-core/src/models/simple.rs b/pywr-core/src/models/simple.rs new file mode 100644 index 00000000..5cb075e1 --- /dev/null +++ b/pywr-core/src/models/simple.rs @@ -0,0 +1,307 @@ +use crate::models::ModelDomain; +use crate::network::{Network, 
NetworkState, RunTimings}; +use crate::solvers::{MultiStateSolver, Solver, SolverSettings}; +use crate::PywrError; +use rayon::ThreadPool; +use std::any::Any; +use std::time::Instant; +use tracing::debug; + +pub struct ModelState { + current_time_step_idx: usize, + state: NetworkState, + recorder_state: Vec>>, + solvers: S, +} + +impl ModelState { + pub fn network_state(&self) -> &NetworkState { + &self.state + } + + pub fn network_state_mut(&mut self) -> &mut NetworkState { + &mut self.state + } +} + +/// A standard Pywr model containing a single network. +pub struct Model { + domain: ModelDomain, + network: Network, +} + +impl Model { + /// Construct a new model from a [`ModelDomain`] and [`Network`]. + pub fn new(domain: ModelDomain, network: Network) -> Self { + Self { domain, network } + } + + /// Get a reference to the [`ModelDomain`] + pub fn domain(&self) -> &ModelDomain { + &self.domain + } + + pub fn network(&self) -> &Network { + &self.network + } + + pub fn network_mut(&mut self) -> &mut Network { + &mut self.network + } + + /// Check whether a solver [`S`] has the required features to run this model. + pub fn check_solver_features(&self) -> bool + where + S: Solver, + { + self.network.check_solver_features::() + } + + /// Check whether a solver [`S`] has the required features to run this model. 
+ pub fn check_multi_scenario_solver_features(&self) -> bool + where + S: MultiStateSolver, + { + self.network.check_multi_scenario_solver_features::() + } + + pub fn setup(&self, settings: &S::Settings) -> Result>>, PywrError> + where + S: Solver, + { + let timesteps = self.domain.time.timesteps(); + let scenario_indices = self.domain.scenarios.indices(); + + let state = self.network.setup_network(&timesteps, &scenario_indices, 0)?; + let recorder_state = self.network.setup_recorders(&timesteps, &scenario_indices)?; + let solvers = self.network.setup_solver::(&scenario_indices, settings)?; + + Ok(ModelState { + current_time_step_idx: 0, + state, + recorder_state, + solvers, + }) + } + + pub fn setup_multi_scenario(&self, settings: &S::Settings) -> Result>, PywrError> + where + S: MultiStateSolver, + { + let timesteps = self.domain.time.timesteps(); + let scenario_indices = self.domain.scenarios.indices(); + + let state = self.network.setup_network(&timesteps, &scenario_indices, 0)?; + let recorder_state = self.network.setup_recorders(&timesteps, &scenario_indices)?; + let solvers = self + .network + .setup_multi_scenario_solver::(&scenario_indices, settings)?; + + Ok(ModelState { + current_time_step_idx: 0, + state, + recorder_state, + solvers, + }) + } + + pub fn step( + &self, + state: &mut ModelState>>, + thread_pool: Option<&ThreadPool>, + timings: &mut RunTimings, + ) -> Result<(), PywrError> + where + S: Solver, + { + let timestep = self + .domain + .time + .timesteps() + .get(state.current_time_step_idx) + .ok_or(PywrError::EndOfTimesteps)?; + + let scenario_indices = self.domain.scenarios.indices(); + debug!("Starting timestep {:?}", timestep); + + let solvers = &mut state.solvers; + let network_state = &mut state.state; + + match thread_pool { + Some(pool) => { + // State is mutated in-place + pool.install(|| { + self.network + .step_par(timestep, &scenario_indices, solvers, network_state, timings) + })?; + } + None => { + self.network + .step(timestep, &scenario_indices, 
solvers, network_state, timings)?; + } + } + + let start_r_save = Instant::now(); + + self.network + .save_recorders(timestep, scenario_indices, &state.state, &mut state.recorder_state)?; + timings.recorder_saving += start_r_save.elapsed(); + + // Finally increment the time-step index + state.current_time_step_idx += 1; + + Ok(()) + } + + pub fn step_multi_scenario( + &self, + state: &mut ModelState>, + thread_pool: &ThreadPool, + timings: &mut RunTimings, + ) -> Result<(), PywrError> + where + S: MultiStateSolver, + { + let timestep = self + .domain + .time + .timesteps() + .get(state.current_time_step_idx) + .ok_or(PywrError::EndOfTimesteps)?; + + let scenario_indices = self.domain.scenarios.indices(); + debug!("Starting timestep {:?}", timestep); + + let solvers = &mut state.solvers; + let network_state = &mut state.state; + + // State is mutated in-place + thread_pool.install(|| { + self.network + .step_multi_scenario(timestep, &scenario_indices, solvers, network_state, timings) + })?; + + let start_r_save = Instant::now(); + + self.network + .save_recorders(timestep, scenario_indices, &state.state, &mut state.recorder_state)?; + timings.recorder_saving += start_r_save.elapsed(); + + // Finally increment the time-step index + state.current_time_step_idx += 1; + + Ok(()) + } + + /// Run a model through the given time-steps. + /// + /// This method will setup state and solvers, and then run the model through the time-steps. + pub fn run(&self, settings: &S::Settings) -> Result<(), PywrError> + where + S: Solver, + ::Settings: SolverSettings, + { + let mut state = self.setup::(settings)?; + + self.run_with_state::(&mut state, settings)?; + + Ok(()) + } + + /// Run the model with the provided states and solvers. 
+ pub fn run_with_state( + &self, + state: &mut ModelState>>, + settings: &S::Settings, + ) -> Result<(), PywrError> + where + S: Solver, + ::Settings: SolverSettings, + { + let mut timings = RunTimings::default(); + let mut count = 0; + + // Setup thread pool if running in parallel + let pool = if settings.parallel() { + Some( + rayon::ThreadPoolBuilder::new() + .num_threads(settings.threads()) + .build() + .unwrap(), + ) + } else { + None + }; + + loop { + match self.step::(state, pool.as_ref(), &mut timings) { + Ok(_) => {} + Err(PywrError::EndOfTimesteps) => break, + Err(e) => return Err(e), + } + + count += self.domain.scenarios.indices().len(); + } + + self.network.finalise(&mut state.recorder_state)?; + // End the global timer and print the run statistics + timings.finish(count); + timings.print_table(); + + Ok(()) + } + + /// Run a network through the given time-steps with [`MultiStateSolver`]. + /// + /// This method will setup state and the solver, and then run the network through the time-steps. + pub fn run_multi_scenario(&self, settings: &S::Settings) -> Result<(), PywrError> + where + S: MultiStateSolver, + ::Settings: SolverSettings, + { + // Setup the network and create the initial state + let mut state = self.setup_multi_scenario(settings)?; + + self.run_multi_scenario_with_state::(&mut state, settings) + } + + /// Run the network with the provided states and [`MultiStateSolver`] solver. 
+ pub fn run_multi_scenario_with_state( + &self, + state: &mut ModelState>, + settings: &S::Settings, + ) -> Result<(), PywrError> + where + S: MultiStateSolver, + ::Settings: SolverSettings, + { + let mut timings = RunTimings::default(); + let mut count = 0; + + let num_threads = if settings.parallel() { settings.threads() } else { 1 }; + + // Setup thread pool + let pool = rayon::ThreadPoolBuilder::new() + .num_threads(num_threads) + .build() + .unwrap(); + + loop { + match self.step_multi_scenario::(state, &pool, &mut timings) { + Ok(_) => {} + Err(PywrError::EndOfTimesteps) => break, + Err(e) => return Err(e), + } + + count += self.domain.scenarios.indices().len(); + } + + self.network.finalise(&mut state.recorder_state)?; + + // End the global timer and print the run statistics + timings.finish(count); + timings.print_table(); + + Ok(()) + } +} diff --git a/pywr-core/src/model.rs b/pywr-core/src/network.rs similarity index 79% rename from pywr-core/src/model.rs rename to pywr-core/src/network.rs index 1dcfc1d1..a9d0f57d 100644 --- a/pywr-core/src/model.rs +++ b/pywr-core/src/network.rs @@ -6,10 +6,10 @@ use crate::metric::Metric; use crate::node::{ConstraintValue, Node, NodeVec, StorageInitialVolume}; use crate::parameters::{MultiValueParameterIndex, ParameterType}; use crate::recorders::{MetricSet, MetricSetIndex}; -use crate::scenario::{ScenarioGroupCollection, ScenarioIndex}; -use crate::solvers::{MultiStateSolver, Solver, SolverFeatures, SolverSettings, SolverTimings}; +use crate::scenario::ScenarioIndex; +use crate::solvers::{MultiStateSolver, Solver, SolverFeatures, SolverTimings}; use crate::state::{ParameterStates, State}; -use crate::timestep::{Timestep, Timestepper}; +use crate::timestep::Timestep; use crate::virtual_storage::{VirtualStorage, VirtualStorageIndex, VirtualStorageReset, VirtualStorageVec}; use crate::{parameters, recorders, IndexParameterIndex, NodeIndex, ParameterIndex, PywrError, RecorderIndex}; use rayon::prelude::*; @@ -18,18 +18,18 
@@ use std::collections::HashSet; use std::ops::Deref; use std::time::Duration; use std::time::Instant; -use tracing::{debug, info}; +use tracing::info; -enum RunDuration { +pub enum RunDuration { Running(Instant), Finished(Duration, usize), } pub struct RunTimings { - global: RunDuration, - parameter_calculation: Duration, - recorder_saving: Duration, - solve: SolverTimings, + pub global: RunDuration, + pub parameter_calculation: Duration, + pub recorder_saving: Duration, + pub solve: SolverTimings, } impl Default for RunTimings { @@ -47,7 +47,7 @@ impl RunTimings { /// End the global timer for this timing instance. /// /// If the timer has already finished this method has no effect. - fn finish(&mut self, count: usize) { + pub fn finish(&mut self, count: usize) { if let RunDuration::Running(i) = self.global { self.global = RunDuration::Finished(i.elapsed(), count); } @@ -67,7 +67,7 @@ impl RunTimings { } } - fn print_table(&self) { + pub fn print_table(&self) { info!("Run timing statistics:"); let total = self.total_duration().as_secs_f64(); info!("{: <24} | {: <10}", "Metric", "Value"); @@ -142,14 +142,45 @@ enum ComponentType { DerivedMetric(DerivedMetricIndex), } +/// Internal states for each scenario and recorder. 
+pub struct NetworkState { + // State by scenario + states: Vec, + // Parameter state by scenario + parameter_internal_states: Vec, +} + +impl NetworkState { + pub fn state(&self, scenario_index: &ScenarioIndex) -> &State { + &self.states[scenario_index.index] + } + + pub fn state_mut(&mut self, scenario_index: &ScenarioIndex) -> &mut State { + &mut self.states[scenario_index.index] + } + + pub fn parameter_states(&self, scenario_index: &ScenarioIndex) -> &ParameterStates { + &self.parameter_internal_states[scenario_index.index] + } + + pub fn parameter_states_mut(&mut self, scenario_index: &ScenarioIndex) -> &mut ParameterStates { + &mut self.parameter_internal_states[scenario_index.index] + } +} + +/// A Pywr network containing nodes, edges, parameters, metric sets, etc. +/// +/// This struct is the main entry point for constructing a Pywr network and should be used +/// to represent a discrete system. A network can be simulated using a model and a solver. The +/// network is translated into a linear program using the [`Solver`] trait. +/// #[derive(Default)] -pub struct Model { - scenarios: ScenarioGroupCollection, - pub nodes: NodeVec, - pub edges: EdgeVec, - pub aggregated_nodes: AggregatedNodeVec, - pub aggregated_storage_nodes: AggregatedStorageNodeVec, - pub virtual_storage_nodes: VirtualStorageVec, +pub struct Network { + nodes: NodeVec, + edges: EdgeVec, + aggregated_nodes: AggregatedNodeVec, + aggregated_storage_nodes: AggregatedStorageNodeVec, + virtual_storage_nodes: VirtualStorageVec, parameters: Vec>, index_parameters: Vec>, multi_parameters: Vec>, @@ -159,25 +190,37 @@ pub struct Model { recorders: Vec>, } -impl Model { - /// Setup the model and create the initial state for each scenario. 
- pub fn setup( +impl Network { + pub fn nodes(&self) -> &NodeVec { + &self.nodes + } + pub fn edges(&self) -> &EdgeVec { + &self.edges + } + + pub fn aggregated_nodes(&self) -> &AggregatedNodeVec { + &self.aggregated_nodes + } + + pub fn aggregated_storage_nodes(&self) -> &AggregatedStorageNodeVec { + &self.aggregated_storage_nodes + } + + pub fn virtual_storage_nodes(&self) -> &VirtualStorageVec { + &self.virtual_storage_nodes + } + + /// Setup the network and create the initial state for each scenario. + pub fn setup_network( &self, timesteps: &[Timestep], - ) -> Result< - ( - Vec, - Vec, - Vec, - Vec>>, - ), - PywrError, - > { - let scenario_indices = self.scenarios.scenario_indices(); + scenario_indices: &[ScenarioIndex], + num_inter_network_transfers: usize, + ) -> Result { let mut states: Vec = Vec::with_capacity(scenario_indices.len()); let mut parameter_internal_states: Vec = Vec::with_capacity(scenario_indices.len()); - for scenario_index in &scenario_indices { + for scenario_index in scenario_indices { // Initialise node states. Note that storage nodes will have a zero volume at this point. 
let initial_node_states = self.nodes.iter().map(|n| n.default_state()).collect(); @@ -210,8 +253,8 @@ impl Model { initial_indices_states.len(), initial_multi_param_states.len(), self.derived_metrics.len(), + num_inter_network_transfers, ); - states.push(state); parameter_internal_states.push(ParameterStates::new( @@ -221,6 +264,17 @@ impl Model { )); } + Ok(NetworkState { + states, + parameter_internal_states, + }) + } + + pub fn setup_recorders( + &self, + timesteps: &[Timestep], + scenario_indices: &[ScenarioIndex], + ) -> Result>>, PywrError> { // Setup recorders let mut recorder_internal_states = Vec::new(); for recorder in &self.recorders { @@ -228,15 +282,10 @@ impl Model { recorder_internal_states.push(initial_state); } - Ok(( - scenario_indices, - states, - parameter_internal_states, - recorder_internal_states, - )) + Ok(recorder_internal_states) } - /// Check whether a solver [`S`] has the required features to run this model. + /// Check whether a solver [`S`] has the required features to run this network. pub fn check_solver_features(&self) -> bool where S: Solver, @@ -246,7 +295,7 @@ impl Model { required_features.iter().all(|f| S::features().contains(f)) } - /// Check whether a solver [`S`] has the required features to run this model. + /// Check whether a solver [`S`] has the required features to run this network. 
pub fn check_multi_scenario_solver_features(&self) -> bool where S: MultiStateSolver, @@ -256,7 +305,11 @@ impl Model { required_features.iter().all(|f| S::features().contains(f)) } - pub fn setup_solver(&self, settings: &S::Settings) -> Result>, PywrError> + pub fn setup_solver( + &self, + scenario_indices: &[ScenarioIndex], + settings: &S::Settings, + ) -> Result>, PywrError> where S: Solver, { @@ -264,8 +317,6 @@ impl Model { return Err(PywrError::MissingSolverFeatures); } - let scenario_indices = self.scenarios.scenario_indices(); - let mut solvers = Vec::with_capacity(scenario_indices.len()); for _scenario_index in scenario_indices { @@ -277,7 +328,7 @@ impl Model { Ok(solvers) } - pub fn setup_multi_scenario( + pub fn setup_multi_scenario_solver( &self, scenario_indices: &[ScenarioIndex], settings: &S::Settings, @@ -291,7 +342,7 @@ impl Model { S::setup(self, scenario_indices.len(), settings) } - fn finalise(&self, recorder_internal_states: &mut [Option>]) -> Result<(), PywrError> { + pub fn finalise(&self, recorder_internal_states: &mut [Option>]) -> Result<(), PywrError> { // Setup recorders for (recorder, internal_state) in self.recorders.iter().zip(recorder_internal_states) { recorder.finalise(internal_state)?; @@ -300,204 +351,13 @@ impl Model { Ok(()) } - /// Run a model through the given time-steps. - /// - /// This method will setup state and solvers, and then run the model through the time-steps. 
- pub fn run(&self, timestepper: &Timestepper, settings: &S::Settings) -> Result<(), PywrError> - where - S: Solver, - ::Settings: SolverSettings, - { - let timesteps = timestepper.timesteps(); - - // Setup the model and create the initial state - let (scenario_indices, mut states, mut parameter_internal_states, mut recorder_internal_states) = - self.setup(×teps)?; - - // Setup the solver - let mut solvers = self.setup_solver::(settings)?; - - self.run_with_state( - timestepper, - settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solvers, - ) - } - - /// Run the model with the provided states and solvers. - pub fn run_with_state( - &self, - timestepper: &Timestepper, - settings: &S::Settings, - scenario_indices: &[ScenarioIndex], - states: &mut [State], - parameter_internal_states: &mut [ParameterStates], - recorder_internal_states: &mut [Option>], - solvers: &mut [Box], - ) -> Result<(), PywrError> - where - S: Solver, - ::Settings: SolverSettings, - { - let mut timings = RunTimings::default(); - let mut count = 0; - - let timesteps = timestepper.timesteps(); - - // Setup thread pool if running in parallel - let pool = if settings.parallel() { - Some( - rayon::ThreadPoolBuilder::new() - .num_threads(settings.threads()) - .build() - .unwrap(), - ) - } else { - None - }; - - // Step a timestep - for timestep in timesteps.iter() { - debug!("Starting timestep {:?}", timestep); - - if let Some(pool) = &pool { - // State is mutated in-place - pool.install(|| { - self.step_par( - timestep, - &scenario_indices, - solvers, - states, - parameter_internal_states, - &mut timings, - ) - })?; - } else { - // State is mutated in-place - self.step( - timestep, - &scenario_indices, - solvers, - states, - parameter_internal_states, - &mut timings, - )?; - } - - let start_r_save = Instant::now(); - self.save_recorders(timestep, &scenario_indices, &states, recorder_internal_states)?; - timings.recorder_saving += 
start_r_save.elapsed(); - - count += scenario_indices.len(); - } - - self.finalise(recorder_internal_states)?; - // End the global timer and print the run statistics - timings.finish(count); - timings.print_table(); - - Ok(()) - } - - /// Run a model through the given time-steps with [`MultiStateSolver`]. - /// - /// This method will setup state and the solver, and then run the model through the time-steps. - pub fn run_multi_scenario(&self, timestepper: &Timestepper, settings: &S::Settings) -> Result<(), PywrError> - where - S: MultiStateSolver, - ::Settings: SolverSettings, - { - let timesteps = timestepper.timesteps(); - - // Setup the model and create the initial state - let (scenario_indices, mut states, mut parameter_internal_states, mut recorder_internal_states) = - self.setup(×teps)?; - - // Setup the solver - let mut solver = self.setup_multi_scenario::(&scenario_indices, settings)?; - - self.run_multi_scenario_with_state( - ×tepper, - settings, - &scenario_indices, - &mut states, - &mut parameter_internal_states, - &mut recorder_internal_states, - &mut solver, - ) - } - - /// Run the model with the provided states and [`MultiStateSolver`] solver. 
- pub fn run_multi_scenario_with_state( - &self, - timestepper: &Timestepper, - settings: &S::Settings, - scenario_indices: &[ScenarioIndex], - states: &mut [State], - parameter_internal_states: &mut [ParameterStates], - recorder_internal_states: &mut [Option>], - solver: &mut Box, - ) -> Result<(), PywrError> - where - S: MultiStateSolver, - ::Settings: SolverSettings, - { - let mut timings = RunTimings::default(); - let mut count = 0; - - let timesteps = timestepper.timesteps(); - let num_threads = if settings.parallel() { settings.threads() } else { 1 }; - - // Setup thread pool - let pool = rayon::ThreadPoolBuilder::new() - .num_threads(num_threads) - .build() - .unwrap(); - - // Step a timestep - for timestep in timesteps.iter() { - debug!("Starting timestep {:?}", timestep); - - pool.install(|| { - // State is mutated in-place - self.step_multi_scenario( - timestep, - &scenario_indices, - solver, - states, - parameter_internal_states, - &mut timings, - ) - })?; - - let start_r_save = Instant::now(); - self.save_recorders(timestep, &scenario_indices, &states, recorder_internal_states)?; - timings.recorder_saving += start_r_save.elapsed(); - - count += scenario_indices.len(); - } - - self.finalise(recorder_internal_states)?; - - // End the global timer and print the run statistics - timings.finish(count); - timings.print_table(); - - Ok(()) - } - /// Perform a single timestep mutating the current state. 
pub fn step( &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], solvers: &mut [Box], - states: &mut [State], - parameter_internal_states: &mut [ParameterStates], + state: &mut NetworkState, timings: &mut RunTimings, ) -> Result<(), PywrError> where @@ -505,14 +365,14 @@ impl Model { { scenario_indices .iter() - .zip(states) - .zip(parameter_internal_states) + .zip(state.states.iter_mut()) + .zip(state.parameter_internal_states.iter_mut()) .zip(solvers) - .for_each(|(((scenario_index, current_state), p_internal_state), solver)| { + .for_each(|(((scenario_index, current_state), p_internal_states), solver)| { // TODO clear the current parameter values state (i.e. set them all to zero). let start_p_calc = Instant::now(); - self.compute_components(timestep, scenario_index, current_state, p_internal_state) + self.compute_components(timestep, scenario_index, current_state, p_internal_states) .unwrap(); // State now contains updated parameter values BUT original network state @@ -525,7 +385,7 @@ impl Model { // Now run the "after" method on all components let start_p_after = Instant::now(); - self.after(timestep, scenario_index, current_state, p_internal_state) + self.after(timestep, scenario_index, current_state, p_internal_states) .unwrap(); timings.parameter_calculation += start_p_after.elapsed(); @@ -544,8 +404,7 @@ impl Model { timestep: &Timestep, scenario_indices: &[ScenarioIndex], solvers: &mut [Box], - states: &mut [State], - parameter_internal_states: &mut [ParameterStates], + state: &mut NetworkState, timings: &mut RunTimings, ) -> Result<(), PywrError> where @@ -554,8 +413,8 @@ impl Model { // Collect all the timings from each parallel solve let step_times: Vec<_> = scenario_indices .par_iter() - .zip(states) - .zip(parameter_internal_states) + .zip(&mut state.states) + .zip(&mut state.parameter_internal_states) .zip(solvers) .map(|(((scenario_index, current_state), p_internal_state), solver)| { // TODO clear the current parameter values state 
(i.e. set them all to zero). @@ -591,14 +450,13 @@ impl Model { Ok(()) } - /// Perform a single timestep with a multi-state solver mutating the current state. + /// Perform a single timestep with a multi-state solver mutating the current state. pub(crate) fn step_multi_scenario( &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], solver: &mut Box, - states: &mut [State], - parameter_internal_states: &mut [ParameterStates], + state: &mut NetworkState, timings: &mut RunTimings, ) -> Result<(), PywrError> where @@ -608,13 +466,13 @@ let p_calc_timings: Vec<_> = scenario_indices .par_iter() - .zip(&mut *states) - .zip(&mut *parameter_internal_states) - .map(|((scenario_index, current_state), p_internal_state)| { + .zip(&mut state.states) + .zip(&mut state.parameter_internal_states) + .map(|((scenario_index, current_state), p_internal_states)| { // TODO clear the current parameter values state (i.e. set them all to zero). let start_p_calc = Instant::now(); - self.compute_components(timestep, scenario_index, current_state, p_internal_state) + self.compute_components(timestep, scenario_index, current_state, p_internal_states) .unwrap(); // State now contains updated parameter values BUT original network state @@ -627,18 +485,19 @@ } // Now solve all the LPs simultaneously - let solve_timings = solver.solve(self, timestep, states).unwrap(); + + let solve_timings = solver.solve(self, timestep, &mut state.states).unwrap(); // State now contains updated parameter values AND updated network state timings.solve += solve_timings; // Now run the "after" method on all components let p_after_timings: Vec<_> = scenario_indices .par_iter() - .zip(&mut *states) - .zip(parameter_internal_states) - .map(|((scenario_index, current_state), p_internal_state)| { + .zip(&mut state.states) + .zip(&mut state.parameter_internal_states) + .map(|((scenario_index, current_state), p_internal_states)| { + let start_p_after = Instant::now(); - self.after(timestep, 
scenario_index, current_state, p_internal_state) + self.after(timestep, scenario_index, current_state, p_internal_states) .unwrap(); start_p_after.elapsed() }) @@ -651,7 +510,7 @@ impl Model { Ok(()) } - /// Calculate the set of [`SolverFeatures`] required to correctly run this model. + /// Calculate the set of [`SolverFeatures`] required to correctly run this network. fn required_features(&self) -> HashSet { let mut features = HashSet::new(); @@ -673,9 +532,9 @@ impl Model { features } - /// Undertake calculations for model components before solve. + /// Undertake calculations for network components before solve. /// - /// This method iterates through the model components (nodes, parameters, etc) to perform + /// This method iterates through the network components (nodes, parameters, etc) to perform /// pre-solve calculations. For nodes this can be adjustments to storage volume (e.g. to /// set initial volume). For parameters this involves computing the current value for the /// the timestep. The `state` object is progressively updated with these values during this @@ -768,9 +627,9 @@ impl Model { Ok(()) } - /// Undertake "after" for model components after solve. + /// Undertake "after" for network components after solve. /// - /// This method iterates through the model components (nodes, parameters, etc) to perform + /// This method iterates through the network components (nodes, parameters, etc) to perform /// pre-solve calculations. For nodes this can be adjustments to storage volume (e.g. to /// set initial volume). For parameters this involves computing the current value for the /// the timestep. 
The `state` object is progressively updated with these values during this @@ -850,39 +709,19 @@ impl Model { Ok(()) } - fn save_recorders( + pub fn save_recorders( &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - states: &[State], + state: &NetworkState, recorder_internal_states: &mut [Option>], ) -> Result<(), PywrError> { for (recorder, internal_state) in self.recorders.iter().zip(recorder_internal_states) { - recorder.save(timestep, scenario_indices, self, states, internal_state)?; + recorder.save(timestep, scenario_indices, self, &state.states, internal_state)?; } Ok(()) } - /// Add a `ScenarioGroup` to the model - pub fn add_scenario_group(&mut self, name: &str, size: usize) -> Result<(), PywrError> { - self.scenarios.add_group(name, size); - Ok(()) - } - - /// Get a `ScenarioGroup`'s index by name - pub fn get_scenario_group_index_by_name(&self, name: &str) -> Result { - self.scenarios.get_group_index_by_name(name) - } - - /// Get a `ScenarioGroup`'s size by name - pub fn get_scenario_group_size_by_name(&self, name: &str) -> Result { - self.scenarios.get_group_by_name(name).map(|g| g.size()) - } - - pub fn get_scenario_indices(&self) -> Vec { - self.scenarios.scenario_indices() - } - /// Get a Node from a node's name pub fn get_node_index_by_name(&self, name: &str, sub_name: Option<&str>) -> Result { Ok(self.get_node_by_name(name, sub_name)?.index()) @@ -1241,7 +1080,7 @@ impl Model { } } - /// Add a new Node::Input to the model. + /// Add a new Node::Input to the network. pub fn add_input_node(&mut self, name: &str, sub_name: Option<&str>) -> Result { // Check for name. // TODO move this check to `NodeVec` @@ -1256,7 +1095,7 @@ impl Model { Ok(node_index) } - /// Add a new Node::Link to the model. + /// Add a new Node::Link to the network. pub fn add_link_node(&mut self, name: &str, sub_name: Option<&str>) -> Result { // Check for name. 
// TODO move this check to `NodeVec` @@ -1271,7 +1110,7 @@ impl Model { Ok(node_index) } - /// Add a new Node::Link to the model. + /// Add a new Node::Link to the network. pub fn add_output_node(&mut self, name: &str, sub_name: Option<&str>) -> Result { // Check for name. // TODO move this check to `NodeVec` @@ -1286,7 +1125,7 @@ impl Model { Ok(node_index) } - /// Add a new Node::Link to the model. + /// Add a new Node::Link to the network. pub fn add_storage_node( &mut self, name: &str, @@ -1310,7 +1149,7 @@ impl Model { Ok(node_index) } - /// Add a new `aggregated_node::AggregatedNode` to the model. + /// Add a new `aggregated_node::AggregatedNode` to the network. pub fn add_aggregated_node( &mut self, name: &str, @@ -1326,7 +1165,7 @@ impl Model { Ok(node_index) } - /// Add a new `aggregated_storage_node::AggregatedStorageNode` to the model. + /// Add a new `aggregated_storage_node::AggregatedStorageNode` to the network. pub fn add_aggregated_storage_node( &mut self, name: &str, @@ -1341,7 +1180,7 @@ impl Model { Ok(node_index) } - /// Add a new `VirtualStorage` to the model. + /// Add a new `VirtualStorage` to the network. 
pub fn add_virtual_storage_node( &mut self, name: &str, @@ -1383,7 +1222,7 @@ impl Model { Ok(vs_node_index) } - /// Add a `parameters::Parameter` to the model + /// Add a `parameters::Parameter` to the network pub fn add_parameter(&mut self, parameter: Box) -> Result { if let Ok(idx) = self.get_parameter_index_by_name(¶meter.meta().name) { return Err(PywrError::ParameterNameAlreadyExists( @@ -1402,7 +1241,7 @@ impl Model { Ok(parameter_index) } - /// Add a `parameters::IndexParameter` to the model + /// Add a `parameters::IndexParameter` to the network pub fn add_index_parameter( &mut self, index_parameter: Box, @@ -1423,7 +1262,7 @@ impl Model { Ok(parameter_index) } - /// Add a `parameters::MultiValueParameter` to the model + /// Add a `parameters::MultiValueParameter` to the network pub fn add_multi_value_parameter( &mut self, parameter: Box, @@ -1445,7 +1284,7 @@ impl Model { Ok(parameter_index) } - /// Add a [`MetricSet`] to the model. + /// Add a [`MetricSet`] to the network. pub fn add_metric_set(&mut self, metric_set: MetricSet) -> Result { if let Ok(_) = self.get_metric_set_by_name(&metric_set.name()) { return Err(PywrError::MetricSetNameAlreadyExists(metric_set.name().to_string())); @@ -1477,7 +1316,7 @@ impl Model { } } - /// Add a `recorders::Recorder` to the model + /// Add a `recorders::Recorder` to the network pub fn add_recorder(&mut self, recorder: Box) -> Result { // TODO reinstate this check // if let Ok(idx) = self.get_recorder_by_name(&recorder.meta().name) { @@ -1506,7 +1345,7 @@ impl Model { // Next edge index let edge_index = self.edges.push(from_node_index, to_node_index); - // The model can get in a bad state here if the edge is added to the `from_node` + // The network can get in a bad state here if the edge is added to the `from_node` // successfully, but fails on the `to_node`. // Suggest to do a check before attempting to add. 
let from_node = self.nodes.get_mut(&from_node_index)?; @@ -1562,40 +1401,41 @@ impl Model { mod tests { use super::*; use crate::metric::Metric; - use crate::model::Model; + use crate::network::Network; use crate::node::{Constraint, ConstraintValue}; use crate::parameters::{ActivationFunction, ControlCurveInterpolatedParameter, Parameter, VariableParameter}; use crate::recorders::AssertionRecorder; - use crate::scenario::{ScenarioGroupCollection, ScenarioIndex}; + use crate::scenario::{ScenarioDomain, ScenarioGroupCollection, ScenarioIndex}; #[cfg(feature = "clipm")] use crate::solvers::{ClIpmF64Solver, SimdIpmF64Solver}; use crate::solvers::{ClpSolver, ClpSolverSettings}; - use crate::test_utils::{default_timestepper, run_all_solvers, simple_model, simple_storage_model}; - use float_cmp::{approx_eq, assert_approx_eq}; + use crate::test_utils::{run_all_solvers, simple_model, simple_storage_model}; + use float_cmp::assert_approx_eq; use ndarray::{Array, Array2}; + use std::default::Default; use std::ops::Deref; #[test] - fn test_simple_model() { - let mut model = Model::default(); + fn test_simple_network() { + let mut network = Network::default(); - let input_node = model.add_input_node("input", None).unwrap(); - let link_node = model.add_link_node("link", None).unwrap(); - let output_node = model.add_output_node("output", None).unwrap(); + let input_node = network.add_input_node("input", None).unwrap(); + let link_node = network.add_link_node("link", None).unwrap(); + let output_node = network.add_output_node("output", None).unwrap(); assert_eq!(*input_node.deref(), 0); assert_eq!(*link_node.deref(), 1); assert_eq!(*output_node.deref(), 2); - let edge = model.connect_nodes(input_node, link_node).unwrap(); + let edge = network.connect_nodes(input_node, link_node).unwrap(); assert_eq!(*edge.deref(), 0); - let edge = model.connect_nodes(link_node, output_node).unwrap(); + let edge = network.connect_nodes(link_node, output_node).unwrap(); assert_eq!(*edge.deref(), 1); 
// Now assert the internal structure is as expected. - let input_node = model.get_node_by_name("input", None).unwrap(); - let link_node = model.get_node_by_name("link", None).unwrap(); - let output_node = model.get_node_by_name("output", None).unwrap(); + let input_node = network.get_node_by_name("input", None).unwrap(); + let link_node = network.get_node_by_name("link", None).unwrap(); + let output_node = network.get_node_by_name("output", None).unwrap(); assert_eq!(input_node.get_outgoing_edges().unwrap().len(), 1); assert_eq!(link_node.get_incoming_edges().unwrap().len(), 1); assert_eq!(link_node.get_outgoing_edges().unwrap().len(), 1); @@ -1605,34 +1445,34 @@ mod tests { #[test] /// Test the duplicate node names are not permitted. fn test_duplicate_node_name() { - let mut model = Model::default(); + let mut network = Network::default(); - model.add_input_node("my-node", None).unwrap(); + network.add_input_node("my-node", None).unwrap(); // Second add with the same name assert_eq!( - model.add_input_node("my-node", None), + network.add_input_node("my-node", None), Err(PywrError::NodeNameAlreadyExists("my-node".to_string())) ); - model.add_input_node("my-node", Some("a")).unwrap(); + network.add_input_node("my-node", Some("a")).unwrap(); // Second add with the same name assert_eq!( - model.add_input_node("my-node", Some("a")), + network.add_input_node("my-node", Some("a")), Err(PywrError::NodeNameAlreadyExists("my-node".to_string())) ); assert_eq!( - model.add_link_node("my-node", None), + network.add_link_node("my-node", None), Err(PywrError::NodeNameAlreadyExists("my-node".to_string())) ); assert_eq!( - model.add_output_node("my-node", None), + network.add_output_node("my-node", None), Err(PywrError::NodeNameAlreadyExists("my-node".to_string())) ); assert_eq!( - model.add_storage_node( + network.add_storage_node( "my-node", None, StorageInitialVolume::Absolute(10.0), @@ -1644,16 +1484,16 @@ mod tests { } #[test] - /// Test adding a constant parameter to a 
model. + /// Test adding a constant parameter to a network. fn test_constant_parameter() { - let mut model = Model::default(); - let _node_index = model.add_input_node("input", None).unwrap(); + let mut network = Network::default(); + let _node_index = network.add_input_node("input", None).unwrap(); let input_max_flow = parameters::ConstantParameter::new("my-constant", 10.0, None); - let parameter = model.add_parameter(Box::new(input_max_flow)).unwrap(); + let parameter = network.add_parameter(Box::new(input_max_flow)).unwrap(); // assign the new parameter to one of the nodes. - let node = model.get_mut_node_by_name("input", None).unwrap(); + let node = network.get_mut_node_by_name("input", None).unwrap(); node.set_constraint( ConstraintValue::Metric(Metric::ParameterValue(parameter)), Constraint::MaxFlow, @@ -1672,34 +1512,17 @@ mod tests { const NUM_SCENARIOS: usize = 2; let model = simple_model(NUM_SCENARIOS); - let timestepper = default_timestepper(); - let mut timings = RunTimings::default(); - let timesteps = timestepper.timesteps(); - let mut ts_iter = timesteps.iter(); - let (scenario_indices, mut current_state, mut p_internal, _r_internal) = model.setup(×teps).unwrap(); + let mut state = model.setup::(&ClpSolverSettings::default()).unwrap(); - let mut solvers = model.setup_solver::(&ClpSolverSettings::default()).unwrap(); - assert_eq!(current_state.len(), scenario_indices.len()); - - let output_node = model.get_node_by_name("output", None).unwrap(); + let output_node = model.network().get_node_by_name("output", None).unwrap(); for i in 0..2 { - let ts = ts_iter.next().unwrap(); - model - .step( - ts, - &scenario_indices, - &mut solvers, - &mut current_state, - &mut p_internal, - &mut timings, - ) - .unwrap(); + model.step(&mut state, None, &mut timings).unwrap(); for j in 0..NUM_SCENARIOS { - let state_j = current_state.get(j).unwrap(); + let state_j = state.network_state().states.get(j).unwrap(); let output_inflow = state_j .get_network_state() 
.get_node_in_flow(&output_node.index()) @@ -1713,53 +1536,53 @@ mod tests { /// Test running a simple model fn test_run() { let mut model = simple_model(10); - let timestepper = default_timestepper(); // Set-up assertion for "input" node - let idx = model.get_node_by_name("input", None).unwrap().index(); + let idx = model.network().get_node_by_name("input", None).unwrap().index(); let expected = Array::from_shape_fn((366, 10), |(i, j)| (1.0 + i as f64 + j as f64).min(12.0)); let recorder = AssertionRecorder::new("input-flow", Metric::NodeOutFlow(idx), expected.clone(), None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + model.network_mut().add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("link", None).unwrap().index(); + let idx = model.network().get_node_by_name("link", None).unwrap().index(); let recorder = AssertionRecorder::new("link-flow", Metric::NodeOutFlow(idx), expected.clone(), None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + model.network_mut().add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("output", None).unwrap().index(); + let idx = model.network().get_node_by_name("output", None).unwrap().index(); let recorder = AssertionRecorder::new("output-flow", Metric::NodeInFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + model.network_mut().add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_parameter_index_by_name("total-demand").unwrap(); + let idx = model.network().get_parameter_index_by_name("total-demand").unwrap(); let expected = Array2::from_elem((366, 10), 12.0); let recorder = AssertionRecorder::new("total-demand", Metric::ParameterValue(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + model.network_mut().add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } #[test] fn test_run_storage() { let mut model = 
simple_storage_model(); - let timestepper = default_timestepper(); - let idx = model.get_node_by_name("output", None).unwrap().index(); + let network = model.network_mut(); + + let idx = network.get_node_by_name("output", None).unwrap().index(); let expected = Array2::from_shape_fn((15, 10), |(i, _j)| if i < 10 { 10.0 } else { 0.0 }); let recorder = AssertionRecorder::new("output-flow", Metric::NodeInFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("reservoir", None).unwrap().index(); + let idx = network.get_node_by_name("reservoir", None).unwrap().index(); let expected = Array2::from_shape_fn((15, 10), |(i, _j)| (90.0 - 10.0 * i as f64).max(0.0)); let recorder = AssertionRecorder::new("reservoir-volume", Metric::NodeVolume(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } /// Test proportional storage derived metric. 
@@ -1769,10 +1592,9 @@ mod tests { #[test] fn test_storage_proportional_volume() { let mut model = simple_storage_model(); - let timestepper = default_timestepper(); - - let idx = model.get_node_by_name("reservoir", None).unwrap().index(); - let dm_idx = model.add_derived_metric(DerivedMetric::NodeProportionalVolume(idx)); + let network = model.network_mut(); + let idx = network.get_node_by_name("reservoir", None).unwrap().index(); + let dm_idx = network.add_derived_metric(DerivedMetric::NodeProportionalVolume(idx)); // These are the expected values for the proportional volume at the end of the time-step let expected = Array2::from_shape_fn((15, 10), |(i, _j)| (90.0 - 10.0 * i as f64).max(0.0) / 100.0); @@ -1783,7 +1605,7 @@ mod tests { None, None, ); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Set-up a control curve that uses the proportional volume // This should be use the initial proportion (100%) on the first time-step, and then the previous day's end value @@ -1793,14 +1615,14 @@ mod tests { vec![], vec![Metric::Constant(100.0), Metric::Constant(0.0)], ); - let p_idx = model.add_parameter(Box::new(cc)).unwrap(); + let p_idx = network.add_parameter(Box::new(cc)).unwrap(); let expected = Array2::from_shape_fn((15, 10), |(i, _j)| (100.0 - 10.0 * i as f64).max(0.0)); let recorder = AssertionRecorder::new("reservoir-cc", Metric::ParameterValue(p_idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } #[test] @@ -1811,8 +1633,8 @@ mod tests { collection.add_group("Scenarion B", 2); collection.add_group("Scenarion C", 5); - let scenario_indices = collection.scenario_indices(); - let mut iter = scenario_indices.iter(); + let domain: ScenarioDomain = collection.into(); + let mut iter = domain.indices().iter(); // Test generation of scenario 
indices assert_eq!( @@ -1906,8 +1728,8 @@ mod tests { #[test] /// Test the variable API fn test_variable_api() { - let mut model = Model::default(); - let _node_index = model.add_input_node("input", None).unwrap(); + let mut network = Network::default(); + let _node_index = network.add_input_node("input", None).unwrap(); let variable = ActivationFunction::Unit { min: 0.0, max: 10.0 }; let input_max_flow = parameters::ConstantParameter::new("my-constant", 10.0, Some(variable)); @@ -1916,25 +1738,25 @@ mod tests { assert!(input_max_flow.is_f64_variable_active()); assert!(input_max_flow.is_active()); - let input_max_flow_idx = model.add_parameter(Box::new(input_max_flow)).unwrap(); + let input_max_flow_idx = network.add_parameter(Box::new(input_max_flow)).unwrap(); // assign the new parameter to one of the nodes. - let node = model.get_mut_node_by_name("input", None).unwrap(); + let node = network.get_mut_node_by_name("input", None).unwrap(); node.set_constraint( ConstraintValue::Metric(Metric::ParameterValue(input_max_flow_idx)), Constraint::MaxFlow, ) .unwrap(); - let variable_values = model.get_f64_parameter_variable_values(); + let variable_values = network.get_f64_parameter_variable_values(); assert_eq!(variable_values, vec![10.0]); // Update the variable values - model + network .set_f64_parameter_variable_values(input_max_flow_idx, &[5.0]) .unwrap(); - let variable_values = model.get_f64_parameter_variable_values(); + let variable_values = network.get_f64_parameter_variable_values(); assert_eq!(variable_values, vec![5.0]); } } diff --git a/pywr-core/src/node.rs b/pywr-core/src/node.rs index 3cce421f..86ef41c4 100644 --- a/pywr-core/src/node.rs +++ b/pywr-core/src/node.rs @@ -1,6 +1,6 @@ use crate::edge::EdgeIndex; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::state::{NodeState, State}; use crate::timestep::Timestep; use crate::virtual_storage::VirtualStorageIndex; @@ -352,13 +352,13 @@ impl Node { // } // } - pub 
fn before(&self, timestep: &Timestep, model: &Model, state: &mut State) -> Result<(), PywrError> { + pub fn before(&self, timestep: &Timestep, network: &Network, state: &mut State) -> Result<(), PywrError> { // Currently only storage nodes do something during before match self { Node::Input(_) => Ok(()), Node::Output(_) => Ok(()), Node::Link(_) => Ok(()), - Node::Storage(n) => n.before(timestep, model, state), + Node::Storage(n) => n.before(timestep, network, state), } } @@ -395,11 +395,11 @@ impl Node { } } - pub fn get_current_min_flow(&self, model: &Model, state: &State) -> Result { + pub fn get_current_min_flow(&self, network: &Network, state: &State) -> Result { match self { - Self::Input(n) => n.get_min_flow(model, state), - Self::Link(n) => n.get_min_flow(model, state), - Self::Output(n) => n.get_min_flow(model, state), + Self::Input(n) => n.get_min_flow(network, state), + Self::Link(n) => n.get_min_flow(network, state), + Self::Output(n) => n.get_min_flow(network, state), Self::Storage(_) => Err(PywrError::FlowConstraintsUndefined), } } @@ -422,11 +422,11 @@ impl Node { } } - pub fn get_current_max_flow(&self, model: &Model, state: &State) -> Result { + pub fn get_current_max_flow(&self, network: &Network, state: &State) -> Result { match self { - Self::Input(n) => n.get_max_flow(model, state), - Self::Link(n) => n.get_max_flow(model, state), - Self::Output(n) => n.get_max_flow(model, state), + Self::Input(n) => n.get_max_flow(network, state), + Self::Link(n) => n.get_max_flow(network, state), + Self::Output(n) => n.get_max_flow(network, state), Self::Storage(_) => Err(PywrError::FlowConstraintsUndefined), } } @@ -440,10 +440,10 @@ impl Node { } } - pub fn get_current_flow_bounds(&self, model: &Model, state: &State) -> Result<(f64, f64), PywrError> { + pub fn get_current_flow_bounds(&self, network: &Network, state: &State) -> Result<(f64, f64), PywrError> { match ( - self.get_current_min_flow(model, state), - self.get_current_max_flow(model, state), + 
self.get_current_min_flow(network, state), + self.get_current_max_flow(network, state), ) { (Ok(min_flow), Ok(max_flow)) => Ok((min_flow, max_flow)), _ => Err(PywrError::FlowConstraintsUndefined), @@ -462,12 +462,12 @@ impl Node { } } - pub fn get_current_min_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_current_min_volume(&self, network: &Network, state: &State) -> Result { match self { Self::Input(_) => Err(PywrError::StorageConstraintsUndefined), Self::Link(_) => Err(PywrError::StorageConstraintsUndefined), Self::Output(_) => Err(PywrError::StorageConstraintsUndefined), - Self::Storage(n) => n.get_min_volume(model, state), + Self::Storage(n) => n.get_min_volume(network, state), } } @@ -483,29 +483,33 @@ impl Node { } } - pub fn get_current_max_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_current_max_volume(&self, network: &Network, state: &State) -> Result { match self { Self::Input(_) => Err(PywrError::StorageConstraintsUndefined), Self::Link(_) => Err(PywrError::StorageConstraintsUndefined), Self::Output(_) => Err(PywrError::StorageConstraintsUndefined), - Self::Storage(n) => n.get_max_volume(model, state), + Self::Storage(n) => n.get_max_volume(network, state), } } - pub fn get_current_volume_bounds(&self, model: &Model, state: &State) -> Result<(f64, f64), PywrError> { + pub fn get_current_volume_bounds(&self, network: &Network, state: &State) -> Result<(f64, f64), PywrError> { match ( - self.get_current_min_volume(model, state), - self.get_current_max_volume(model, state), + self.get_current_min_volume(network, state), + self.get_current_max_volume(network, state), ) { (Ok(min_vol), Ok(max_vol)) => Ok((min_vol, max_vol)), _ => Err(PywrError::FlowConstraintsUndefined), } } - pub fn get_current_available_volume_bounds(&self, model: &Model, state: &State) -> Result<(f64, f64), PywrError> { + pub fn get_current_available_volume_bounds( + &self, + network: &Network, + state: &State, + ) -> Result<(f64, f64), 
PywrError> { match ( - self.get_current_min_volume(model, state), - self.get_current_max_volume(model, state), + self.get_current_min_volume(network, state), + self.get_current_max_volume(network, state), ) { (Ok(min_vol), Ok(max_vol)) => { let current_volume = state.get_network_state().get_node_volume(&self.index())?; @@ -539,21 +543,21 @@ impl Node { Ok(()) } - pub fn get_outgoing_cost(&self, model: &Model, state: &State) -> Result { + pub fn get_outgoing_cost(&self, network: &Network, state: &State) -> Result { match self { - Self::Input(n) => n.get_cost(model, state), - Self::Link(n) => Ok(n.get_cost(model, state)? / 2.0), - Self::Output(n) => n.get_cost(model, state), - Self::Storage(n) => Ok(-n.get_cost(model, state)?), + Self::Input(n) => n.get_cost(network, state), + Self::Link(n) => Ok(n.get_cost(network, state)? / 2.0), + Self::Output(n) => n.get_cost(network, state), + Self::Storage(n) => Ok(-n.get_cost(network, state)?), } } - pub fn get_incoming_cost(&self, model: &Model, state: &State) -> Result { + pub fn get_incoming_cost(&self, network: &Network, state: &State) -> Result { match self { - Self::Input(n) => n.get_cost(model, state), - Self::Link(n) => Ok(n.get_cost(model, state)? / 2.0), - Self::Output(n) => n.get_cost(model, state), - Self::Storage(n) => n.get_cost(model, state), + Self::Input(n) => n.get_cost(network, state), + Self::Link(n) => Ok(n.get_cost(network, state)? / 2.0), + Self::Output(n) => n.get_cost(network, state), + Self::Storage(n) => n.get_cost(network, state), } } } @@ -610,21 +614,21 @@ impl FlowConstraints { /// Return the current minimum flow from the parameter state /// /// Defaults to zero if no parameter is defined. 
- pub(crate) fn get_min_flow(&self, model: &Model, state: &State) -> Result { + pub(crate) fn get_min_flow(&self, network: &Network, state: &State) -> Result { match &self.min_flow { ConstraintValue::None => Ok(0.0), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } /// Return the current maximum flow from the parameter state /// /// Defaults to f64::MAX if no parameter is defined. - pub(crate) fn get_max_flow(&self, model: &Model, state: &State) -> Result { + pub(crate) fn get_max_flow(&self, network: &Network, state: &State) -> Result { match &self.max_flow { ConstraintValue::None => Ok(f64::MAX), // TODO should this return infinity? ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } @@ -646,21 +650,21 @@ impl StorageConstraints { /// Return the current minimum volume from the parameter state /// /// Defaults to zero if no parameter is defined. - pub fn get_min_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_min_volume(&self, network: &Network, state: &State) -> Result { match &self.min_volume { ConstraintValue::None => Ok(f64::MAX), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } /// Return the current maximum volume from the metric state /// /// Defaults to f64::MAX if no parameter is defined. 
- pub fn get_max_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_max_volume(&self, network: &Network, state: &State) -> Result { match &self.max_volume { ConstraintValue::None => Ok(f64::MAX), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } } @@ -684,19 +688,19 @@ impl Default for NodeCost { } impl NodeCost { - fn get_cost(&self, model: &Model, state: &State) -> Result { + fn get_cost(&self, network: &Network, state: &State) -> Result { let local_cost = match &self.local { ConstraintValue::None => Ok(0.0), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), }?; let vs_costs: Vec = self .virtual_storage_nodes .iter() .map(|idx| { - let vs = model.get_virtual_storage_node(idx)?; - vs.get_cost(model, state) + let vs = network.get_virtual_storage_node(idx)?; + vs.get_cost(network, state) }) .collect::>()?; @@ -743,20 +747,20 @@ impl InputNode { fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { self.cost.agg_func = agg_func } - fn get_cost(&self, model: &Model, state: &State) -> Result { - self.cost.get_cost(model, state) + fn get_cost(&self, network: &Network, state: &State) -> Result { + self.cost.get_cost(network, state) } fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; } - fn get_min_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_min_flow(model, state) + fn get_min_flow(&self, network: &Network, state: &State) -> Result { + self.flow_constraints.get_min_flow(network, state) } fn set_max_flow(&mut self, value: ConstraintValue) { self.flow_constraints.max_flow = value; } - fn get_max_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_max_flow(model, state) + fn get_max_flow(&self, network: &Network, state: &State) 
-> Result { + self.flow_constraints.get_max_flow(network, state) } fn is_max_flow_unconstrained(&self) -> bool { self.flow_constraints.is_max_flow_unconstrained() @@ -786,8 +790,8 @@ impl OutputNode { fn set_cost(&mut self, value: ConstraintValue) { self.cost.local = value } - fn get_cost(&self, model: &Model, state: &State) -> Result { - self.cost.get_cost(model, state) + fn get_cost(&self, network: &Network, state: &State) -> Result { + self.cost.get_cost(network, state) } fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { self.cost.agg_func = agg_func @@ -795,14 +799,14 @@ impl OutputNode { fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; } - fn get_min_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_min_flow(model, state) + fn get_min_flow(&self, network: &Network, state: &State) -> Result { + self.flow_constraints.get_min_flow(network, state) } fn set_max_flow(&mut self, value: ConstraintValue) { self.flow_constraints.max_flow = value; } - fn get_max_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_max_flow(model, state) + fn get_max_flow(&self, network: &Network, state: &State) -> Result { + self.flow_constraints.get_max_flow(network, state) } fn is_max_flow_unconstrained(&self) -> bool { self.flow_constraints.is_max_flow_unconstrained() @@ -837,20 +841,20 @@ impl LinkNode { fn set_cost_agg_func(&mut self, agg_func: CostAggFunc) { self.cost.agg_func = agg_func } - fn get_cost(&self, model: &Model, state: &State) -> Result { - self.cost.get_cost(model, state) + fn get_cost(&self, network: &Network, state: &State) -> Result { + self.cost.get_cost(network, state) } fn set_min_flow(&mut self, value: ConstraintValue) { self.flow_constraints.min_flow = value; } - fn get_min_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_min_flow(model, state) + fn get_min_flow(&self, network: &Network, state: &State) -> Result { + 
self.flow_constraints.get_min_flow(network, state) } fn set_max_flow(&mut self, value: ConstraintValue) { self.flow_constraints.max_flow = value; } - fn get_max_flow(&self, model: &Model, state: &State) -> Result { - self.flow_constraints.get_max_flow(model, state) + fn get_max_flow(&self, network: &Network, state: &State) -> Result { + self.flow_constraints.get_max_flow(network, state) } fn is_max_flow_unconstrained(&self) -> bool { self.flow_constraints.is_max_flow_unconstrained() @@ -898,13 +902,13 @@ impl StorageNode { } } - pub fn before(&self, timestep: &Timestep, model: &Model, state: &mut State) -> Result<(), PywrError> { + pub fn before(&self, timestep: &Timestep, network: &Network, state: &mut State) -> Result<(), PywrError> { // Set the initial volume if it is the first timestep. if timestep.is_first() { let volume = match &self.initial_volume { StorageInitialVolume::Absolute(iv) => *iv, StorageInitialVolume::Proportional(ipc) => { - let max_volume = self.get_max_volume(model, state)?; + let max_volume = self.get_max_volume(network, state)?; max_volume * ipc } }; @@ -917,26 +921,26 @@ impl StorageNode { fn set_cost(&mut self, value: ConstraintValue) { self.cost = value } - fn get_cost(&self, model: &Model, state: &State) -> Result { + fn get_cost(&self, network: &Network, state: &State) -> Result { match &self.cost { ConstraintValue::None => Ok(0.0), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } fn set_min_volume(&mut self, value: ConstraintValue) { // TODO use a set_min_volume method self.storage_constraints.min_volume = value; } - fn get_min_volume(&self, model: &Model, state: &State) -> Result { - self.storage_constraints.get_min_volume(model, state) + fn get_min_volume(&self, network: &Network, state: &State) -> Result { + self.storage_constraints.get_min_volume(network, state) } fn set_max_volume(&mut self, value: ConstraintValue) { // 
TODO use a set_min_volume method self.storage_constraints.max_volume = value; } - fn get_max_volume(&self, model: &Model, state: &State) -> Result { - self.storage_constraints.get_max_volume(model, state) + fn get_max_volume(&self, network: &Network, state: &State) -> Result { + self.storage_constraints.get_max_volume(network, state) } fn add_incoming_edge(&mut self, edge: EdgeIndex) { self.incoming_edges.push(edge); diff --git a/pywr-core/src/parameters/aggregated.rs b/pywr-core/src/parameters/aggregated.rs index 2ef26cc5..6b00c695 100644 --- a/pywr-core/src/parameters/aggregated.rs +++ b/pywr-core/src/parameters/aggregated.rs @@ -1,6 +1,6 @@ use super::PywrError; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -58,7 +58,7 @@ impl Parameter for AggregatedParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/aggregated_index.rs b/pywr-core/src/parameters/aggregated_index.rs index d884141d..0ff136db 100644 --- a/pywr-core/src/parameters/aggregated_index.rs +++ b/pywr-core/src/parameters/aggregated_index.rs @@ -1,7 +1,7 @@ /// AggregatedIndexParameter /// use super::PywrError; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{IndexParameter, IndexValue, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -58,7 +58,7 @@ impl IndexParameter for AggregatedIndexParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/array.rs b/pywr-core/src/parameters/array.rs index e0d143ae..bea514b4 100644 --- a/pywr-core/src/parameters/array.rs +++ 
b/pywr-core/src/parameters/array.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -34,7 +34,7 @@ impl Parameter for Array1Parameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { @@ -78,7 +78,7 @@ impl Parameter for Array2Parameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/asymmetric.rs b/pywr-core/src/parameters/asymmetric.rs index 65658de0..c66b6fd6 100644 --- a/pywr-core/src/parameters/asymmetric.rs +++ b/pywr-core/src/parameters/asymmetric.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{downcast_internal_state, IndexParameter, IndexValue, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -38,7 +38,7 @@ impl IndexParameter for AsymmetricSwitchIndexParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/constant.rs b/pywr-core/src/parameters/constant.rs index 5111992d..dd28cfdb 100644 --- a/pywr-core/src/parameters/constant.rs +++ b/pywr-core/src/parameters/constant.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{downcast_internal_state, ActivationFunction, Parameter, ParameterMeta, VariableParameter}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -43,7 +43,7 @@ impl Parameter for ConstantParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, internal_state: &mut Option>, ) -> Result { 
diff --git a/pywr-core/src/parameters/control_curves/apportion.rs b/pywr-core/src/parameters/control_curves/apportion.rs index a257274d..5fbf6d1a 100644 --- a/pywr-core/src/parameters/control_curves/apportion.rs +++ b/pywr-core/src/parameters/control_curves/apportion.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{MultiValueParameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::{MultiValue, State}; @@ -40,7 +40,7 @@ impl MultiValueParameter for ApportionParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/control_curves/index.rs b/pywr-core/src/parameters/control_curves/index.rs index 04790b06..7a318ed7 100644 --- a/pywr-core/src/parameters/control_curves/index.rs +++ b/pywr-core/src/parameters/control_curves/index.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{IndexParameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -31,7 +31,7 @@ impl IndexParameter for ControlCurveIndexParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/control_curves/interpolated.rs b/pywr-core/src/parameters/control_curves/interpolated.rs index 42381d45..06a29396 100644 --- a/pywr-core/src/parameters/control_curves/interpolated.rs +++ b/pywr-core/src/parameters/control_curves/interpolated.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::interpolate::interpolate; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; @@ -37,7 +37,7 @@ impl Parameter for 
ControlCurveInterpolatedParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/control_curves/piecewise.rs b/pywr-core/src/parameters/control_curves/piecewise.rs index 1e83fc53..44841891 100644 --- a/pywr-core/src/parameters/control_curves/piecewise.rs +++ b/pywr-core/src/parameters/control_curves/piecewise.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::interpolate::interpolate; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; @@ -48,7 +48,7 @@ impl Parameter for PiecewiseInterpolatedParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { @@ -84,7 +84,7 @@ mod test { // Create an artificial volume series to use for the interpolation test let volume = Array1Parameter::new("test-x", Array1::linspace(1.0, 0.0, 21), None); - let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); + let volume_idx = model.network_mut().add_parameter(Box::new(volume)).unwrap(); let parameter = PiecewiseInterpolatedParameter::new( "test-parameter", diff --git a/pywr-core/src/parameters/control_curves/simple.rs b/pywr-core/src/parameters/control_curves/simple.rs index 6f724c59..2fe44958 100644 --- a/pywr-core/src/parameters/control_curves/simple.rs +++ b/pywr-core/src/parameters/control_curves/simple.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -36,7 +36,7 @@ impl Parameter for ControlCurveParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut 
Option>, ) -> Result { diff --git a/pywr-core/src/parameters/control_curves/volume_between.rs b/pywr-core/src/parameters/control_curves/volume_between.rs index 79907eec..79be39d1 100644 --- a/pywr-core/src/parameters/control_curves/volume_between.rs +++ b/pywr-core/src/parameters/control_curves/volume_between.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -38,20 +38,20 @@ impl Parameter for VolumeBetweenControlCurvesParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + network: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { - let total = self.total.get_value(model, state)?; + let total = self.total.get_value(network, state)?; let lower = self .lower .as_ref() - .map_or(Ok(0.0), |metric| metric.get_value(model, state))?; + .map_or(Ok(0.0), |metric| metric.get_value(network, state))?; let upper = self .upper .as_ref() - .map_or(Ok(1.0), |metric| metric.get_value(model, state))?; + .map_or(Ok(1.0), |metric| metric.get_value(network, state))?; Ok(total * (upper - lower)) } diff --git a/pywr-core/src/parameters/delay.rs b/pywr-core/src/parameters/delay.rs index d247e63a..06ec02d5 100644 --- a/pywr-core/src/parameters/delay.rs +++ b/pywr-core/src/parameters/delay.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{downcast_internal_state, Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -48,7 +48,7 @@ impl Parameter for DelayParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { @@ -68,7 +68,7 @@ impl Parameter for DelayParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: 
&Network, state: &State, internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -99,7 +99,7 @@ mod test { let volumes = Array1::linspace(1.0, 0.0, 21); let volume = Array1Parameter::new("test-x", volumes.clone(), None); - let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); + let volume_idx = model.network_mut().add_parameter(Box::new(volume)).unwrap(); const DELAY: usize = 3; // 3 time-step delay let parameter = DelayParameter::new( diff --git a/pywr-core/src/parameters/discount_factor.rs b/pywr-core/src/parameters/discount_factor.rs index b06c024e..0965927a 100644 --- a/pywr-core/src/parameters/discount_factor.rs +++ b/pywr-core/src/parameters/discount_factor.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -34,12 +34,12 @@ impl Parameter for DiscountFactorParameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + network: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { let year = timestep.date.year() - self.base_year; - let rate = self.discount_rate.get_value(model, state)?; + let rate = self.discount_rate.get_value(network, state)?; let factor = 1.0 / (1.0 + rate).powi(year); Ok(factor) @@ -57,12 +57,13 @@ mod test { #[test] fn test_basic() { let mut model = simple_model(1); + let mut network = model.network_mut(); // Create an artificial volume series to use for the delay test let volumes = Array1::linspace(1.0, 0.0, 21); let volume = Array1Parameter::new("test-x", volumes.clone(), None); - let volume_idx = model.add_parameter(Box::new(volume)).unwrap(); + let volume_idx = network.add_parameter(Box::new(volume)).unwrap(); const DELAY: usize = 3; // 3 time-step delay let parameter = DiscountFactorParameter::new( diff --git a/pywr-core/src/parameters/division.rs b/pywr-core/src/parameters/division.rs index f3652fba..14f80f27 
100644 --- a/pywr-core/src/parameters/division.rs +++ b/pywr-core/src/parameters/division.rs @@ -1,6 +1,6 @@ use super::PywrError; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -35,7 +35,7 @@ impl Parameter for DivisionParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/indexed_array.rs b/pywr-core/src/parameters/indexed_array.rs index a40165b1..7fa70a6c 100644 --- a/pywr-core/src/parameters/indexed_array.rs +++ b/pywr-core/src/parameters/indexed_array.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{IndexValue, Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -34,7 +34,7 @@ impl Parameter for IndexedArrayParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/interpolated.rs b/pywr-core/src/parameters/interpolated.rs index 909da024..3cbe7d54 100644 --- a/pywr-core/src/parameters/interpolated.rs +++ b/pywr-core/src/parameters/interpolated.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::interpolate::linear_interpolation; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; @@ -38,19 +38,19 @@ impl Parameter for InterpolatedParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + network: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { // Current value - let x = self.x.get_value(model, state)?; + let x = self.x.get_value(network, 
state)?; let points = self .points .iter() .map(|(x, f)| { - let xp = x.get_value(model, state)?; - let fp = f.get_value(model, state)?; + let xp = x.get_value(network, state)?; + let fp = f.get_value(network, state)?; Ok::<(f64, f64), PywrError>((xp, fp)) }) diff --git a/pywr-core/src/parameters/max.rs b/pywr-core/src/parameters/max.rs index dcd02d20..48dfbcac 100644 --- a/pywr-core/src/parameters/max.rs +++ b/pywr-core/src/parameters/max.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use std::any::Any; @@ -35,7 +35,7 @@ impl Parameter for MaxParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/min.rs b/pywr-core/src/parameters/min.rs index 504f9e60..797324ed 100644 --- a/pywr-core/src/parameters/min.rs +++ b/pywr-core/src/parameters/min.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use std::any::Any; @@ -35,7 +35,7 @@ impl Parameter for MinParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/mod.rs b/pywr-core/src/parameters/mod.rs index 1c724963..507ab23b 100644 --- a/pywr-core/src/parameters/mod.rs +++ b/pywr-core/src/parameters/mod.rs @@ -27,7 +27,7 @@ use std::any::Any; // Re-imports pub use self::rhai::RhaiParameter; use super::PywrError; -use crate::model::Model; +use crate::network::Network; use crate::scenario::ScenarioIndex; use crate::state::{MultiValue, State}; use crate::timestep::Timestep; @@ -181,7 +181,7 @@ pub trait Parameter: Send + Sync { &self, timestep: 
&Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result; @@ -190,7 +190,7 @@ pub trait Parameter: Send + Sync { &self, #[allow(unused_variables)] timestep: &Timestep, #[allow(unused_variables)] scenario_index: &ScenarioIndex, - #[allow(unused_variables)] model: &Model, + #[allow(unused_variables)] model: &Network, #[allow(unused_variables)] state: &State, #[allow(unused_variables)] internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -262,7 +262,7 @@ pub trait IndexParameter: Send + Sync { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result; @@ -271,7 +271,7 @@ pub trait IndexParameter: Send + Sync { &self, #[allow(unused_variables)] timestep: &Timestep, #[allow(unused_variables)] scenario_index: &ScenarioIndex, - #[allow(unused_variables)] model: &Model, + #[allow(unused_variables)] model: &Network, #[allow(unused_variables)] state: &State, #[allow(unused_variables)] internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -296,7 +296,7 @@ pub trait MultiValueParameter: Send + Sync { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result; @@ -305,7 +305,7 @@ pub trait MultiValueParameter: Send + Sync { &self, #[allow(unused_variables)] timestep: &Timestep, #[allow(unused_variables)] scenario_index: &ScenarioIndex, - #[allow(unused_variables)] model: &Model, + #[allow(unused_variables)] model: &Network, #[allow(unused_variables)] state: &State, #[allow(unused_variables)] internal_state: &mut Option>, ) -> Result<(), PywrError> { diff --git a/pywr-core/src/parameters/negative.rs b/pywr-core/src/parameters/negative.rs index 71e3e0c1..fbc17c7c 100644 --- a/pywr-core/src/parameters/negative.rs +++ b/pywr-core/src/parameters/negative.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; 
-use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -32,7 +32,7 @@ impl Parameter for NegativeParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/offset.rs b/pywr-core/src/parameters/offset.rs index c0d13d86..d12cf77d 100644 --- a/pywr-core/src/parameters/offset.rs +++ b/pywr-core/src/parameters/offset.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{ActivationFunction, Parameter, ParameterMeta, VariableParameter}; use crate::scenario::ScenarioIndex; use std::any::Any; @@ -37,7 +37,7 @@ impl Parameter for OffsetParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/polynomial.rs b/pywr-core/src/parameters/polynomial.rs index 2f61c3ce..5499b2d0 100644 --- a/pywr-core/src/parameters/polynomial.rs +++ b/pywr-core/src/parameters/polynomial.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -38,7 +38,7 @@ impl Parameter for Polynomial1DParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/profiles/daily.rs b/pywr-core/src/parameters/profiles/daily.rs index a3c835a1..1a806c8f 100644 --- a/pywr-core/src/parameters/profiles/daily.rs +++ b/pywr-core/src/parameters/profiles/daily.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use 
crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -31,7 +31,7 @@ impl Parameter for DailyProfileParameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/profiles/monthly.rs b/pywr-core/src/parameters/profiles/monthly.rs index fe4fd4c6..143bc497 100644 --- a/pywr-core/src/parameters/profiles/monthly.rs +++ b/pywr-core/src/parameters/profiles/monthly.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterIndex, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -69,7 +69,7 @@ impl Parameter for MonthlyProfileParameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { @@ -102,7 +102,7 @@ pub struct MonthlyProfileVariable { #[allow(dead_code)] impl MonthlyProfileVariable { - fn update(&self, model: &mut Model, new_values: &[f64]) { + fn update(&self, model: &mut Network, new_values: &[f64]) { let p = model.get_mut_parameter(&self.index).unwrap(); let profile = p.as_any_mut().downcast_mut::().unwrap(); diff --git a/pywr-core/src/parameters/profiles/rbf.rs b/pywr-core/src/parameters/profiles/rbf.rs index fa334cf2..9cf25f34 100644 --- a/pywr-core/src/parameters/profiles/rbf.rs +++ b/pywr-core/src/parameters/profiles/rbf.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{downcast_internal_state, Parameter, ParameterMeta, VariableParameter}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -70,7 +70,7 @@ impl Parameter for RbfProfileParameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _network: &Network, _state: &State, internal_state: &mut Option>, ) -> Result 
{ diff --git a/pywr-core/src/parameters/profiles/uniform_drawdown.rs b/pywr-core/src/parameters/profiles/uniform_drawdown.rs index 8ed0b6cf..b8eeb125 100644 --- a/pywr-core/src/parameters/profiles/uniform_drawdown.rs +++ b/pywr-core/src/parameters/profiles/uniform_drawdown.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -43,7 +43,7 @@ impl Parameter for UniformDrawdownProfileParameter { &self, timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/py.rs b/pywr-core/src/parameters/py.rs index 3b1ddba3..5e6bb9a0 100644 --- a/pywr-core/src/parameters/py.rs +++ b/pywr-core/src/parameters/py.rs @@ -1,6 +1,6 @@ use super::{IndexValue, Parameter, ParameterMeta, PywrError, Timestep}; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{downcast_internal_state, MultiValueParameter}; use crate::scenario::ScenarioIndex; use crate::state::{MultiValue, State}; @@ -47,7 +47,7 @@ impl PyParameter { } } - fn get_metrics_dict<'py>(&self, model: &Model, state: &State, py: Python<'py>) -> Result<&'py PyDict, PywrError> { + fn get_metrics_dict<'py>(&self, model: &Network, state: &State, py: Python<'py>) -> Result<&'py PyDict, PywrError> { let metric_values: Vec<(&str, f64)> = self .metrics .iter() @@ -108,7 +108,7 @@ impl Parameter for PyParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { @@ -140,7 +140,7 @@ impl Parameter for PyParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -210,7 +210,7 @@ impl 
MultiValueParameter for PyParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { @@ -272,7 +272,7 @@ impl MultiValueParameter for PyParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -309,6 +309,7 @@ impl MultiValueParameter for PyParameter { mod tests { use super::*; use crate::test_utils::default_timestepper; + use crate::timestep::TimeDomain; use float_cmp::assert_approx_eq; #[test] @@ -342,7 +343,8 @@ class MyParameter: let param = PyParameter::new("my-parameter", class, args, kwargs, &HashMap::new(), &HashMap::new()); let timestepper = default_timestepper(); - let timesteps = timestepper.timesteps(); + let time: TimeDomain = timestepper.into(); + let timesteps = time.timesteps(); let scenario_indices = [ ScenarioIndex { @@ -355,16 +357,16 @@ class MyParameter: }, ]; - let state = State::new(vec![], 0, vec![], 1, 0, 0, 0); + let state = State::new(vec![], 0, vec![], 1, 0, 0, 0, 0); let mut internal_p_states: Vec<_> = scenario_indices .iter() .map(|si| Parameter::setup(¶m, ×teps, si).expect("Could not setup the PyParameter")) .collect(); - let model = Model::default(); + let model = Network::default(); - for ts in ×teps { + for ts in timesteps { for (si, internal) in scenario_indices.iter().zip(internal_p_states.iter_mut()) { let value = Parameter::compute(¶m, ts, si, &model, &state, internal).unwrap(); @@ -410,7 +412,8 @@ class MyParameter: let param = PyParameter::new("my-parameter", class, args, kwargs, &HashMap::new(), &HashMap::new()); let timestepper = default_timestepper(); - let timesteps = timestepper.timesteps(); + let time: TimeDomain = timestepper.into(); + let timesteps = time.timesteps(); let scenario_indices = [ ScenarioIndex { @@ -423,16 +426,16 @@ class MyParameter: }, ]; - let state = State::new(vec![], 
0, vec![], 1, 0, 0, 0); + let state = State::new(vec![], 0, vec![], 1, 0, 0, 0, 0); let mut internal_p_states: Vec<_> = scenario_indices .iter() .map(|si| MultiValueParameter::setup(¶m, ×teps, si).expect("Could not setup the PyParameter")) .collect(); - let model = Model::default(); + let model = Network::default(); - for ts in ×teps { + for ts in timesteps { for (si, internal) in scenario_indices.iter().zip(internal_p_states.iter_mut()) { let value = MultiValueParameter::compute(¶m, ts, si, &model, &state, internal).unwrap(); diff --git a/pywr-core/src/parameters/rhai.rs b/pywr-core/src/parameters/rhai.rs index e88e6adc..9341b498 100644 --- a/pywr-core/src/parameters/rhai.rs +++ b/pywr-core/src/parameters/rhai.rs @@ -1,6 +1,6 @@ use super::{IndexValue, Parameter, ParameterMeta, PywrError, Timestep}; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::downcast_internal_state; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -84,7 +84,7 @@ impl Parameter for RhaiParameter { &self, timestep: &Timestep, scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { @@ -119,6 +119,7 @@ impl Parameter for RhaiParameter { mod tests { use super::*; use crate::test_utils::default_timestepper; + use crate::timestep::TimeDomain; use float_cmp::assert_approx_eq; #[test] @@ -148,7 +149,8 @@ mod tests { ); let timestepper = default_timestepper(); - let timesteps = timestepper.timesteps(); + let time: TimeDomain = timestepper.into(); + let timesteps = time.timesteps(); let scenario_indices = [ ScenarioIndex { @@ -161,16 +163,16 @@ mod tests { }, ]; - let state = State::new(vec![], 0, vec![], 1, 0, 0, 0); + let state = State::new(vec![], 0, vec![], 1, 0, 0, 0, 0); let mut internal_p_states: Vec<_> = scenario_indices .iter() .map(|si| param.setup(×teps, si).expect("Could not setup the PyParameter")) .collect(); - let model = Model::default(); + let 
model = Network::default(); - for ts in ×teps { + for ts in timesteps { for (si, internal) in scenario_indices.iter().zip(internal_p_states.iter_mut()) { let value = param.compute(ts, si, &model, &state, internal).unwrap(); diff --git a/pywr-core/src/parameters/simple_wasm.rs b/pywr-core/src/parameters/simple_wasm.rs index a5f2e9f6..4cac60e7 100644 --- a/pywr-core/src/parameters/simple_wasm.rs +++ b/pywr-core/src/parameters/simple_wasm.rs @@ -1,5 +1,5 @@ use super::{Parameter, ParameterMeta, PywrError, Timestep}; -use crate::model::Model; +use crate::network::Network; use crate::scenario::ScenarioIndex; use crate::state::State; use crate::ParameterIndex; @@ -78,7 +78,7 @@ impl Parameter for SimpleWasmParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/threshold.rs b/pywr-core/src/parameters/threshold.rs index 3ec20db6..28a3b170 100644 --- a/pywr-core/src/parameters/threshold.rs +++ b/pywr-core/src/parameters/threshold.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::parameters::{IndexParameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -70,7 +70,7 @@ impl IndexParameter for ThresholdParameter { &self, _timestep: &Timestep, _scenario_index: &ScenarioIndex, - model: &Model, + model: &Network, state: &State, internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/parameters/vector.rs b/pywr-core/src/parameters/vector.rs index 44f2a319..dd7fc80c 100644 --- a/pywr-core/src/parameters/vector.rs +++ b/pywr-core/src/parameters/vector.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::parameters::{Parameter, ParameterMeta}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -31,7 +31,7 @@ impl Parameter for VectorParameter { &self, timestep: &Timestep, 
_scenario_index: &ScenarioIndex, - _model: &Model, + _model: &Network, _state: &State, _internal_state: &mut Option>, ) -> Result { diff --git a/pywr-core/src/recorders/csv.rs b/pywr-core/src/recorders/csv.rs index ea3f6223..cefb80a4 100644 --- a/pywr-core/src/recorders/csv.rs +++ b/pywr-core/src/recorders/csv.rs @@ -1,6 +1,6 @@ use super::{PywrError, Recorder, RecorderMeta, Timestep}; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::recorders::metric_set::MetricSetIndex; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -38,7 +38,7 @@ impl Recorder for CSVRecorder { &self, _timesteps: &[Timestep], scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, ) -> Result>, PywrError> { let mut writer = csv::Writer::from_path(&self.filename).map_err(|e| PywrError::CSVError(e.to_string()))?; @@ -123,6 +123,9 @@ impl Recorder for CSVRecorder { sub_name.clone().unwrap_or("".to_string()), "inflow".to_string(), ), + Metric::InterNetworkTransfer(_) => { + continue; // TODO + } }; // Add entries for each scenario @@ -154,7 +157,7 @@ impl Recorder for CSVRecorder { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], internal_state: &mut Option>, ) -> Result<(), PywrError> { diff --git a/pywr-core/src/recorders/hdf.rs b/pywr-core/src/recorders/hdf.rs index 9bd0f148..3e503cdf 100644 --- a/pywr-core/src/recorders/hdf.rs +++ b/pywr-core/src/recorders/hdf.rs @@ -1,6 +1,6 @@ use super::{PywrError, Recorder, RecorderMeta, Timestep}; use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::recorders::MetricSetIndex; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -61,7 +61,7 @@ impl Recorder for HDF5Recorder { &self, timesteps: &[Timestep], scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, ) -> Result>, PywrError> { let file = match hdf5::File::create(&self.filename) { Ok(f) 
=> f, @@ -130,6 +130,9 @@ impl Recorder for HDF5Recorder { Metric::MultiNodeInFlow { name, sub_name, .. } => { require_node_dataset(root_grp, shape, name, sub_name.as_deref(), "inflow")? } + Metric::InterNetworkTransfer(_) => { + continue; // TODO + } }; datasets.push(ds); @@ -143,7 +146,7 @@ impl Recorder for HDF5Recorder { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], internal_state: &mut Option>, ) -> Result<(), PywrError> { diff --git a/pywr-core/src/recorders/metric_set.rs b/pywr-core/src/recorders/metric_set.rs index dcac9805..0921b22e 100644 --- a/pywr-core/src/recorders/metric_set.rs +++ b/pywr-core/src/recorders/metric_set.rs @@ -1,5 +1,5 @@ use crate::metric::Metric; -use crate::model::Model; +use crate::network::Network; use crate::recorders::aggregator::{PeriodValue, PeriodicAggregator, PeriodicAggregatorState}; use crate::scenario::ScenarioIndex; use crate::state::State; @@ -69,7 +69,7 @@ impl MetricSet { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], internal_state: &mut MetricSetState, ) -> Result<(), PywrError> { diff --git a/pywr-core/src/recorders/mod.rs b/pywr-core/src/recorders/mod.rs index 105c71c1..4fa3bc2a 100644 --- a/pywr-core/src/recorders/mod.rs +++ b/pywr-core/src/recorders/mod.rs @@ -6,7 +6,7 @@ mod py; pub use self::csv::CSVRecorder; use crate::metric::{IndexMetric, Metric}; -use crate::model::Model; +use crate::network::Network; use crate::scenario::ScenarioIndex; use crate::state::State; use crate::timestep::Timestep; @@ -69,7 +69,7 @@ pub trait Recorder: Send + Sync { &self, _timesteps: &[Timestep], _scenario_indices: &[ScenarioIndex], - _model: &Model, + _model: &Network, ) -> Result>, PywrError> { Ok(None) } @@ -79,7 +79,7 @@ pub trait Recorder: Send + Sync { &self, _timestep: &Timestep, _scenario_indices: &[ScenarioIndex], - _model: &Model, + _model: &Network, _state: &[State], 
_internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -113,7 +113,7 @@ impl Recorder for Array2Recorder { &self, timesteps: &[Timestep], scenario_indices: &[ScenarioIndex], - _model: &Model, + _model: &Network, ) -> Result>, PywrError> { let array: Array2 = Array::zeros((timesteps.len(), scenario_indices.len())); @@ -124,7 +124,7 @@ impl Recorder for Array2Recorder { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -182,7 +182,7 @@ impl Recorder for AssertionRecorder { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], _internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -254,7 +254,7 @@ where &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + model: &Network, state: &[State], _internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -308,7 +308,7 @@ impl Recorder for IndexAssertionRecorder { &self, timestep: &Timestep, scenario_indices: &[ScenarioIndex], - model: &Model, + network: &Network, state: &[State], _internal_state: &mut Option>, ) -> Result<(), PywrError> { @@ -320,7 +320,7 @@ impl Recorder for IndexAssertionRecorder { None => panic!("Simulation produced results out of range."), }; - let actual_value = self.metric.get_value(model, &state[scenario_index.index])?; + let actual_value = self.metric.get_value(network, &state[scenario_index.index])?; if actual_value != expected_value { panic!( @@ -366,21 +366,19 @@ struct RecorderMetric { #[cfg(test)] mod tests { use super::*; - use crate::solvers::{ClpSolver, ClpSolverSettings}; - use crate::test_utils::{default_timestepper, run_all_solvers, simple_model}; + use crate::test_utils::{run_all_solvers, simple_model}; #[test] fn test_array2_recorder() { let mut model = simple_model(2); - let timestepper = default_timestepper(); - let node_idx = 
model.get_node_index_by_name("input", None).unwrap(); + let node_idx = model.network().get_node_index_by_name("input", None).unwrap(); let rec = Array2Recorder::new("test", Metric::NodeOutFlow(node_idx)); - let _idx = model.add_recorder(Box::new(rec)).unwrap(); + let _idx = model.network_mut().add_recorder(Box::new(rec)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); // TODO fix this with respect to the trait. // let array = rec.data_view2().unwrap(); diff --git a/pywr-core/src/scenario.rs b/pywr-core/src/scenario.rs index 53d93d9f..efad4eb7 100644 --- a/pywr-core/src/scenario.rs +++ b/pywr-core/src/scenario.rs @@ -27,7 +27,12 @@ pub struct ScenarioGroupCollection { } impl ScenarioGroupCollection { - /// Find a `ScenarioGroup`'s index in the collection by name + /// Number of [`ScenarioGroup`]s in the collection. + pub fn len(&self) -> usize { + self.groups.len() + } + + /// Find a [`ScenarioGroup`]s index in the collection by name pub fn get_group_index_by_name(&self, name: &str) -> Result { self.groups .iter() @@ -35,7 +40,7 @@ impl ScenarioGroupCollection { .ok_or_else(|| PywrError::ScenarioNotFound(name.to_string())) } - /// Find a `ScenarioGroup`'s index in the collection by name + /// Find a [`ScenarioGroup`]s index in the collection by name pub fn get_group_by_name(&self, name: &str) -> Result<&ScenarioGroup, PywrError> { self.groups .iter() @@ -43,14 +48,14 @@ impl ScenarioGroupCollection { .ok_or_else(|| PywrError::ScenarioNotFound(name.to_string())) } - /// Add a `ScenarioGroup` to the collection + /// Add a [`ScenarioGroup`] to the collection pub fn add_group(&mut self, name: &str, size: usize) { // TODO error with duplicate names self.groups.push(ScenarioGroup::new(name, size)); } /// Return a vector of `ScenarioIndex`s for all combinations of the groups. 
- pub fn scenario_indices(&self) -> Vec { + fn scenario_indices(&self) -> Vec { let num: usize = self.groups.iter().map(|grp| grp.size).product(); let mut scenario_indices: Vec = Vec::with_capacity(num); @@ -80,3 +85,38 @@ impl ScenarioIndex { Self { index, indices } } } + +pub struct ScenarioDomain { + scenario_indices: Vec, + scenario_group_names: Vec, +} + +impl ScenarioDomain { + pub fn indices(&self) -> &[ScenarioIndex] { + &self.scenario_indices + } + + /// Return the index of a scenario group by name + pub fn group_index(&self, name: &str) -> Option { + self.scenario_group_names.iter().position(|n| n == name) + } +} + +impl From for ScenarioDomain { + fn from(value: ScenarioGroupCollection) -> Self { + // Handle creating at-least one scenario if the collection is empty. + if value.len() > 0 { + let scenario_group_names = value.groups.iter().map(|g| g.name.clone()).collect(); + + Self { + scenario_indices: value.scenario_indices(), + scenario_group_names, + } + } else { + Self { + scenario_indices: vec![ScenarioIndex::new(0, vec![0])], + scenario_group_names: vec!["default".to_string()], + } + } + } +} diff --git a/pywr-core/src/solvers/builder.rs b/pywr-core/src/solvers/builder.rs index e2568c0e..ee575a88 100644 --- a/pywr-core/src/solvers/builder.rs +++ b/pywr-core/src/solvers/builder.rs @@ -1,6 +1,6 @@ use crate::aggregated_node::AggregatedNodeIndex; use crate::edge::EdgeIndex; -use crate::model::Model; +use crate::network::Network; use crate::node::{Node, NodeType}; use crate::solvers::col_edge_map::{ColumnEdgeMap, ColumnEdgeMapBuilder}; use crate::solvers::SolverTimings; @@ -353,13 +353,13 @@ where pub fn update( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, state: &State, timings: &mut SolverTimings, ) -> Result<(), PywrError> { let start_objective_update = Instant::now(); - self.update_edge_objectives(model, state)?; + self.update_edge_objectives(network, state)?; timings.update_objective += start_objective_update.elapsed(); let 
start_constraint_update = Instant::now(); @@ -367,20 +367,20 @@ where self.builder.reset_row_bounds(); self.builder.reset_coefficients_to_update(); // Then these methods will add their bounds - self.update_node_constraint_bounds(model, timestep, state)?; - self.update_aggregated_node_factor_constraints(model, state)?; - self.update_aggregated_node_constraint_bounds(model, state)?; - self.update_virtual_storage_node_constraint_bounds(model, timestep, state)?; + self.update_node_constraint_bounds(network, timestep, state)?; + self.update_aggregated_node_factor_constraints(network, state)?; + self.update_aggregated_node_constraint_bounds(network, state)?; + self.update_virtual_storage_node_constraint_bounds(network, timestep, state)?; timings.update_constraints += start_constraint_update.elapsed(); Ok(()) } /// Update edge objective coefficients - fn update_edge_objectives(&mut self, model: &Model, state: &State) -> Result<(), PywrError> { + fn update_edge_objectives(&mut self, network: &Network, state: &State) -> Result<(), PywrError> { self.builder.zero_obj_coefficients(); - for edge in model.edges.deref() { - let obj_coef: f64 = edge.cost(&model.nodes, model, state)?; + for edge in network.edges().deref() { + let obj_coef: f64 = edge.cost(network.nodes(), network, state)?; let col = self.col_for_edge(&edge.index()); self.builder.add_obj_coefficient(col.to_usize().unwrap(), obj_coef); @@ -391,18 +391,18 @@ where /// Update node constraints fn update_node_constraint_bounds( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, state: &State, ) -> Result<(), PywrError> { let dt = timestep.days(); - for (row_id, node) in self.node_constraints_row_ids.iter().zip(model.nodes.deref()) { - let (lb, ub): (f64, f64) = match node.get_current_flow_bounds(model, state) { + for (row_id, node) in self.node_constraints_row_ids.iter().zip(network.nodes().deref()) { + let (lb, ub): (f64, f64) = match node.get_current_flow_bounds(network, state) { Ok(bnds) => bnds, 
Err(PywrError::FlowConstraintsUndefined) => { // Must be a storage node - let (avail, missing) = match node.get_current_available_volume_bounds(model, state) { + let (avail, missing) = match node.get_current_available_volume_bounds(network, state) { Ok(bnds) => bnds, Err(e) => return Err(e), }; @@ -418,16 +418,17 @@ where Ok(()) } - fn update_aggregated_node_factor_constraints(&mut self, model: &Model, state: &State) -> Result<(), PywrError> { + fn update_aggregated_node_factor_constraints(&mut self, network: &Network, state: &State) -> Result<(), PywrError> { for (agg_node_id, row_id) in self.agg_node_factor_constraint_row_ids.iter() { - let agg_node = model.get_aggregated_node(agg_node_id)?; + let agg_node = network.get_aggregated_node(agg_node_id)?; // Only create row for nodes that have factors - if let Some(node_pairs) = agg_node.get_norm_factor_pairs(model, state) { + if let Some(node_pairs) = agg_node.get_norm_factor_pairs(network, state) { for ((n0, f0), (n1, f1)) in node_pairs { // Modify the constraint matrix coefficients for the nodes // TODO error handling? 
- let node0 = model.nodes.get(&n0).expect("Node index not found!"); - let node1 = model.nodes.get(&n1).expect("Node index not found!"); + let nodes = network.nodes(); + let node0 = nodes.get(&n0).expect("Node index not found!"); + let node1 = nodes.get(&n1).expect("Node index not found!"); self.builder .update_row_coefficients(*row_id, node0, 1.0, &self.col_edge_map); @@ -445,13 +446,13 @@ where } /// Update aggregated node constraints - fn update_aggregated_node_constraint_bounds(&mut self, model: &Model, state: &State) -> Result<(), PywrError> { + fn update_aggregated_node_constraint_bounds(&mut self, network: &Network, state: &State) -> Result<(), PywrError> { for (row_id, agg_node) in self .agg_node_constraint_row_ids .iter() - .zip(model.aggregated_nodes.deref()) + .zip(network.aggregated_nodes().deref()) { - let (lb, ub): (f64, f64) = agg_node.get_current_flow_bounds(model, state)?; + let (lb, ub): (f64, f64) = agg_node.get_current_flow_bounds(network, state)?; self.builder.apply_row_bounds(*row_id, lb, ub); } @@ -460,7 +461,7 @@ where fn update_virtual_storage_node_constraint_bounds( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, state: &State, ) -> Result<(), PywrError> { @@ -469,9 +470,9 @@ where for (row_id, node) in self .virtual_storage_constraint_row_ids .iter() - .zip(model.virtual_storage_nodes.deref()) + .zip(network.virtual_storage_nodes().deref()) { - let (avail, missing) = match node.get_current_available_volume_bounds(model, state) { + let (avail, missing) = match node.get_current_available_volume_bounds(network, state) { Ok(bnds) => bnds, Err(e) => return Err(e), }; @@ -509,20 +510,20 @@ where self.col_edge_map.col_for_edge(edge_index) } - pub fn create(mut self, model: &Model) -> Result, PywrError> { + pub fn create(mut self, network: &Network) -> Result, PywrError> { // Create the columns - self.create_columns(model)?; + self.create_columns(network)?; // Create edge mass balance constraints - 
self.create_mass_balance_constraints(model); + self.create_mass_balance_constraints(network); // Create the nodal constraints - let node_constraints_row_ids = self.create_node_constraints(model); + let node_constraints_row_ids = self.create_node_constraints(network); // Create the aggregated node constraints - let agg_node_constraint_row_ids = self.create_aggregated_node_constraints(model); + let agg_node_constraint_row_ids = self.create_aggregated_node_constraints(network); // Create the aggregated node factor constraints - let agg_node_factor_constraint_row_ids = self.create_aggregated_node_factor_constraints(model); + let agg_node_factor_constraint_row_ids = self.create_aggregated_node_factor_constraints(network); // Create virtual storage constraints - let virtual_storage_constraint_row_ids = self.create_virtual_storage_constraints(model); + let virtual_storage_constraint_row_ids = self.create_virtual_storage_constraints(network); Ok(BuiltSolver { builder: self.builder.build(), @@ -539,16 +540,16 @@ where /// Typically each edge will have its own column. However, we use the mass-balance information /// to collapse edges (and their columns) where they are trivially the same. I.e. if there /// is a single incoming edge and outgoing edge at a link node. 
- fn create_columns(&mut self, model: &Model) -> Result<(), PywrError> { + fn create_columns(&mut self, network: &Network) -> Result<(), PywrError> { // One column per edge - let ncols = model.edges.len(); + let ncols = network.edges().len(); if ncols < 1 { return Err(PywrError::NoEdgesDefined); } - for edge in model.edges.iter() { + for edge in network.edges().iter() { let edge_index = edge.index(); - let from_node = model.get_node(&edge.from_node_index)?; + let from_node = network.get_node(&edge.from_node_index)?; if let NodeType::Link = from_node.node_type() { // We only look at link nodes; there should be no output nodes as a @@ -580,8 +581,8 @@ where } /// Create mass balance constraints for each edge - fn create_mass_balance_constraints(&mut self, model: &Model) { - for node in model.nodes.deref() { + fn create_mass_balance_constraints(&mut self, network: &Network) { + for node in network.nodes().deref() { // Only link nodes create mass-balance constraints if let NodeType::Link = node.node_type() { @@ -658,10 +659,10 @@ where /// /// One constraint is created per node to enforce any constraints (flow or storage) /// that it may define. Returns the row_ids associated with each constraint. - fn create_node_constraints(&mut self, model: &Model) -> Vec { - let mut row_ids = Vec::with_capacity(model.nodes.len()); + fn create_node_constraints(&mut self, network: &Network) -> Vec { + let mut row_ids = Vec::with_capacity(network.nodes().len()); - for node in model.nodes.deref() { + for node in network.nodes().deref() { // Create empty arrays to store the matrix data let mut row: RowBuilder = RowBuilder::default(); @@ -676,10 +677,10 @@ where /// Create aggregated node factor constraints /// /// One constraint is created per node to enforce any factor constraints. 
- fn create_aggregated_node_factor_constraints(&mut self, model: &Model) -> Vec<(AggregatedNodeIndex, I)> { + fn create_aggregated_node_factor_constraints(&mut self, network: &Network) -> Vec<(AggregatedNodeIndex, I)> { let mut row_ids = Vec::new(); - for agg_node in model.aggregated_nodes.deref() { + for agg_node in network.aggregated_nodes().deref() { // Only create row for nodes that have factors if let Some(node_pairs) = agg_node.get_factor_node_pairs() { for (n0, n1) in node_pairs { @@ -688,8 +689,9 @@ where let mut row = RowBuilder::default(); // TODO error handling? - let node0 = model.nodes.get(&n0).expect("Node index not found!"); - let node1 = model.nodes.get(&n1).expect("Node index not found!"); + let nodes = network.nodes(); + let node0 = nodes.get(&n0).expect("Node index not found!"); + let node1 = nodes.get(&n1).expect("Node index not found!"); self.add_node(node0, 1.0, &mut row); self.add_node(node1, -1.0, &mut row); @@ -711,16 +713,16 @@ where /// One constraint is created per node to enforce any constraints (flow or storage) /// that it may define. Returns the row ids associated with each aggregated node constraint. /// Panics if the model contains aggregated nodes with broken references to nodes. - fn create_aggregated_node_constraints(&mut self, model: &Model) -> Vec { - let mut row_ids = Vec::with_capacity(model.aggregated_nodes.len()); + fn create_aggregated_node_constraints(&mut self, network: &Network) -> Vec { + let mut row_ids = Vec::with_capacity(network.aggregated_nodes().len()); - for agg_node in model.aggregated_nodes.deref() { + for agg_node in network.aggregated_nodes().deref() { // Create empty arrays to store the matrix data let mut row: RowBuilder = RowBuilder::default(); for node_index in agg_node.get_nodes() { // TODO error handling? 
- let node = model.nodes.get(&node_index).expect("Node index not found!"); + let node = network.nodes().get(&node_index).expect("Node index not found!"); self.add_node(node, 1.0, &mut row); } @@ -732,10 +734,10 @@ where /// Create virtual storage node constraints /// - fn create_virtual_storage_constraints(&mut self, model: &Model) -> Vec { - let mut row_ids = Vec::with_capacity(model.virtual_storage_nodes.len()); + fn create_virtual_storage_constraints(&mut self, network: &Network) -> Vec { + let mut row_ids = Vec::with_capacity(network.virtual_storage_nodes().len()); - for virtual_storage in model.virtual_storage_nodes.deref() { + for virtual_storage in network.virtual_storage_nodes().deref() { // Create empty arrays to store the matrix data if let Some(nodes) = virtual_storage.get_nodes_with_factors() { @@ -747,7 +749,7 @@ where virtual_storage.full_name() ); } - let node = model.nodes.get(&node_index).expect("Node index not found!"); + let node = network.nodes().get(&node_index).expect("Node index not found!"); self.add_node(node, -factor, &mut row); } let row_id = self.builder.add_variable_row(row); diff --git a/pywr-core/src/solvers/clp/mod.rs b/pywr-core/src/solvers/clp/mod.rs index e8ef56b4..5dffa9ba 100644 --- a/pywr-core/src/solvers/clp/mod.rs +++ b/pywr-core/src/solvers/clp/mod.rs @@ -1,7 +1,7 @@ mod settings; use super::builder::SolverBuilder; -use crate::model::Model; +use crate::network::Network; use crate::solvers::builder::BuiltSolver; use crate::solvers::{Solver, SolverFeatures, SolverTimings}; use crate::state::State; @@ -237,7 +237,7 @@ impl Solver for ClpSolver { ] } - fn setup(model: &Model, settings: &Self::Settings) -> Result, PywrError> { + fn setup(model: &Network, settings: &Self::Settings) -> Result, PywrError> { let builder = SolverBuilder::default(); let built = builder.create(model)?; @@ -245,7 +245,7 @@ impl Solver for ClpSolver { Ok(Box::new(solver)) } - fn solve(&mut self, model: &Model, timestep: &Timestep, state: &mut State) -> 
Result { + fn solve(&mut self, model: &Network, timestep: &Timestep, state: &mut State) -> Result { let mut timings = SolverTimings::default(); self.builder.update(model, timestep, state, &mut timings)?; @@ -273,7 +273,7 @@ impl Solver for ClpSolver { network_state.reset(); let start_save_solution = Instant::now(); - for edge in model.edges.iter() { + for edge in model.edges().iter() { let col = self.builder.col_for_edge(&edge.index()) as usize; let flow = solution[col]; network_state.add_flow(edge, timestep, flow)?; diff --git a/pywr-core/src/solvers/highs/mod.rs b/pywr-core/src/solvers/highs/mod.rs index 56ba1088..3eef0a5b 100644 --- a/pywr-core/src/solvers/highs/mod.rs +++ b/pywr-core/src/solvers/highs/mod.rs @@ -1,6 +1,6 @@ mod settings; -use crate::model::Model; +use crate::network::Network; use crate::solvers::builder::{BuiltSolver, SolverBuilder}; use crate::solvers::{Solver, SolverFeatures, SolverTimings}; use crate::state::State; @@ -166,9 +166,9 @@ impl Solver for HighsSolver { &[] } - fn setup(model: &Model, settings: &Self::Settings) -> Result, PywrError> { + fn setup(network: &Network, settings: &Self::Settings) -> Result, PywrError> { let builder: SolverBuilder = SolverBuilder::default(); - let built = builder.create(model)?; + let built = builder.create(network)?; let num_cols = built.num_cols(); let num_rows = built.num_rows(); @@ -193,9 +193,9 @@ impl Solver for HighsSolver { highs: highs_lp, })) } - fn solve(&mut self, model: &Model, timestep: &Timestep, state: &mut State) -> Result { + fn solve(&mut self, network: &Network, timestep: &Timestep, state: &mut State) -> Result { let mut timings = SolverTimings::default(); - self.builder.update(model, timestep, state, &mut timings)?; + self.builder.update(network, timestep, state, &mut timings)?; let num_cols = self.builder.num_cols(); let num_rows = self.builder.num_rows(); @@ -223,12 +223,12 @@ impl Solver for HighsSolver { network_state.reset(); let start_save_solution = Instant::now(); - for edge 
in model.edges.deref() { + for edge in network.edges().deref() { let col = self.builder.col_for_edge(&edge.index()) as usize; let flow = solution[col]; network_state.add_flow(edge, timestep, flow)?; } - network_state.complete(model, timestep)?; + network_state.complete(network, timestep)?; timings.save_solution += start_save_solution.elapsed(); Ok(timings) diff --git a/pywr-core/src/solvers/ipm_ocl/mod.rs b/pywr-core/src/solvers/ipm_ocl/mod.rs index 348607e1..231ecc7f 100644 --- a/pywr-core/src/solvers/ipm_ocl/mod.rs +++ b/pywr-core/src/solvers/ipm_ocl/mod.rs @@ -1,15 +1,14 @@ mod settings; use crate::edge::EdgeIndex; -use crate::model::Model; +use crate::network::Network; use crate::node::{Node, NodeType}; use crate::solvers::col_edge_map::{ColumnEdgeMap, ColumnEdgeMapBuilder}; -use crate::solvers::{MultiStateSolver, SolverFeatures, SolverSettings, SolverTimings}; +use crate::solvers::{MultiStateSolver, SolverFeatures, SolverTimings}; use crate::state::State; use crate::timestep::Timestep; use crate::PywrError; use ipm_ocl::{GetClProgram, PathFollowingDirectClSolver}; -use num::complex::ComplexFloat; use rayon::iter::IndexedParallelIterator; use rayon::iter::ParallelIterator; use rayon::prelude::ParallelSliceMut; @@ -281,34 +280,34 @@ impl BuiltSolver { fn update( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, states: &[State], timings: &mut SolverTimings, ) -> Result<(), PywrError> { let start_objective_update = Instant::now(); - self.update_edge_objectives(model, states)?; + self.update_edge_objectives(network, states)?; timings.update_objective += start_objective_update.elapsed(); let start_constraint_update = Instant::now(); self.lp.reset_row_bounds(); - self.update_node_constraint_bounds(model, timestep, states)?; - // self.update_aggregated_node_constraint_bounds(model, state)?; + self.update_node_constraint_bounds(network, timestep, states)?; + // self.update_aggregated_node_constraint_bounds(network, state)?; 
timings.update_constraints += start_constraint_update.elapsed(); Ok(()) } /// Update edge objective coefficients - fn update_edge_objectives(&mut self, model: &Model, states: &[State]) -> Result<(), PywrError> { + fn update_edge_objectives(&mut self, network: &Network, states: &[State]) -> Result<(), PywrError> { self.lp.zero_obj_coefficients(); - for edge in model.edges.deref() { + for edge in network.edges().deref() { // Collect all of the costs for all states together let cost = states .iter() .map(|s| { - edge.cost(&model.nodes, model, s) + edge.cost(&network.nodes(), network, s) .map(|c| if c != 0.0 { -c } else { 0.0 }) }) .collect::, _>>()?; @@ -322,7 +321,7 @@ impl BuiltSolver { /// Update node constraints fn update_node_constraint_bounds( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, states: &[State], ) -> Result<(), PywrError> { @@ -330,7 +329,7 @@ impl BuiltSolver { let dt = timestep.days(); - for node in model.nodes.deref() { + for node in network.nodes().deref() { match node.node_type() { NodeType::Input | NodeType::Output | NodeType::Link => { if !node.is_max_flow_unconstrained().unwrap() { @@ -339,7 +338,7 @@ impl BuiltSolver { .iter() .map(|state| { // TODO check for non-zero lower bounds and error? 
- node.get_current_flow_bounds(model, state) + node.get_current_flow_bounds(network, state) .expect("Flow bounds expected for Input, Output and Link nodes.") .1 .min(B_MAX) @@ -355,7 +354,7 @@ impl BuiltSolver { .iter() .map(|state| { let (avail, missing) = node - .get_current_available_volume_bounds(model, state) + .get_current_available_volume_bounds(network, state) .expect("Volumes bounds expected for Storage nodes."); (avail / dt, missing / dt) }) @@ -394,20 +393,20 @@ impl SolverBuilder { self.col_edge_map.col_for_edge(edge_index) } - fn create(mut self, model: &Model) -> Result { + fn create(mut self, network: &Network) -> Result { // Create the columns - self.create_columns(model)?; + self.create_columns(network)?; // Create edge mass balance constraints - self.create_mass_balance_constraints(model); + self.create_mass_balance_constraints(network); // Create the nodal constraints - let node_constraints_row_ids = self.create_node_constraints(model); + let node_constraints_row_ids = self.create_node_constraints(network); // // Create the aggregated node constraints - // builder.create_aggregated_node_constraints(model); + // builder.create_aggregated_node_constraints(network); // // Create the aggregated node factor constraints - // builder.create_aggregated_node_factor_constraints(model); + // builder.create_aggregated_node_factor_constraints(network); // // Create virtual storage constraints - // builder.create_virtual_storage_constraints(model); + // builder.create_virtual_storage_constraints(network); Ok(BuiltSolver { lp: self.builder.build(), @@ -421,16 +420,16 @@ impl SolverBuilder { /// Typically each edge will have its own column. However, we use the mass-balance information /// to collapse edges (and their columns) where they are trivially the same. I.e. if there /// is a single incoming edge and outgoing edge at a link node. 
- fn create_columns(&mut self, model: &Model) -> Result<(), PywrError> { + fn create_columns(&mut self, network: &Network) -> Result<(), PywrError> { // One column per edge - let ncols = model.edges.len(); + let ncols = network.edges().len(); if ncols < 1 { return Err(PywrError::NoEdgesDefined); } - for edge in model.edges.iter() { + for edge in network.edges().iter() { let edge_index = edge.index(); - let from_node = model.get_node(&edge.from_node_index)?; + let from_node = network.get_node(&edge.from_node_index)?; if let NodeType::Link = from_node.node_type() { // We only look at link nodes; there should be no output nodes as a @@ -462,8 +461,8 @@ impl SolverBuilder { } /// Create mass balance constraints for each edge - fn create_mass_balance_constraints(&mut self, model: &Model) { - for node in model.nodes.deref() { + fn create_mass_balance_constraints(&mut self, network: &Network) { + for node in network.nodes().deref() { // Only link nodes create mass-balance constraints if let NodeType::Link = node.node_type() { @@ -530,10 +529,10 @@ impl SolverBuilder { /// /// One constraint is created per node to enforce any constraints (flow or storage) /// that it may define. 
- fn create_node_constraints(&mut self, model: &Model) -> Vec { - let mut row_ids = Vec::with_capacity(model.nodes.len()); + fn create_node_constraints(&mut self, network: &Network) -> Vec { + let mut row_ids = Vec::with_capacity(network.nodes().len()); - for node in model.nodes.deref() { + for node in network.nodes().deref() { match node.node_type() { NodeType::Input | NodeType::Output | NodeType::Link => { // Only create node constraints for nodes that could become constrained @@ -579,7 +578,7 @@ impl MultiStateSolver for ClIpmF32Solver { &[] } - fn setup(model: &Model, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { + fn setup(network: &Network, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { let platform = ocl::Platform::default(); let device = ocl::Device::first(platform).expect("Failed to get OpenCL device."); let context = ocl::Context::builder() @@ -600,7 +599,7 @@ impl MultiStateSolver for ClIpmF32Solver { for chunk_scenarios in (0..num_scenarios).collect::>().chunks(chunk_size.get()) { let builder = SolverBuilder::new(chunk_scenarios.len()); - let built = builder.create(model)?; + let built = builder.create(network)?; let matrix = built.lp.get_full_matrix(); let num_rows = matrix.row_starts.len() - 1; @@ -633,9 +632,14 @@ impl MultiStateSolver for ClIpmF32Solver { })) } - fn solve(&mut self, model: &Model, timestep: &Timestep, states: &mut [State]) -> Result { + fn solve( + &mut self, + network: &Network, + timestep: &Timestep, + states: &mut [State], + ) -> Result { // TODO complete the timings - let mut timings = SolverTimings::default(); + let timings = SolverTimings::default(); states .par_chunks_mut(self.chunk_size.get()) @@ -644,7 +648,7 @@ impl MultiStateSolver for ClIpmF32Solver { .for_each(|((chunk_states, built), ipm)| { let mut timings = SolverTimings::default(); - built.update(model, timestep, chunk_states, &mut timings).unwrap(); + built.update(network, timestep, chunk_states, &mut 
timings).unwrap(); let now = Instant::now(); let row_upper: Vec<_> = built.row_upper().iter().map(|&v| v as f32).collect(); @@ -661,7 +665,7 @@ impl MultiStateSolver for ClIpmF32Solver { let network_state = state.get_mut_network_state(); network_state.reset(); - for edge in model.edges.deref() { + for edge in network.edges().deref() { let col = built.col_for_edge(&edge.index()); let flow = solution[col * num_states + i]; network_state.add_flow(edge, timestep, flow as f64).unwrap(); @@ -689,7 +693,7 @@ impl MultiStateSolver for ClIpmF64Solver { &[] } - fn setup(model: &Model, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { + fn setup(network: &Network, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { let platform = ocl::Platform::default(); let device = ocl::Device::first(platform).expect("Failed to get OpenCL device."); let context = ocl::Context::builder() @@ -713,7 +717,7 @@ impl MultiStateSolver for ClIpmF64Solver { let queue = ocl::Queue::new(&context, device, None).expect("Failed to create OpenCL queue."); let builder = SolverBuilder::new(chunk_scenarios.len()); - let built = builder.create(model)?; + let built = builder.create(network)?; let matrix = built.lp.get_full_matrix(); let num_rows = matrix.row_starts.len() - 1; @@ -747,9 +751,14 @@ impl MultiStateSolver for ClIpmF64Solver { })) } - fn solve(&mut self, model: &Model, timestep: &Timestep, states: &mut [State]) -> Result { + fn solve( + &mut self, + network: &Network, + timestep: &Timestep, + states: &mut [State], + ) -> Result { // TODO complete the timings - let mut timings = SolverTimings::default(); + let timings = SolverTimings::default(); states .par_chunks_mut(self.chunk_size.get()) @@ -759,7 +768,7 @@ impl MultiStateSolver for ClIpmF64Solver { .for_each(|(((chunk_states, built), ipm), queue)| { let mut timings = SolverTimings::default(); - built.update(model, timestep, chunk_states, &mut timings).unwrap(); + built.update(network, timestep, 
chunk_states, &mut timings).unwrap(); let now = Instant::now(); @@ -774,7 +783,7 @@ impl MultiStateSolver for ClIpmF64Solver { let network_state = state.get_mut_network_state(); network_state.reset(); - for edge in model.edges.deref() { + for edge in network.edges().deref() { let col = built.col_for_edge(&edge.index()); let flow = solution[col * num_states + i]; network_state.add_flow(edge, timestep, flow).unwrap(); diff --git a/pywr-core/src/solvers/ipm_simd/mod.rs b/pywr-core/src/solvers/ipm_simd/mod.rs index 7c3fdedf..6718c4c1 100644 --- a/pywr-core/src/solvers/ipm_simd/mod.rs +++ b/pywr-core/src/solvers/ipm_simd/mod.rs @@ -1,7 +1,7 @@ mod settings; use crate::edge::EdgeIndex; -use crate::model::Model; +use crate::network::Network; use crate::node::{Node, NodeType}; use crate::solvers::col_edge_map::{ColumnEdgeMap, ColumnEdgeMapBuilder}; use crate::solvers::{MultiStateSolver, SolverFeatures, SolverTimings}; @@ -9,7 +9,6 @@ use crate::state::State; use crate::timestep::Timestep; use crate::PywrError; use ipm_simd::{PathFollowingDirectSimdSolver, Tolerances}; -use num::complex::ComplexFloat; use rayon::iter::IndexedParallelIterator; use rayon::iter::ParallelIterator; use rayon::prelude::ParallelSliceMut; @@ -17,7 +16,7 @@ pub use settings::{SimdIpmSolverSettings, SimdIpmSolverSettingsBuilder}; use std::collections::BTreeMap; use std::num::NonZeroUsize; use std::ops::Deref; -use std::simd::{LaneCount, Simd, SimdElement, SimdFloat, SupportedLaneCount}; +use std::simd::{LaneCount, Simd, SimdFloat, SupportedLaneCount}; use std::time::Instant; const B_MAX: f64 = 999999.0; @@ -309,34 +308,34 @@ where fn update( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, states: &[State], timings: &mut SolverTimings, ) -> Result<(), PywrError> { let start_objective_update = Instant::now(); - self.update_edge_objectives(model, states)?; + self.update_edge_objectives(network, states)?; timings.update_objective += start_objective_update.elapsed(); let 
start_constraint_update = Instant::now(); self.lp.reset_row_bounds(); - self.update_node_constraint_bounds(model, timestep, states)?; - // self.update_aggregated_node_constraint_bounds(model, state)?; + self.update_node_constraint_bounds(network, timestep, states)?; + // self.update_aggregated_node_constraint_bounds(network, state)?; timings.update_constraints += start_constraint_update.elapsed(); Ok(()) } /// Update edge objective coefficients - fn update_edge_objectives(&mut self, model: &Model, states: &[State]) -> Result<(), PywrError> { + fn update_edge_objectives(&mut self, network: &Network, states: &[State]) -> Result<(), PywrError> { self.lp.zero_obj_coefficients(); - for edge in model.edges.deref() { + for edge in network.edges().deref() { // Collect all of the costs for all states together let cost = states .iter() .map(|s| { - edge.cost(&model.nodes, model, s) + edge.cost(&network.nodes(), network, s) .map(|c| if c != 0.0 { -c } else { 0.0 }) }) .collect::, _>>()?; @@ -350,7 +349,7 @@ where /// Update node constraints fn update_node_constraint_bounds( &mut self, - model: &Model, + network: &Network, timestep: &Timestep, states: &[State], ) -> Result<(), PywrError> { @@ -358,7 +357,7 @@ where let dt = timestep.days(); - for node in model.nodes.deref() { + for node in network.nodes().deref() { match node.node_type() { NodeType::Input | NodeType::Output | NodeType::Link => { if !node.is_max_flow_unconstrained().unwrap() { @@ -367,7 +366,7 @@ where .iter() .map(|state| { // TODO check for non-zero lower bounds and error? 
- node.get_current_flow_bounds(model, state) + node.get_current_flow_bounds(network, state) .expect("Flow bounds expected for Input, Output and Link nodes.") .1 .min(B_MAX) @@ -383,7 +382,7 @@ where .iter() .map(|state| { let (avail, missing) = node - .get_current_available_volume_bounds(model, state) + .get_current_available_volume_bounds(network, state) .expect("Volumes bounds expected for Storage nodes."); (avail / dt, missing / dt) }) @@ -422,23 +421,23 @@ impl SolverBuilder { self.col_edge_map.col_for_edge(edge_index) } - fn create(mut self, model: &Model) -> Result, PywrError> + fn create(mut self, network: &Network) -> Result, PywrError> where LaneCount: SupportedLaneCount, { // Create the columns - self.create_columns(model)?; + self.create_columns(network)?; // Create edge mass balance constraints - self.create_mass_balance_constraints(model); + self.create_mass_balance_constraints(network); // Create the nodal constraints - let node_constraints_row_ids = self.create_node_constraints(model); + let node_constraints_row_ids = self.create_node_constraints(network); // // Create the aggregated node constraints - // builder.create_aggregated_node_constraints(model); + // builder.create_aggregated_node_constraints(network); // // Create the aggregated node factor constraints - // builder.create_aggregated_node_factor_constraints(model); + // builder.create_aggregated_node_factor_constraints(network); // // Create virtual storage constraints - // builder.create_virtual_storage_constraints(model); + // builder.create_virtual_storage_constraints(network); Ok(BuiltSolver { lp: self.builder.build(), @@ -452,16 +451,16 @@ impl SolverBuilder { /// Typically each edge will have its own column. However, we use the mass-balance information /// to collapse edges (and their columns) where they are trivially the same. I.e. if there /// is a single incoming edge and outgoing edge at a link node. 
- fn create_columns(&mut self, model: &Model) -> Result<(), PywrError> { + fn create_columns(&mut self, network: &Network) -> Result<(), PywrError> { // One column per edge - let ncols = model.edges.len(); + let ncols = network.edges().len(); if ncols < 1 { return Err(PywrError::NoEdgesDefined); } - for edge in model.edges.iter() { + for edge in network.edges().iter() { let edge_index = edge.index(); - let from_node = model.get_node(&edge.from_node_index)?; + let from_node = network.get_node(&edge.from_node_index)?; if let NodeType::Link = from_node.node_type() { // We only look at link nodes; there should be no output nodes as a @@ -493,8 +492,8 @@ impl SolverBuilder { } /// Create mass balance constraints for each edge - fn create_mass_balance_constraints(&mut self, model: &Model) { - for node in model.nodes.deref() { + fn create_mass_balance_constraints(&mut self, network: &Network) { + for node in network.nodes().deref() { // Only link nodes create mass-balance constraints if let NodeType::Link = node.node_type() { @@ -561,10 +560,10 @@ impl SolverBuilder { /// /// One constraint is created per node to enforce any constraints (flow or storage) /// that it may define. 
- fn create_node_constraints(&mut self, model: &Model) -> Vec { - let mut row_ids = Vec::with_capacity(model.nodes.len()); + fn create_node_constraints(&mut self, network: &Network) -> Vec { + let mut row_ids = Vec::with_capacity(network.nodes().len()); - for node in model.nodes.deref() { + for node in network.nodes().deref() { match node.node_type() { NodeType::Input | NodeType::Output | NodeType::Link => { // Only create node constraints for nodes that could become constrained @@ -615,13 +614,13 @@ where &[] } - fn setup(model: &Model, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { + fn setup(network: &Network, num_scenarios: usize, settings: &Self::Settings) -> Result, PywrError> { let mut built_solvers = Vec::new(); let mut ipms = Vec::new(); for _ in (0..num_scenarios).collect::>().chunks(N) { let builder = SolverBuilder::new(); - let built = builder.create(model)?; + let built = builder.create(network)?; let matrix = built.lp.get_full_matrix(); let num_rows = matrix.row_starts.len() - 1; @@ -648,9 +647,14 @@ where })) } - fn solve(&mut self, model: &Model, timestep: &Timestep, states: &mut [State]) -> Result { + fn solve( + &mut self, + network: &Network, + timestep: &Timestep, + states: &mut [State], + ) -> Result { // TODO complete the timings - let mut timings = SolverTimings::default(); + let timings = SolverTimings::default(); // TODO this will miss off anything that doesn't divide in to 4 states @@ -660,7 +664,7 @@ where .for_each(|((chunk_states, built), ipm)| { let mut timings = SolverTimings::default(); - built.update(model, timestep, chunk_states, &mut timings).unwrap(); + built.update(network, timestep, chunk_states, &mut timings).unwrap(); let now = Instant::now(); @@ -680,7 +684,7 @@ where state.get_mut_network_state().reset(); } - for edge in model.edges.deref() { + for edge in network.edges().deref() { let col = built.col_for_edge(&edge.index()); let flows = solution[col]; diff --git a/pywr-core/src/solvers/mod.rs 
b/pywr-core/src/solvers/mod.rs index 11f55dd9..b12e84a8 100644 --- a/pywr-core/src/solvers/mod.rs +++ b/pywr-core/src/solvers/mod.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::state::State; use crate::timestep::Timestep; use crate::PywrError; @@ -81,14 +81,15 @@ pub trait Solver: Send { /// An array of features that this solver provides. fn features() -> &'static [SolverFeatures]; - fn setup(model: &Model, settings: &Self::Settings) -> Result<Box<Self>, PywrError>; - fn solve(&mut self, model: &Model, timestep: &Timestep, state: &mut State) -> Result<SolverTimings, PywrError>; + fn setup(model: &Network, settings: &Self::Settings) -> Result<Box<Self>, PywrError>; + fn solve(&mut self, model: &Network, timestep: &Timestep, state: &mut State) -> Result<SolverTimings, PywrError>; } pub trait MultiStateSolver: Send { type Settings; /// An array of features that this solver provides. fn features() -> &'static [SolverFeatures]; - fn setup(model: &Model, num_scenarios: usize, settings: &Self::Settings) -> Result<Box<Self>, PywrError>; - fn solve(&mut self, model: &Model, timestep: &Timestep, states: &mut [State]) -> Result<SolverTimings, PywrError>; + fn setup(model: &Network, num_scenarios: usize, settings: &Self::Settings) -> Result<Box<Self>, PywrError>; + fn solve(&mut self, model: &Network, timestep: &Timestep, states: &mut [State]) + -> Result<SolverTimings, PywrError>; } diff --git a/pywr-core/src/state.rs b/pywr-core/src/state.rs index 54dae7cc..944d8372 100644 --- a/pywr-core/src/state.rs +++ b/pywr-core/src/state.rs @@ -1,6 +1,7 @@ use crate::derived_metric::DerivedMetricIndex; use crate::edge::{Edge, EdgeIndex}; -use crate::model::Model; +use crate::models::MultiNetworkTransferIndex; +use crate::network::Network; use crate::node::{Node, NodeIndex}; use crate::parameters::{IndexParameterIndex, MultiValueParameterIndex, ParameterIndex}; use crate::timestep::Timestep; @@ -368,12 +369,12 @@ impl NetworkState { /// /// This final step ensures that derived states (e.g. virtual storage volume) are updated /// once all of the flows have been updated.
- pub fn complete(&mut self, model: &Model, timestep: &Timestep) -> Result<(), PywrError> { + pub fn complete(&mut self, model: &Network, timestep: &Timestep) -> Result<(), PywrError> { // Update virtual storage node states for (state, node) in self .virtual_storage_states .iter_mut() - .zip(model.virtual_storage_nodes.iter()) + .zip(model.virtual_storage_nodes().iter()) { if let Some(node_factors) = node.get_nodes_with_factors() { let flow = node_factors @@ -381,7 +382,7 @@ impl NetworkState { .map(|(idx, factor)| match self.node_states.get(*idx.deref()) { None => Err(PywrError::NodeIndexNotFound), Some(s) => { - let node = model.nodes.get(idx)?; + let node = model.nodes().get(idx)?; match node { Node::Input(_) => Ok(factor * s.get_out_flow()), Node::Output(_) => Ok(factor * s.get_in_flow()), @@ -505,6 +506,7 @@ pub struct State { network: NetworkState, parameters: ParameterValues, derived_metrics: Vec<f64>, + inter_network_values: Vec<f64>, } impl State { @@ -516,11 +518,13 @@ impl State { num_parameter_indices: usize, num_multi_parameters: usize, num_derived_metrics: usize, + num_inter_network_values: usize, ) -> Self { Self { network: NetworkState::new(initial_node_states, num_edges, initial_virtual_storage_states), parameters: ParameterValues::new(num_parameter_values, num_parameter_indices, num_multi_parameters), derived_metrics: vec![0.0; num_derived_metrics], + inter_network_values: vec![0.0; num_inter_network_values], } } @@ -593,4 +597,25 @@ impl State { None => Err(PywrError::DerivedMetricIndexNotFound(idx)), } } + + pub fn get_inter_network_transfer_value(&self, idx: MultiNetworkTransferIndex) -> Result<f64, PywrError> { + match self.inter_network_values.get(*idx.deref()) { + Some(s) => Ok(*s), + None => Err(PywrError::MultiNetworkTransferIndexNotFound(idx)), + } + } + + pub fn set_inter_network_transfer_value( + &mut self, + idx: MultiNetworkTransferIndex, + value: f64, + ) -> Result<(), PywrError> { + match self.inter_network_values.get_mut(*idx.deref()) { + Some(s) => { + *s
= value; + Ok(()) + } + None => Err(PywrError::MultiNetworkTransferIndexNotFound(idx)), + } + } } diff --git a/pywr-core/src/test_utils.rs b/pywr-core/src/test_utils.rs index 3fa7d348..2db019aa 100644 --- a/pywr-core/src/test_utils.rs +++ b/pywr-core/src/test_utils.rs @@ -1,10 +1,12 @@ use crate::metric::Metric; +use crate::models::{Model, ModelDomain}; /// Utilities for unit tests. /// TODO move this to its own local crate ("test-utilities") as part of a workspace. -use crate::model::Model; +use crate::network::Network; use crate::node::{Constraint, ConstraintValue, StorageInitialVolume}; use crate::parameters::{AggFunc, AggregatedParameter, Array2Parameter, ConstantParameter, Parameter}; use crate::recorders::AssertionRecorder; +use crate::scenario::ScenarioGroupCollection; #[cfg(feature = "ipm-ocl")] use crate::solvers::ClIpmF64Solver; use crate::solvers::ClpSolver; @@ -12,7 +14,7 @@ use crate::solvers::ClpSolver; use crate::solvers::HighsSolver; #[cfg(feature = "ipm-simd")] use crate::solvers::SimdIpmF64Solver; -use crate::timestep::Timestepper; +use crate::timestep::{TimeDomain, Timestepper}; use crate::PywrError; use ndarray::{Array, Array2}; use rand::Rng; @@ -24,25 +26,25 @@ pub fn default_timestepper() -> Timestepper { Timestepper::new(date!(2020 - 01 - 01), date!(2020 - 01 - 15), 1) } -/// Create a simple test model with three nodes. -pub fn simple_model(num_scenarios: usize) -> Model { - let mut model = Model::default(); - model.add_scenario_group("test-scenario", num_scenarios).unwrap(); - let scenario_idx = model.get_scenario_group_index_by_name("test-scenario").unwrap(); +pub fn default_time_domain() -> TimeDomain { + default_timestepper().into() +} - let input_node = model.add_input_node("input", None).unwrap(); - let link_node = model.add_link_node("link", None).unwrap(); - let output_node = model.add_output_node("output", None).unwrap(); +/// Create a simple test network with three nodes. 
+pub fn simple_network(network: &mut Network, inflow_scenario_index: usize, num_inflow_scenarios: usize) { + let input_node = network.add_input_node("input", None).unwrap(); + let link_node = network.add_link_node("link", None).unwrap(); + let output_node = network.add_output_node("output", None).unwrap(); - model.connect_nodes(input_node, link_node).unwrap(); - model.connect_nodes(link_node, output_node).unwrap(); + network.connect_nodes(input_node, link_node).unwrap(); + network.connect_nodes(link_node, output_node).unwrap(); - let inflow = Array::from_shape_fn((366, num_scenarios), |(i, j)| 1.0 + i as f64 + j as f64); - let inflow = Array2Parameter::new("inflow", inflow, scenario_idx, None); + let inflow = Array::from_shape_fn((366, num_inflow_scenarios), |(i, j)| 1.0 + i as f64 + j as f64); + let inflow = Array2Parameter::new("inflow", inflow, inflow_scenario_index, None); - let inflow = model.add_parameter(Box::new(inflow)).unwrap(); + let inflow = network.add_parameter(Box::new(inflow)).unwrap(); - let input_node = model.get_mut_node_by_name("input", None).unwrap(); + let input_node = network.get_mut_node_by_name("input", None).unwrap(); input_node .set_constraint( ConstraintValue::Metric(Metric::ParameterValue(inflow)), @@ -53,19 +55,19 @@ pub fn simple_model(num_scenarios: usize) -> Model { let base_demand = 10.0; let demand_factor = ConstantParameter::new("demand-factor", 1.2, None); - let demand_factor = model.add_parameter(Box::new(demand_factor)).unwrap(); + let demand_factor = network.add_parameter(Box::new(demand_factor)).unwrap(); let total_demand = AggregatedParameter::new( "total-demand", &[Metric::Constant(base_demand), Metric::ParameterValue(demand_factor)], AggFunc::Product, ); - let total_demand = model.add_parameter(Box::new(total_demand)).unwrap(); + let total_demand = network.add_parameter(Box::new(total_demand)).unwrap(); let demand_cost = ConstantParameter::new("demand-cost", -10.0, None); - let demand_cost = 
model.add_parameter(Box::new(demand_cost)).unwrap(); + let demand_cost = network.add_parameter(Box::new(demand_cost)).unwrap(); - let output_node = model.get_mut_node_by_name("output", None).unwrap(); + let output_node = network.get_mut_node_by_name("output", None).unwrap(); output_node .set_constraint( ConstraintValue::Metric(Metric::ParameterValue(total_demand)), @@ -73,15 +75,29 @@ pub fn simple_model(num_scenarios: usize) -> Model { ) .unwrap(); output_node.set_cost(ConstraintValue::Metric(Metric::ParameterValue(demand_cost))); +} +/// Create a simple test model with three nodes. +pub fn simple_model(num_scenarios: usize) -> Model { + let mut scenario_collection = ScenarioGroupCollection::default(); + scenario_collection.add_group("test-scenario", num_scenarios); - model + let domain = ModelDomain::from(default_timestepper(), scenario_collection); + let mut network = Network::default(); + + let idx = domain + .scenarios() + .group_index("test-scenario") + .expect("Could not find scenario group"); + + simple_network(&mut network, idx, num_scenarios); + + Model::new(domain, network) } /// A test model with a single storage node. pub fn simple_storage_model() -> Model { - let mut model = Model::default(); - - let storage_node = model + let mut network = Network::default(); + let storage_node = network .add_storage_node( "reservoir", None, @@ -90,19 +106,19 @@ pub fn simple_storage_model() -> Model { ConstraintValue::Scalar(100.0), ) .unwrap(); - let output_node = model.add_output_node("output", None).unwrap(); + let output_node = network.add_output_node("output", None).unwrap(); - model.connect_nodes(storage_node, output_node).unwrap(); + network.connect_nodes(storage_node, output_node).unwrap(); // Apply demand to the model // TODO convenience function for adding a constant constraint. 
let demand = ConstantParameter::new("demand", 10.0, None); - let demand = model.add_parameter(Box::new(demand)).unwrap(); + let demand = network.add_parameter(Box::new(demand)).unwrap(); let demand_cost = ConstantParameter::new("demand-cost", -10.0, None); - let demand_cost = model.add_parameter(Box::new(demand_cost)).unwrap(); + let demand_cost = network.add_parameter(Box::new(demand_cost)).unwrap(); - let output_node = model.get_mut_node_by_name("output", None).unwrap(); + let output_node = network.get_mut_node_by_name("output", None).unwrap(); output_node .set_constraint( ConstraintValue::Metric(Metric::ParameterValue(demand)), @@ -111,14 +127,7 @@ pub fn simple_storage_model() -> Model { .unwrap(); output_node.set_cost(ConstraintValue::Metric(Metric::ParameterValue(demand_cost))); - let max_volume = 100.0; - - let storage_node = model.get_mut_node_by_name("reservoir", None).unwrap(); - storage_node - .set_constraint(ConstraintValue::Scalar(max_volume), Constraint::MaxVolume) - .unwrap(); - - model + Model::new(default_time_domain().into(), network) } /// Add the given parameter to the given model along with an assertion recorder that asserts @@ -135,32 +144,31 @@ pub fn run_and_assert_parameter( ulps: Option, epsilon: Option, ) { - let p_idx = model.add_parameter(parameter).unwrap(); + let p_idx = model.network_mut().add_parameter(parameter).unwrap(); let start = date!(2020 - 01 - 01); let end = start.checked_add((expected_values.nrows() as i64 - 1).days()).unwrap(); - let timestepper = Timestepper::new(start, end, 1); let rec = AssertionRecorder::new("assert", Metric::ParameterValue(p_idx), expected_values, ulps, epsilon); - model.add_recorder(Box::new(rec)).unwrap(); - run_all_solvers(model, ×tepper) + model.network_mut().add_recorder(Box::new(rec)).unwrap(); + run_all_solvers(model) } /// Run a model using each of the in-built solvers. 
/// /// The model will only be run if the solver has the required solver features (and /// is also enabled as a Cargo feature). -pub fn run_all_solvers(model: &Model, timestepper: &Timestepper) { +pub fn run_all_solvers(model: &Model) { model - .run::<ClpSolver>(timestepper, &Default::default()) + .run::<ClpSolver>(&Default::default()) .expect("Failed to solve with CLP"); #[cfg(feature = "highs")] { if model.check_solver_features::<HighsSolver>() { model - .run::<HighsSolver>(timestepper, &Default::default()) + .run::<HighsSolver>(&Default::default()) .expect("Failed to solve with Highs"); } } @@ -169,7 +177,7 @@ pub fn run_all_solvers(model: &Model, timestepper: &Timestepper) { { if model.check_multi_scenario_solver_features::<SimdIpmF64Solver<4>>() { model - .run_multi_scenario::<SimdIpmF64Solver<4>>(timestepper, &Default::default()) + .run_multi_scenario::<SimdIpmF64Solver<4>>(&Default::default()) .expect("Failed to solve with SIMD IPM"); } } @@ -178,7 +186,7 @@ { if model.check_multi_scenario_solver_features::<ClIpmF64Solver>() { model - .run_multi_scenario::<ClIpmF64Solver>(timestepper, &Default::default()) + .run_multi_scenario::<ClIpmF64Solver>(&Default::default()) .expect("Failed to solve with OpenCl IPM"); } } @@ -186,47 +194,46 @@ /// Make a simple system with random inputs.
fn make_simple_system( - model: &mut Model, + network: &mut Network, suffix: &str, num_timesteps: usize, + num_inflow_scenarios: usize, + inflow_scenario_group_index: usize, rng: &mut R, ) -> Result<(), PywrError> { - let input_idx = model.add_input_node("input", Some(suffix))?; - let link_idx = model.add_link_node("link", Some(suffix))?; - let output_idx = model.add_output_node("output", Some(suffix))?; - - model.connect_nodes(input_idx, link_idx)?; - model.connect_nodes(link_idx, output_idx)?; + let input_idx = network.add_input_node("input", Some(suffix))?; + let link_idx = network.add_link_node("link", Some(suffix))?; + let output_idx = network.add_output_node("output", Some(suffix))?; - let num_scenarios = model.get_scenario_group_size_by_name("test-scenario")?; - let scenario_group_index = model.get_scenario_group_index_by_name("test-scenario")?; + network.connect_nodes(input_idx, link_idx)?; + network.connect_nodes(link_idx, output_idx)?; let inflow_distr: Normal = Normal::new(9.0, 1.0).unwrap(); - let mut inflow = ndarray::Array2::zeros((num_timesteps, num_scenarios)); + let mut inflow = ndarray::Array2::zeros((num_timesteps, num_inflow_scenarios)); for x in inflow.iter_mut() { *x = inflow_distr.sample(rng).max(0.0); } - let inflow = Array2Parameter::new(&format!("inflow-{suffix}"), inflow, scenario_group_index, None); - let idx = model.add_parameter(Box::new(inflow))?; + let inflow = Array2Parameter::new(&format!("inflow-{suffix}"), inflow, inflow_scenario_group_index, None); + let idx = network.add_parameter(Box::new(inflow))?; - model.set_node_max_flow( + network.set_node_max_flow( "input", Some(suffix), ConstraintValue::Metric(Metric::ParameterValue(idx)), )?; let input_cost = rng.gen_range(-20.0..-5.00); - model.set_node_cost("input", Some(suffix), ConstraintValue::Scalar(input_cost))?; + network.set_node_cost("input", Some(suffix), ConstraintValue::Scalar(input_cost))?; let outflow_distr = Normal::new(8.0, 3.0).unwrap(); let mut outflow: f64 = 
outflow_distr.sample(rng); outflow = outflow.max(0.0); - model.set_node_max_flow("output", Some(suffix), ConstraintValue::Scalar(outflow))?; + network.set_node_max_flow("output", Some(suffix), ConstraintValue::Scalar(outflow))?; - model.set_node_cost("output", Some(suffix), ConstraintValue::Scalar(-500.0))?; + network.set_node_cost("output", Some(suffix), ConstraintValue::Scalar(-500.0))?; Ok(()) } @@ -235,7 +242,7 @@ fn make_simple_system( /// /// fn make_simple_connections( - model: &mut Model, + model: &mut Network, num_systems: usize, density: usize, rng: &mut R, @@ -276,20 +283,39 @@ fn make_simple_connections( pub fn make_random_model( num_systems: usize, density: usize, - num_timesteps: usize, num_scenarios: usize, rng: &mut R, ) -> Result { - let mut model = Model::default(); + let timestepper = Timestepper::new(date!(2020 - 01 - 01), date!(2020 - 04 - 09), 1); + + let mut scenario_collection = ScenarioGroupCollection::default(); + scenario_collection.add_group("test-scenario", num_scenarios); + + let domain = ModelDomain::from(timestepper, scenario_collection); - model.add_scenario_group("test-scenario", num_scenarios)?; + let inflow_scenario_group_index = domain + .scenarios() + .group_index("test-scenario") + .expect("Could not find scenario group."); + let (num_timesteps, num_inflow_scenarios) = domain.shape(); + + let mut network = Network::default(); for i in 0..num_systems { let suffix = format!("sys-{i:04}"); - make_simple_system(&mut model, &suffix, num_timesteps, rng)?; + make_simple_system( + &mut network, + &suffix, + num_timesteps, + num_inflow_scenarios, + inflow_scenario_group_index, + rng, + )?; } - make_simple_connections(&mut model, num_systems, density, rng)?; + make_simple_connections(&mut network, num_systems, density, rng)?; + + let model = Model::new(domain, network); Ok(model) } @@ -298,26 +324,23 @@ pub fn make_random_model( mod tests { use super::make_random_model; use crate::solvers::{SimdIpmF64Solver, SimdIpmSolverSettings}; - 
use crate::timestep::Timestepper; use rand::SeedableRng; use rand_chacha::ChaCha8Rng; - use time::macros::date; #[test] fn test_random_model() { let n_sys = 50; let density = 5; let n_sc = 12; - let timestepper = Timestepper::new(date!(2020 - 01 - 01), date!(2020 - 04 - 09), 1); // Make a consistent random number generator // ChaCha8 should be consistent across builds and platforms let mut rng = ChaCha8Rng::seed_from_u64(0); - let model = make_random_model(n_sys, density, timestepper.timesteps().len(), n_sc, &mut rng).unwrap(); + let model = make_random_model(n_sys, density, n_sc, &mut rng).unwrap(); let settings = SimdIpmSolverSettings::default(); model - .run_multi_scenario::<SimdIpmF64Solver<4>>(&timestepper, &settings) + .run_multi_scenario::<SimdIpmF64Solver<4>>(&settings) .expect("Failed to run model!"); } } diff --git a/pywr-core/src/timestep.rs b/pywr-core/src/timestep.rs index 942dc256..84d181ff 100644 --- a/pywr-core/src/timestep.rs +++ b/pywr-core/src/timestep.rs @@ -63,7 +63,7 @@ impl Timestepper { } /// Create a vector of `Timestep`s between the start and end dates at the given duration. - pub fn timesteps(&self) -> Vec<Timestep> { + fn timesteps(&self) -> Vec<Timestep> { let mut timesteps: Vec<Timestep> = Vec::new(); let mut current = Timestep::new(self.start, 0, self.timestep); @@ -75,3 +75,22 @@ impl Timestepper { timesteps } } + +/// The time domain that a model will be simulated over.
+pub struct TimeDomain { + timesteps: Vec<Timestep>, +} + +impl TimeDomain { + pub fn timesteps(&self) -> &[Timestep] { + &self.timesteps + } +} + +impl From<Timestepper> for TimeDomain { + fn from(value: Timestepper) -> Self { + Self { + timesteps: value.timesteps(), + } + } +} diff --git a/pywr-core/src/virtual_storage.rs b/pywr-core/src/virtual_storage.rs index d16b5063..d78760c9 100644 --- a/pywr-core/src/virtual_storage.rs +++ b/pywr-core/src/virtual_storage.rs @@ -1,4 +1,4 @@ -use crate::model::Model; +use crate::network::Network; use crate::node::{ConstraintValue, FlowConstraints, NodeMeta, StorageConstraints, StorageInitialVolume}; use crate::state::{State, VirtualStorageState}; use crate::timestep::Timestep; @@ -144,15 +144,15 @@ impl VirtualStorage { VirtualStorageState::new(0.0) } - pub fn get_cost(&self, model: &Model, state: &State) -> Result<f64, PywrError> { + pub fn get_cost(&self, network: &Network, state: &State) -> Result<f64, PywrError> { match &self.cost { ConstraintValue::None => Ok(0.0), ConstraintValue::Scalar(v) => Ok(*v), - ConstraintValue::Metric(m) => m.get_value(model, state), + ConstraintValue::Metric(m) => m.get_value(network, state), } } - pub fn before(&self, timestep: &Timestep, model: &Model, state: &mut State) -> Result<(), PywrError> { + pub fn before(&self, timestep: &Timestep, network: &Network, state: &mut State) -> Result<(), PywrError> { let do_reset = if timestep.is_first() { // Set the initial volume if it is the first timestep.
true @@ -181,7 +181,7 @@ impl VirtualStorage { let volume = match &self.initial_volume { StorageInitialVolume::Absolute(iv) => *iv, StorageInitialVolume::Proportional(ipc) => { - let max_volume = self.get_max_volume(model, state)?; + let max_volume = self.get_max_volume(network, state)?; max_volume * ipc } }; @@ -202,15 +202,15 @@ impl VirtualStorage { .map(|factors| self.nodes.iter().zip(factors.iter()).map(|(n, f)| (*n, *f)).collect()) } - pub fn get_min_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_min_volume(&self, model: &Network, state: &State) -> Result { self.storage_constraints.get_min_volume(model, state) } - pub fn get_max_volume(&self, model: &Model, state: &State) -> Result { + pub fn get_max_volume(&self, model: &Network, state: &State) -> Result { self.storage_constraints.get_max_volume(model, state) } - pub fn get_current_available_volume_bounds(&self, model: &Model, state: &State) -> Result<(f64, f64), PywrError> { + pub fn get_current_available_volume_bounds(&self, model: &Network, state: &State) -> Result<(f64, f64), PywrError> { let min_vol = self.get_min_volume(model, state)?; let max_vol = self.get_max_volume(model, state)?; @@ -230,11 +230,11 @@ fn months_since_last_reset(current: &Date, last_reset: &Date) -> i32 { #[cfg(test)] mod tests { use crate::metric::Metric; - use crate::model::Model; + use crate::models::Model; + use crate::network::Network; use crate::node::{ConstraintValue, StorageInitialVolume}; use crate::recorders::{AssertionFnRecorder, AssertionRecorder}; use crate::scenario::ScenarioIndex; - use crate::solvers::{ClpSolver, ClpSolverSettings}; use crate::test_utils::{default_timestepper, run_all_solvers, simple_model}; use crate::timestep::Timestep; use crate::virtual_storage::{months_since_last_reset, VirtualStorageReset}; @@ -265,25 +265,23 @@ mod tests { /// Test the virtual storage constraints #[test] fn test_basic_virtual_storage() { - let mut model = Model::default(); - let timestepper = 
default_timestepper(); + let mut network = Network::default(); - let input_node = model.add_input_node("input", None).unwrap(); - let link_node0 = model.add_link_node("link", Some("0")).unwrap(); - let output_node0 = model.add_output_node("output", Some("0")).unwrap(); + let input_node = network.add_input_node("input", None).unwrap(); + let link_node0 = network.add_link_node("link", Some("0")).unwrap(); + let output_node0 = network.add_output_node("output", Some("0")).unwrap(); - model.connect_nodes(input_node, link_node0).unwrap(); - model.connect_nodes(link_node0, output_node0).unwrap(); + network.connect_nodes(input_node, link_node0).unwrap(); + network.connect_nodes(link_node0, output_node0).unwrap(); - let link_node1 = model.add_link_node("link", Some("1")).unwrap(); - let output_node1 = model.add_output_node("output", Some("1")).unwrap(); + let link_node1 = network.add_link_node("link", Some("1")).unwrap(); + let output_node1 = network.add_output_node("output", Some("1")).unwrap(); - model.connect_nodes(input_node, link_node1).unwrap(); - model.connect_nodes(link_node1, output_node1).unwrap(); + network.connect_nodes(input_node, link_node1).unwrap(); + network.connect_nodes(link_node1, output_node1).unwrap(); // Virtual storage with contributions from link-node0 than link-node1 - - let _vs = model.add_virtual_storage_node( + let _vs = network.add_virtual_storage_node( "virtual-storage", None, &[link_node0, link_node1], @@ -297,7 +295,7 @@ mod tests { // Setup a demand on output-0 and output-1 for sub_name in &["0", "1"] { - let output_node = model.get_mut_node_by_name("output", Some(sub_name)).unwrap(); + let output_node = network.get_mut_node_by_name("output", Some(sub_name)).unwrap(); output_node .set_max_flow_constraint(ConstraintValue::Scalar(10.0)) .unwrap(); @@ -308,7 +306,7 @@ mod tests { // 30 per day. 
// TODO assert let expected_vol = |ts: &Timestep, _si| (70.0 - ts.index as f64 * 30.0).max(0.0); // Set-up assertion for "link" node - let idx = model.get_node_by_name("link", Some("0")).unwrap().index(); + let idx = network.get_node_by_name("link", Some("0")).unwrap().index(); let expected = |ts: &Timestep, _si: &ScenarioIndex| { if ts.index < 3 { 10.0 @@ -317,10 +315,10 @@ mod tests { } }; let recorder = AssertionFnRecorder::new("link-0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Set-up assertion for "input" node - let idx = model.get_node_by_name("link", Some("1")).unwrap().index(); + let idx = network.get_node_by_name("link", Some("1")).unwrap().index(); let expected = |ts: &Timestep, _si: &ScenarioIndex| { if ts.index < 4 { 10.0 @@ -329,21 +327,23 @@ mod tests { } }; let recorder = AssertionFnRecorder::new("link-1-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); + let model = Model::new(default_timestepper().into(), network); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } #[test] /// Test virtual storage node costs fn test_virtual_storage_node_costs() { let mut model = simple_model(1); + let network = model.network_mut(); let timestepper = default_timestepper(); - let nodes = vec![model.get_node_index_by_name("input", None).unwrap()]; + let nodes = vec![network.get_node_index_by_name("input", None).unwrap()]; // Virtual storage node cost is high enough to prevent any flow - model + network .add_virtual_storage_node( "vs", None, @@ -358,11 +358,11 @@ mod tests { .unwrap(); let expected = Array::zeros((366, 1)); - let idx = model.get_node_by_name("output", None).unwrap().index(); + let idx = network.get_node_by_name("output", None).unwrap().index(); let recorder = 
AssertionRecorder::new("output-flow", Metric::NodeInFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-python/src/lib.rs b/pywr-python/src/lib.rs index fd88f96c..e1e87835 100644 --- a/pywr-python/src/lib.rs +++ b/pywr-python/src/lib.rs @@ -4,9 +4,7 @@ use crate::solvers::{ClIpmF32Solver, ClIpmF64Solver, ClIpmSolverSettings}; use crate::solvers::{HighsSolver, HighsSolverSettings}; use pyo3::exceptions::PyException; use pyo3::prelude::*; -use pywr_core::model::Model; use pywr_core::solvers::{ClpSolver, ClpSolverSettings}; -use pywr_core::timestep::Timestepper; use pywr_core::tracing::setup_tracing; use pywr_core::ParameterNotFoundError; use pywr_schema::PywrModel; @@ -676,19 +674,19 @@ fn run_model_from_string( // TODO handle the serde error properly let schema_v2: PywrModel = serde_json::from_str(data.as_str()).unwrap(); - let (mut model, timestepper): (Model, Timestepper) = schema_v2.build_model(path.as_deref(), None)?; + let model = schema_v2.build_model(path.as_deref(), None)?; let nt = num_threads.unwrap_or(1); py.allow_threads(|| { match solver_name.as_str() { - "clp" => model.run::(×tepper, &ClpSolverSettings::default()), + "clp" => model.run::(&ClpSolverSettings::default()), #[cfg(feature = "highs")] - "highs" => model.run::(×tepper, &HighsSolverSettings::default()), + "highs" => model.run::(&HighsSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - "clipm-f32" => model.run_multi_scenario::(×tepper, &ClIpmSolverSettings::default()), + "clipm-f32" => model.run_multi_scenario::(&ClIpmSolverSettings::default()), #[cfg(feature = "ipm-ocl")] - "clipm-f64" => model.run_multi_scenario::(×tepper, &ClIpmSolverSettings::default()), + "clipm-f64" => model.run_multi_scenario::(&ClIpmSolverSettings::default()), _ => panic!("Solver {solver_name} not recognised."), } .unwrap(); 
diff --git a/pywr-schema/src/error.rs b/pywr-schema/src/error.rs index 5c99e541..a3d0359c 100644 --- a/pywr-schema/src/error.rs +++ b/pywr-schema/src/error.rs @@ -13,6 +13,8 @@ pub enum SchemaError { NodeNotFound(String), #[error("parameter {0} not found")] ParameterNotFound(String), + #[error("network {0} not found")] + NetworkNotFound(String), #[error("missing initial volume for node: {0}")] MissingInitialVolume(String), #[error("Pywr core error: {0}")] @@ -43,6 +45,10 @@ pub enum SchemaError { DataLengthMismatch { expected: usize, found: usize }, #[error("Failed to estimate epsilon for use in the radial basis function.")] RbfEpsilonEstimation, + #[error("Scenario group with name {0} not found")] + ScenarioGroupNotFound(String), + #[error("Inter-network transfer with name {0} not found")] + InterNetworkTransferNotFound(String), } impl From for PyErr { diff --git a/pywr-schema/src/metric_sets/mod.rs b/pywr-schema/src/metric_sets/mod.rs index 02d2c631..12542a43 100644 --- a/pywr-schema/src/metric_sets/mod.rs +++ b/pywr-schema/src/metric_sets/mod.rs @@ -1,5 +1,5 @@ use crate::error::SchemaError; -use crate::PywrModel; +use crate::model::PywrNetwork; use serde::{Deserialize, Serialize}; /// @@ -12,8 +12,8 @@ pub enum OutputMetric { impl OutputMetric { fn try_clone_into_metric( &self, - model: &pywr_core::model::Model, - schema: &PywrModel, + network: &pywr_core::network::Network, + schema: &PywrNetwork, ) -> Result { match self { OutputMetric::NodeName(node_name) => { @@ -22,7 +22,7 @@ impl OutputMetric { .get_node_by_name(node_name) .ok_or_else(|| SchemaError::NodeNotFound(node_name.to_string()))?; // Create and return the node's default metric - node.default_metric(model) + node.default_metric(network) } } } @@ -36,15 +36,19 @@ pub struct MetricSet { } impl MetricSet { - pub fn add_to_model(&self, model: &mut pywr_core::model::Model, schema: &PywrModel) -> Result<(), SchemaError> { + pub fn add_to_model( + &self, + network: &mut pywr_core::network::Network, + 
schema: &PywrNetwork, + ) -> Result<(), SchemaError> { // Convert the schema representation to internal metrics. let metrics: Vec = self .metrics .iter() - .map(|m| m.try_clone_into_metric(model, schema)) + .map(|m| m.try_clone_into_metric(network, schema)) .collect::>()?; let metric_set = pywr_core::recorders::MetricSet::new(&self.name, None, metrics); - let _ = model.add_metric_set(metric_set)?; + let _ = network.add_metric_set(metric_set)?; Ok(()) } diff --git a/pywr-schema/src/model.rs b/pywr-schema/src/model.rs index aab67f15..b67dc898 100644 --- a/pywr-schema/src/model.rs +++ b/pywr-schema/src/model.rs @@ -5,9 +5,10 @@ use crate::data_tables::{DataTable, LoadedTableCollection}; use crate::error::{ConversionError, SchemaError}; use crate::metric_sets::MetricSet; use crate::outputs::Output; -use crate::parameters::TryIntoV2Parameter; +use crate::parameters::{MetricFloatReference, TryIntoV2Parameter}; +use pywr_core::models::ModelDomain; use pywr_core::PywrError; -use std::path::Path; +use std::path::{Path, PathBuf}; use time::Date; #[derive(serde::Deserialize, serde::Serialize, Clone)] @@ -85,10 +86,7 @@ pub struct Scenario { } #[derive(serde::Deserialize, serde::Serialize, Clone)] -pub struct PywrModel { - pub metadata: Metadata, - pub timestepper: Timestepper, - pub scenarios: Option>, +pub struct PywrNetwork { pub nodes: Vec, pub edges: Vec, pub parameters: Option>, @@ -97,7 +95,7 @@ pub struct PywrModel { pub outputs: Option>, } -impl PywrModel { +impl PywrNetwork { pub fn from_path>(path: P) -> Result { let data = std::fs::read_to_string(path).map_err(|e| SchemaError::IO(e.to_string()))?; Ok(serde_json::from_str(data.as_str())?) 
@@ -129,18 +127,14 @@ impl PywrModel { } } - pub fn build_model( + pub fn build_network( &self, + domain: &ModelDomain, data_path: Option<&Path>, output_path: Option<&Path>, - ) -> Result<(pywr_core::model::Model, pywr_core::timestep::Timestepper), SchemaError> { - let mut model = pywr_core::model::Model::default(); - - if let Some(scenarios) = &self.scenarios { - for scenario in scenarios { - model.add_scenario_group(&scenario.name, scenario.size)?; - } - } + inter_network_transfers: &[PywrMultiNetworkTransfer], + ) -> Result { + let mut network = pywr_core::network::Network::default(); // Load all the data tables let tables = LoadedTableCollection::from_schema(self.tables.as_deref(), data_path)?; @@ -152,7 +146,7 @@ impl PywrModel { let mut failed_nodes: Vec = Vec::new(); let n = remaining_nodes.len(); for node in remaining_nodes.into_iter() { - if let Err(e) = node.add_to_model(&mut model, &tables, data_path) { + if let Err(e) = node.add_to_model(&mut network, &domain, &tables, data_path, inter_network_transfers) { // Adding the node failed! 
match e { SchemaError::PywrCore(core_err) => match core_err { @@ -190,9 +184,9 @@ impl PywrModel { for from_connector in from_node.output_connectors(from_slot) { for to_connector in to_node.input_connectors() { let from_node_index = - model.get_node_index_by_name(from_connector.0, from_connector.1.as_deref())?; - let to_node_index = model.get_node_index_by_name(to_connector.0, to_connector.1.as_deref())?; - model.connect_nodes(from_node_index, to_node_index)?; + network.get_node_index_by_name(from_connector.0, from_connector.1.as_deref())?; + let to_node_index = network.get_node_index_by_name(to_connector.0, to_connector.1.as_deref())?; + network.connect_nodes(from_node_index, to_node_index)?; } } } @@ -203,7 +197,9 @@ impl PywrModel { let mut failed_parameters: Vec = Vec::new(); let n = remaining_parameters.len(); for parameter in remaining_parameters.into_iter() { - if let Err(e) = parameter.add_to_model(&mut model, &tables, data_path) { + if let Err(e) = + parameter.add_to_model(&mut network, &domain, &tables, data_path, inter_network_transfers) + { // Adding the parameter failed! 
match e { SchemaError::PywrCore(core_err) => match core_err { @@ -229,26 +225,95 @@ impl PywrModel { // Apply the inline parameters & constraints to the nodes for node in &self.nodes { - node.set_constraints(&mut model, &tables, data_path)?; + node.set_constraints(&mut network, &domain, &tables, data_path, inter_network_transfers)?; } // Create all of the metric sets if let Some(metric_sets) = &self.metric_sets { for metric_set in metric_sets { - metric_set.add_to_model(&mut model, self)?; + metric_set.add_to_model(&mut network, self)?; } } // Create all of the outputs if let Some(outputs) = &self.outputs { for output in outputs { - output.add_to_model(&mut model, output_path)?; + output.add_to_model(&mut network, output_path)?; } } + Ok(network) + } +} + +#[derive(serde::Deserialize, serde::Serialize, Clone)] +#[serde(untagged)] +pub enum PywrNetworkRef { + Path(PathBuf), + Inline(PywrNetwork), +} + +/// The top-level schema for a Pywr model. +/// +/// A Pywr model is defined by this top-level schema which is mostly conveniently loaded from a +/// JSON file. The schema is used to "build" a [`pywr_core::models::Model`] which can then be +/// "run" to produce results. The purpose of the schema is to provide a higher level and more +/// user friendly interface to model definition than the core model itself. This allows +/// abstractions, such as [`crate::nodes::WaterTreatmentWorks`], to be created and used in the +/// schema without the user needing to know the details of how this is implemented in the core +/// model. 
+/// +/// +/// # Example +/// +/// The simplest model is given in the example below: +/// +/// ```json +#[doc = include_str!("test_models/simple1.json")] +/// ``` +/// +/// +/// +#[derive(serde::Deserialize, serde::Serialize, Clone)] +pub struct PywrModel { + pub metadata: Metadata, + pub timestepper: Timestepper, + pub scenarios: Option>, + pub network: PywrNetwork, +} + +impl PywrModel { + pub fn from_path>(path: P) -> Result { + let data = std::fs::read_to_string(path).map_err(|e| SchemaError::IO(e.to_string()))?; + Ok(serde_json::from_str(data.as_str())?) + } + + pub fn from_str(data: &str) -> Result { + Ok(serde_json::from_str(data)?) + } + + pub fn build_model( + &self, + data_path: Option<&Path>, + output_path: Option<&Path>, + ) -> Result { let timestepper = self.timestepper.clone().into(); - Ok((model, timestepper)) + let mut scenario_collection = pywr_core::scenario::ScenarioGroupCollection::default(); + + if let Some(scenarios) = &self.scenarios { + for scenario in scenarios { + scenario_collection.add_group(&scenario.name, scenario.size); + } + } + + let domain = ModelDomain::from(timestepper, scenario_collection); + + let network = self.network.build_network(&domain, data_path, output_path, &[])?; + + let model = pywr_core::models::Model::new(domain, network); + + Ok(model) } } @@ -283,32 +348,195 @@ impl TryFrom for PywrModel { let tables = None; let outputs = None; let metric_sets = None; - - Ok(Self { - metadata, - timestepper, - scenarios: None, + let network = PywrNetwork { nodes, edges, parameters, tables, metric_sets, outputs, + }; + + Ok(Self { + metadata, + timestepper, + scenarios: None, + network, }) } } +#[derive(serde::Deserialize, serde::Serialize, Clone)] +pub struct PywrMultiNetworkTransfer { + pub from_network: String, + pub metric: MetricFloatReference, + pub name: String, + pub initial_value: Option, +} + +#[derive(serde::Deserialize, serde::Serialize, Clone)] +pub struct PywrMultiNetworkEntry { + pub name: String, + pub network: 
PywrNetworkRef, + pub transfers: Vec, +} + +/// A Pywr model containing multiple linked networks. +/// +/// This schema is used to define a model containing multiple linked networks. Each network +/// is self-contained and solved like a single model. However, the networks can be linked +/// together using [`PywrMultiNetworkTransfer`]s. These transfers allow the value of a metric +/// in one network to be used as the value of a parameter in another network. This allows complex +/// inter-model relationships to be defined. +/// +/// The model is solved by iterating over the networks within each time-step. Inter-network +/// transfers are updated between each network solve. The networks are solved in the order +/// that they are defined. This means that the order of the networks is important. For example, +/// the 1st network will only be able to use the previous time-step's state from other networks. +/// Whereas the 2nd network can use metrics calculated in the current time-step of the 1st model. +/// +/// The overall algorithm produces a single model run with interleaved solving of each network. +/// The pseudo-code for the algorithm is: +/// +/// ```text +/// for time_step in time_steps { +/// for network in networks { +/// // Get the latest values from the other networks +/// network.update_inter_network_transfers(); +/// // Solve this network's allocation routine / linear program +/// network.solve(); +/// } +/// } +/// ``` +/// +/// # When to use +/// +/// A [`PywrMultiNetworkModel`] should be used in cases where there is a strong separation between +/// the networks being simulated. The allocation routine (linear program) of each network is solved +/// independently each time-step. This means that the only way in which the networks can share +/// information and data is between the linear program solves via the user defined transfers. +/// +/// Configuring a model like this may be beneficial in the following cases: +/// 1. 
Represent separate systems with limited and/or prescribed connectivity. For example, +/// linking networks from two suppliers connected by a strategic transfer. +/// 2. Have important validated behaviour of the allocation that should be retained. If the +/// networks (linear programs) were combined into a single model, the allocation routine could +/// produce different results (i.e. penalty costs from one model influencing another). +/// 3. Are very large and/or complex to control model run times. The run time of a +/// [`PywrMultiNetworkModel`] is roughly the sum of the individual networks. Whereas the time to +/// solve a large linear program combining all the networks could be significantly longer. +/// +/// # Example +/// +/// The following example shows a model with two networks with the inflow to "supply2" in the second +/// network defined as the flow to "demand1" in the first network. +/// +/// ```json5 +/// // model.json +#[doc = include_str!("test_models/multi1/model.json")] +/// // network1.json +#[doc = include_str!("test_models/multi1/network1.json")] +/// // network2.json +#[doc = include_str!("test_models/multi1/network2.json")] +/// ``` +/// +/// +/// +#[derive(serde::Deserialize, serde::Serialize, Clone)] +pub struct PywrMultiNetworkModel { + pub metadata: Metadata, + pub timestepper: Timestepper, + pub scenarios: Option>, + pub networks: Vec, +} + +impl PywrMultiNetworkModel { + pub fn from_path>(path: P) -> Result { + let data = std::fs::read_to_string(path).map_err(|e| SchemaError::IO(e.to_string()))?; + Ok(serde_json::from_str(data.as_str())?) + } + + pub fn from_str(data: &str) -> Result { + Ok(serde_json::from_str(data)?) 
+ } + + pub fn build_model( + &self, + data_path: Option<&Path>, + output_path: Option<&Path>, + ) -> Result { + let timestepper = self.timestepper.clone().into(); + + let mut scenario_collection = pywr_core::scenario::ScenarioGroupCollection::default(); + + if let Some(scenarios) = &self.scenarios { + for scenario in scenarios { + scenario_collection.add_group(&scenario.name, scenario.size); + } + } + + let domain = ModelDomain::from(timestepper, scenario_collection); + let mut model = pywr_core::models::MultiNetworkModel::new(domain); + + // First load all the networks + // These will contain any parameters that are referenced by the inter-model transfers + // Because of potential circular references, we need to load all the networks first. + for network_entry in &self.networks { + // Load the network itself + let network = match &network_entry.network { + PywrNetworkRef::Path(path) => { + let pth = if let Some(dp) = data_path { + if path.is_relative() { + dp.join(path) + } else { + path.clone() + } + } else { + path.clone() + }; + + let network_schema = PywrNetwork::from_path(pth)?; + network_schema.build_network(model.domain(), data_path, output_path, &network_entry.transfers)? + } + PywrNetworkRef::Inline(network_schema) => { + network_schema.build_network(model.domain(), data_path, output_path, &network_entry.transfers)? + } + }; + + model.add_network(&network_entry.name, network); + } + + // Now load the inter-model transfers + for (to_network_idx, network_entry) in self.networks.iter().enumerate() { + for transfer in &network_entry.transfers { + let from_network_idx = model.get_network_index_by_name(&transfer.from_network)?; + + // Load the metric from the "from" network + let from_network = model.network_mut(from_network_idx)?; + // The transfer metric will fail to load if it is defined as an inter-model transfer itself. 
+ let from_metric = transfer.metric.load(from_network, &[])?; + + model.add_inter_network_transfer(from_network_idx, from_metric, to_network_idx, transfer.initial_value); + } + } + + Ok(model) + } +} + #[cfg(test)] mod tests { - use super::PywrModel; + use super::{PywrModel, PywrMultiNetworkModel}; use crate::parameters::{ - AggFunc, AggregatedParameter, ConstantParameter, ConstantValue, DynamicFloatValue, MetricFloatValue, Parameter, - ParameterMeta, + AggFunc, AggregatedParameter, ConstantParameter, ConstantValue, DynamicFloatValue, MetricFloatReference, + MetricFloatValue, Parameter, ParameterMeta, }; use ndarray::{Array1, Array2, Axis}; use pywr_core::metric::Metric; use pywr_core::recorders::AssertionRecorder; + use pywr_core::solvers::ClpSolver; use pywr_core::test_utils::run_all_solvers; + use std::path::PathBuf; fn model_str() -> &'static str { include_str!("./test_models/simple1.json") @@ -319,20 +547,21 @@ mod tests { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 3); - assert_eq!(schema.edges.len(), 2); + assert_eq!(schema.network.nodes.len(), 3); + assert_eq!(schema.network.edges.len(), 2); } #[test] fn test_simple1_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut model, timestepper) = schema.build_model(None, None).unwrap(); + let mut model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 3); - assert_eq!(model.edges.len(), 2); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 3); + assert_eq!(network.edges().len(), 2); - let demand1_idx = model.get_node_index_by_name("demand1", None).unwrap(); + let demand1_idx = network.get_node_index_by_name("demand1", None).unwrap(); let expected_values: Array1 = [10.0; 365].to_vec().into(); let expected_values: Array2 = expected_values.insert_axis(Axis(1)); @@ -344,10 +573,10 @@ mod tests { None, None, ); - 
model.add_recorder(Box::new(rec)).unwrap(); + network.add_recorder(Box::new(rec)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } /// Test that a cycle in parameter dependencies does not load. @@ -357,7 +586,7 @@ mod tests { let mut schema: PywrModel = serde_json::from_str(data).unwrap(); // Add additional parameters for the test - if let Some(parameters) = &mut schema.parameters { + if let Some(parameters) = &mut schema.network.parameters { parameters.extend(vec![ Parameter::Aggregated(AggregatedParameter { meta: ParameterMeta { @@ -366,14 +595,14 @@ mod tests { }, agg_func: AggFunc::Sum, metrics: vec![ - DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "p1".to_string(), key: None, - }), - DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + })), + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "agg2".to_string(), key: None, - }), + })), ], }), Parameter::Constant(ConstantParameter { @@ -391,14 +620,14 @@ mod tests { }, agg_func: AggFunc::Sum, metrics: vec![ - DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "p1".to_string(), key: None, - }), - DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + })), + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "agg1".to_string(), key: None, - }), + })), ], }), ]); @@ -414,7 +643,7 @@ mod tests { let data = model_str(); let mut schema: PywrModel = serde_json::from_str(data).unwrap(); - if let Some(parameters) = &mut schema.parameters { + if let Some(parameters) = &mut schema.network.parameters { parameters.extend(vec![ Parameter::Aggregated(AggregatedParameter { meta: ParameterMeta { @@ -423,14 +652,14 @@ mod tests { }, agg_func: AggFunc::Sum, metrics: vec![ - 
DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "p1".to_string(), key: None, - }), - DynamicFloatValue::Dynamic(MetricFloatValue::Parameter { + })), + DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { name: "p2".to_string(), key: None, - }), + })), ], }), Parameter::Constant(ConstantParameter { @@ -455,4 +684,105 @@ mod tests { let build_result = schema.build_model(None, None); assert!(build_result.is_ok()); } + + /// Test the multi1 model + #[test] + fn test_multi1_model() { + let mut model_fn = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + model_fn.push("src/test_models/multi1/model.json"); + + let schema = PywrMultiNetworkModel::from_path(model_fn.as_path()).unwrap(); + let mut model = schema.build_model(model_fn.parent(), None).unwrap(); + + // Add some recorders for the expected outputs + let network_1_idx = model + .get_network_index_by_name("network1") + .expect("network 1 not found"); + let network_1 = model.network_mut(network_1_idx).expect("network 1 not found"); + let demand1_idx = network_1.get_node_index_by_name("demand1", None).unwrap(); + + let expected_values: Array1 = [10.0; 365].to_vec().into(); + let expected_values: Array2 = expected_values.insert_axis(Axis(1)); + + let rec = AssertionRecorder::new( + "assert-demand1", + Metric::NodeInFlow(demand1_idx), + expected_values, + None, + None, + ); + network_1.add_recorder(Box::new(rec)).unwrap(); + + // Inflow to demand2 should be 10.0 via the transfer from network1 (demand1) + let network_2_idx = model + .get_network_index_by_name("network2") + .expect("network 1 not found"); + let network_2 = model.network_mut(network_2_idx).expect("network 2 not found"); + let demand1_idx = network_2.get_node_index_by_name("demand2", None).unwrap(); + + let expected_values: Array1 = [10.0; 365].to_vec().into(); + let expected_values: Array2 = 
expected_values.insert_axis(Axis(1)); + + let rec = AssertionRecorder::new( + "assert-demand2", + Metric::NodeInFlow(demand1_idx), + expected_values, + None, + None, + ); + network_2.add_recorder(Box::new(rec)).unwrap(); + + model.run::(&Default::default()).unwrap(); + } + + /// Test the multi2 model + #[test] + fn test_multi2_model() { + let mut model_fn = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + model_fn.push("src/test_models/multi2/model.json"); + + let schema = PywrMultiNetworkModel::from_path(model_fn.as_path()).unwrap(); + let mut model = schema.build_model(model_fn.parent(), None).unwrap(); + + // Add some recorders for the expected outputs + // inflow1 should be set to a max of 20.0 from the "demand" parameter in network2 + let network_1_idx = model + .get_network_index_by_name("network1") + .expect("network 1 not found"); + let network_1 = model.network_mut(network_1_idx).expect("network 1 not found"); + let demand1_idx = network_1.get_node_index_by_name("demand1", None).unwrap(); + + let expected_values: Array1 = [10.0; 365].to_vec().into(); + let expected_values: Array2 = expected_values.insert_axis(Axis(1)); + + let rec = AssertionRecorder::new( + "assert-demand1", + Metric::NodeInFlow(demand1_idx), + expected_values, + None, + None, + ); + network_1.add_recorder(Box::new(rec)).unwrap(); + + // Inflow to demand2 should be 10.0 via the transfer from network1 (demand1) + let network_2_idx = model + .get_network_index_by_name("network2") + .expect("network 1 not found"); + let network_2 = model.network_mut(network_2_idx).expect("network 2 not found"); + let demand1_idx = network_2.get_node_index_by_name("demand2", None).unwrap(); + + let expected_values: Array1 = [10.0; 365].to_vec().into(); + let expected_values: Array2 = expected_values.insert_axis(Axis(1)); + + let rec = AssertionRecorder::new( + "assert-demand2", + Metric::NodeInFlow(demand1_idx), + expected_values, + None, + None, + ); + network_2.add_recorder(Box::new(rec)).unwrap(); + + 
model.run::(&Default::default()).unwrap(); + } } diff --git a/pywr-schema/src/nodes/annual_virtual_storage.rs b/pywr-schema/src/nodes/annual_virtual_storage.rs index 3b11b08b..919afd3d 100644 --- a/pywr-schema/src/nodes/annual_virtual_storage.rs +++ b/pywr-schema/src/nodes/annual_virtual_storage.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::{ConstraintValue, StorageInitialVolume}; use pywr_core::virtual_storage::VirtualStorageReset; use pywr_v1_schema::nodes::AnnualVirtualStorageNode as AnnualVirtualStorageNodeV1; @@ -42,9 +44,11 @@ pub struct AnnualVirtualStorageNode { impl AnnualVirtualStorageNode { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { let initial_volume = if let Some(iv) = self.initial_volume { StorageInitialVolume::Absolute(iv) @@ -55,24 +59,30 @@ impl AnnualVirtualStorageNode { }; let cost = match &self.cost { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let min_volume = match &self.min_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let max_volume = match &self.max_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? 
+ .into(), None => ConstraintValue::None, }; let node_idxs = self .nodes .iter() - .map(|name| model.get_node_index_by_name(name.as_str(), None)) + .map(|name| network.get_node_index_by_name(name.as_str(), None)) .collect::, _>>()?; let reset = VirtualStorageReset::DayOfYear { @@ -80,7 +90,7 @@ impl AnnualVirtualStorageNode { month: self.reset.month, }; - model.add_virtual_storage_node( + network.add_virtual_storage_node( self.meta.name.as_str(), None, node_idxs.as_ref(), @@ -102,8 +112,8 @@ impl AnnualVirtualStorageNode { vec![] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::VirtualStorageVolume(idx)) } } diff --git a/pywr-schema/src/nodes/core.rs b/pywr-schema/src/nodes/core.rs index 4b1579e3..fb5a24f6 100644 --- a/pywr-schema/src/nodes/core.rs +++ b/pywr-schema/src/nodes/core.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::{ConstraintValue, StorageInitialVolume}; use pywr_v1_schema::nodes::{ AggregatedNode as AggregatedNodeV1, AggregatedStorageNode as AggregatedStorageNodeV1, @@ -37,30 +39,32 @@ impl InputNode { attributes } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_input_node(self.meta.name.as_str(), None)?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_input_node(self.meta.name.as_str(), None)?; Ok(()) } pub fn set_constraints( &self, - 
model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), None, value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), None, value.into())?; } if let Some(max_flow) = &self.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; } if let Some(min_flow) = &self.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; } Ok(()) @@ -73,8 +77,8 @@ impl InputNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeOutFlow(idx)) } } @@ -135,30 +139,32 @@ impl LinkNode { attributes } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_link_node(self.meta.name.as_str(), None)?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + 
network.add_link_node(self.meta.name.as_str(), None)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), None, value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), None, value.into())?; } if let Some(max_flow) = &self.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; } if let Some(min_flow) = &self.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; } Ok(()) @@ -171,8 +177,8 @@ impl LinkNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeOutFlow(idx)) } } @@ -232,30 +238,32 @@ impl OutputNode { attributes } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_output_node(self.meta.name.as_str(), None)?; + pub fn add_to_model(&self, 
network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_output_node(self.meta.name.as_str(), None)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), None, value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), None, value.into())?; } if let Some(max_flow) = &self.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; } if let Some(min_flow) = &self.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), None, value.into())?; } Ok(()) @@ -269,8 +277,8 @@ impl OutputNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeInFlow(idx)) } } @@ -334,9 +342,11 @@ impl StorageNode { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + 
domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { let initial_volume = if let Some(iv) = self.initial_volume { StorageInitialVolume::Absolute(iv) @@ -347,28 +357,34 @@ impl StorageNode { }; let min_volume = match &self.min_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let max_volume = match &self.max_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::None, }; - model.add_storage_node(self.meta.name.as_str(), None, initial_volume, min_volume, max_volume)?; + network.add_storage_node(self.meta.name.as_str(), None, initial_volume, min_volume, max_volume)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), None, value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), None, value.into())?; } Ok(()) @@ -382,8 +398,8 @@ impl StorageNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeVolume(idx)) } } 
@@ -474,7 +490,7 @@ impl TryFrom for StorageNode { #[doc = svgbobdoc::transform!( /// This is used to represent a catchment inflow. /// -/// Catchment nodes create a single [`crate::node::InputNode`] node in the model, but +/// Catchment nodes create a single [`crate::node::InputNode`] node in the network, but /// ensure that the maximum and minimum flow are equal to [`Self::flow`]. /// /// ```svgbob @@ -492,26 +508,28 @@ pub struct CatchmentNode { } impl CatchmentNode { - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_input_node(self.meta.name.as_str(), None)?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_input_node(self.meta.name.as_str(), None)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), None, value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), None, value.into())?; } if let Some(flow) = &self.flow { - let value = flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), None, value.clone().into())?; - model.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; + let value = flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), None, value.clone().into())?; + network.set_node_max_flow(self.meta.name.as_str(), None, value.into())?; } Ok(()) @@ -525,8 +543,8 @@ impl CatchmentNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, 
model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeOutFlow(idx)) } } @@ -570,33 +588,35 @@ pub struct AggregatedNode { } impl AggregatedNode { - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { let nodes = self .nodes .iter() - .map(|name| model.get_node_index_by_name(name, None)) + .map(|name| network.get_node_index_by_name(name, None)) .collect::, _>>()?; // We initialise with no factors, but will update them in the `set_constraints` method // once all the parameters are loaded. - model.add_aggregated_node(self.meta.name.as_str(), None, nodes.as_slice(), None)?; + network.add_aggregated_node(self.meta.name.as_str(), None, nodes.as_slice(), None)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(max_flow) = &self.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_aggregated_node_max_flow(self.meta.name.as_str(), None, value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_aggregated_node_max_flow(self.meta.name.as_str(), None, value.into())?; } if let Some(min_flow) = &self.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_aggregated_node_min_flow(self.meta.name.as_str(), None, value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + 
network.set_aggregated_node_min_flow(self.meta.name.as_str(), None, value.into())?; } if let Some(factors) = &self.factors { @@ -604,18 +624,18 @@ impl AggregatedNode { Factors::Proportion { factors } => pywr_core::aggregated_node::Factors::Proportion( factors .iter() - .map(|f| f.load(model, tables, data_path)) + .map(|f| f.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?, ), Factors::Ratio { factors } => pywr_core::aggregated_node::Factors::Ratio( factors .iter() - .map(|f| f.load(model, tables, data_path)) + .map(|f| f.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?, ), }; - model.set_aggregated_node_factors(self.meta.name.as_str(), None, Some(f))?; + network.set_aggregated_node_factors(self.meta.name.as_str(), None, Some(f))?; } Ok(()) @@ -632,8 +652,8 @@ impl AggregatedNode { vec![] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_aggregated_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_aggregated_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::AggregatedNodeOutFlow(idx)) } } @@ -684,14 +704,14 @@ pub struct AggregatedStorageNode { } impl AggregatedStorageNode { - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { let nodes = self .storage_nodes .iter() - .map(|name| model.get_node_index_by_name(name, None)) + .map(|name| network.get_node_index_by_name(name, None)) .collect::>()?; - model.add_aggregated_storage_node(self.meta.name.as_str(), None, nodes)?; + network.add_aggregated_storage_node(self.meta.name.as_str(), None, nodes)?; Ok(()) } @@ -706,8 +726,8 @@ impl AggregatedStorageNode { vec![] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { 
- let idx = model.get_aggregated_storage_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_aggregated_storage_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::AggregatedNodeVolume(idx)) } } diff --git a/pywr-schema/src/nodes/delay.rs b/pywr-schema/src/nodes/delay.rs index b71b3ecf..d89283f5 100644 --- a/pywr-schema/src/nodes/delay.rs +++ b/pywr-schema/src/nodes/delay.rs @@ -41,29 +41,29 @@ impl DelayNode { Some("outflow") } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_output_node(self.meta.name.as_str(), Self::output_sub_name())?; - model.add_input_node(self.meta.name.as_str(), Self::input_sub_now())?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_output_node(self.meta.name.as_str(), Self::output_sub_name())?; + network.add_input_node(self.meta.name.as_str(), Self::input_sub_now())?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, tables: &LoadedTableCollection, ) -> Result<(), SchemaError> { // Create the delay parameter let name = format!("{}-delay", self.meta.name.as_str()); - let output_idx = model.get_node_index_by_name(self.meta.name.as_str(), Self::output_sub_name())?; + let output_idx = network.get_node_index_by_name(self.meta.name.as_str(), Self::output_sub_name())?; let metric = Metric::NodeInFlow(output_idx); let p = pywr_core::parameters::DelayParameter::new(&name, metric, self.delay, self.initial_value.load(tables)?); - let delay_idx = model.add_parameter(Box::new(p))?; + let delay_idx = network.add_parameter(Box::new(p))?; // Apply it as a constraint on the input node. 
let metric = Metric::ParameterValue(delay_idx); - model.set_node_max_flow(self.meta.name.as_str(), Self::input_sub_now(), metric.clone().into())?; - model.set_node_min_flow(self.meta.name.as_str(), Self::input_sub_now(), metric.into())?; + network.set_node_max_flow(self.meta.name.as_str(), Self::input_sub_now(), metric.clone().into())?; + network.set_node_min_flow(self.meta.name.as_str(), Self::input_sub_now(), metric.into())?; Ok(()) } @@ -78,8 +78,8 @@ impl DelayNode { vec![(self.meta.name.as_str(), Self::input_sub_now().map(|s| s.to_string()))] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), Self::input_sub_now().as_deref())?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), Self::input_sub_now().as_deref())?; Ok(Metric::NodeOutFlow(idx)) } } @@ -122,7 +122,6 @@ mod tests { use pywr_core::metric::Metric; use pywr_core::recorders::AssertionRecorder; use pywr_core::test_utils::run_all_solvers; - use pywr_core::timestep::Timestepper; fn model_str() -> &'static str { include_str!("../test_models/delay1.json") @@ -132,27 +131,28 @@ mod tests { fn test_model_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let mut model: pywr_core::models::Model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 4); - assert_eq!(model.edges.len(), 2); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 4); + assert_eq!(network.edges().len(), 2); // TODO put this assertion data in the test model file. 
- let idx = model.get_node_by_name("link1", Some("inflow")).unwrap().index(); + let idx = network.get_node_by_name("link1", Some("inflow")).unwrap().index(); let expected = Array2::from_elem((366, 1), 15.0); let recorder = AssertionRecorder::new("link1-inflow", Metric::NodeInFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("link1", Some("outflow")).unwrap().index(); + let idx = network.get_node_by_name("link1", Some("outflow")).unwrap().index(); let expected = concatenate![ Axis(0), Array2::from_elem((3, 1), 0.0), Array2::from_elem((363, 1), 15.0) ]; let recorder = AssertionRecorder::new("link1-outflow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-schema/src/nodes/loss_link.rs b/pywr-schema/src/nodes/loss_link.rs index 499a62e2..01ae8676 100644 --- a/pywr-schema/src/nodes/loss_link.rs +++ b/pywr-schema/src/nodes/loss_link.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_v1_schema::nodes::LossLinkNode as LossLinkNodeV1; use std::path::Path; @@ -42,11 +44,11 @@ impl LossLinkNode { Some("net") } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_link_node(self.meta.name.as_str(), Self::net_sub_name())?; - // TODO make the loss node configurable (i.e. 
it could be a link if a model wanted to use the loss) + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_link_node(self.meta.name.as_str(), Self::net_sub_name())?; + // TODO make the loss node configurable (i.e. it could be a link if a network wanted to use the loss) // The above would need to support slots in the connections. - model.add_output_node(self.meta.name.as_str(), Self::loss_sub_name())?; + network.add_output_node(self.meta.name.as_str(), Self::loss_sub_name())?; // TODO add the aggregated node that actually does the losses! Ok(()) @@ -54,23 +56,25 @@ impl LossLinkNode { pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.net_cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; } if let Some(max_flow) = &self.max_net_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; } if let Some(min_flow) = &self.min_net_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), 
Self::net_sub_name(), value.into())?; } Ok(()) @@ -89,8 +93,8 @@ impl LossLinkNode { vec![(self.meta.name.as_str(), Self::net_sub_name().map(|s| s.to_string()))] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), Self::net_sub_name().as_deref())?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), Self::net_sub_name().as_deref())?; Ok(Metric::NodeOutFlow(idx)) } } diff --git a/pywr-schema/src/nodes/mod.rs b/pywr-schema/src/nodes/mod.rs index d71b4997..1f52247b 100644 --- a/pywr-schema/src/nodes/mod.rs +++ b/pywr-schema/src/nodes/mod.rs @@ -13,19 +13,20 @@ mod water_treatment_works; use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::{PywrMultiNetworkTransfer, PywrNetwork}; pub use crate::nodes::core::{ AggregatedNode, AggregatedStorageNode, CatchmentNode, InputNode, LinkNode, OutputNode, StorageNode, }; pub use crate::nodes::delay::DelayNode; pub use crate::nodes::river::RiverNode; use crate::parameters::DynamicFloatValue; -use crate::PywrModel; pub use annual_virtual_storage::AnnualVirtualStorageNode; pub use loss_link::LossLinkNode; pub use monthly_virtual_storage::MonthlyVirtualStorageNode; pub use piecewise_link::{PiecewiseLinkNode, PiecewiseLinkStep}; pub use piecewise_storage::PiecewiseStorageNode; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_v1_schema::nodes::{ CoreNode as CoreNodeV1, Node as NodeV1, NodeMeta as NodeMetaV1, NodePosition as NodePositionV1, }; @@ -124,11 +125,11 @@ impl NodeBuilder { } /// Create the next default name without duplicating an existing name in the model. 
- pub fn next_default_name_for_model(mut self, model: &PywrModel) -> Self { + pub fn next_default_name_for_model(mut self, network: &PywrNetwork) -> Self { let mut num = 1; loop { let name = format!("{}-{}", self.ty.to_string(), num); - if model.get_node_by_name(&name).is_none() { + if network.get_node_by_name(&name).is_none() { // No node with this name found! self.name = Some(name); break; @@ -312,56 +313,68 @@ impl Node { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { match self { - Node::Input(n) => n.add_to_model(model), - Node::Link(n) => n.add_to_model(model), - Node::Output(n) => n.add_to_model(model), - Node::Storage(n) => n.add_to_model(model, tables, data_path), - Node::Catchment(n) => n.add_to_model(model), - Node::RiverGauge(n) => n.add_to_model(model), - Node::LossLink(n) => n.add_to_model(model), - Node::River(n) => n.add_to_model(model), - Node::RiverSplitWithGauge(n) => n.add_to_model(model), - Node::WaterTreatmentWorks(n) => n.add_to_model(model), - Node::Aggregated(n) => n.add_to_model(model), - Node::AggregatedStorage(n) => n.add_to_model(model), - Node::VirtualStorage(n) => n.add_to_model(model, tables, data_path), - Node::AnnualVirtualStorage(n) => n.add_to_model(model, tables, data_path), - Node::PiecewiseLink(n) => n.add_to_model(model), - Node::PiecewiseStorage(n) => n.add_to_model(model, tables, data_path), - Node::Delay(n) => n.add_to_model(model), - Node::MonthlyVirtualStorage(n) => n.add_to_model(model, tables, data_path), + Node::Input(n) => n.add_to_model(network), + Node::Link(n) => n.add_to_model(network), + Node::Output(n) => n.add_to_model(network), + Node::Storage(n) => n.add_to_model(network, domain, tables, data_path, inter_network_transfers), + Node::Catchment(n) => n.add_to_model(network), + 
Node::RiverGauge(n) => n.add_to_model(network), + Node::LossLink(n) => n.add_to_model(network), + Node::River(n) => n.add_to_model(network), + Node::RiverSplitWithGauge(n) => n.add_to_model(network), + Node::WaterTreatmentWorks(n) => n.add_to_model(network), + Node::Aggregated(n) => n.add_to_model(network), + Node::AggregatedStorage(n) => n.add_to_model(network), + Node::VirtualStorage(n) => n.add_to_model(network, domain, tables, data_path, inter_network_transfers), + Node::AnnualVirtualStorage(n) => { + n.add_to_model(network, domain, tables, data_path, inter_network_transfers) + } + Node::PiecewiseLink(n) => n.add_to_model(network), + Node::PiecewiseStorage(n) => n.add_to_model(network, domain, tables, data_path, inter_network_transfers), + Node::Delay(n) => n.add_to_model(network), + Node::MonthlyVirtualStorage(n) => { + n.add_to_model(network, domain, tables, data_path, inter_network_transfers) + } } } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { match self { - Node::Input(n) => n.set_constraints(model, tables, data_path), - Node::Link(n) => n.set_constraints(model, tables, data_path), - Node::Output(n) => n.set_constraints(model, tables, data_path), - Node::Storage(n) => n.set_constraints(model, tables, data_path), - Node::Catchment(n) => n.set_constraints(model, tables, data_path), - Node::RiverGauge(n) => n.set_constraints(model, tables, data_path), - Node::LossLink(n) => n.set_constraints(model, tables, data_path), + Node::Input(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::Link(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::Output(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + 
Node::Storage(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::Catchment(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::RiverGauge(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::LossLink(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), Node::River(_) => Ok(()), // No constraints on river node - Node::RiverSplitWithGauge(n) => n.set_constraints(model, tables, data_path), - Node::WaterTreatmentWorks(n) => n.set_constraints(model, tables, data_path), - Node::Aggregated(n) => n.set_constraints(model, tables, data_path), + Node::RiverSplitWithGauge(n) => { + n.set_constraints(network, domain, tables, data_path, inter_network_transfers) + } + Node::WaterTreatmentWorks(n) => { + n.set_constraints(network, domain, tables, data_path, inter_network_transfers) + } + Node::Aggregated(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), Node::AggregatedStorage(_) => Ok(()), // No constraints on aggregated storage nodes. Node::VirtualStorage(_) => Ok(()), // TODO Node::AnnualVirtualStorage(_) => Ok(()), // TODO - Node::PiecewiseLink(n) => n.set_constraints(model, tables, data_path), - Node::PiecewiseStorage(n) => n.set_constraints(model, tables, data_path), - Node::Delay(n) => n.set_constraints(model, tables), + Node::PiecewiseLink(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::PiecewiseStorage(n) => n.set_constraints(network, domain, tables, data_path, inter_network_transfers), + Node::Delay(n) => n.set_constraints(network, tables), Node::MonthlyVirtualStorage(_) => Ok(()), // TODO } } @@ -415,26 +428,26 @@ impl Node { } /// Returns the default metric for this node. 
- pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { match self { - Node::Input(n) => n.default_metric(model), - Node::Link(n) => n.default_metric(model), - Node::Output(n) => n.default_metric(model), - Node::Storage(n) => n.default_metric(model), - Node::Catchment(n) => n.default_metric(model), - Node::RiverGauge(n) => n.default_metric(model), - Node::LossLink(n) => n.default_metric(model), - Node::River(n) => n.default_metric(model), - Node::RiverSplitWithGauge(n) => n.default_metric(model), - Node::WaterTreatmentWorks(n) => n.default_metric(model), - Node::Aggregated(n) => n.default_metric(model), - Node::AggregatedStorage(n) => n.default_metric(model), - Node::VirtualStorage(n) => n.default_metric(model), - Node::AnnualVirtualStorage(n) => n.default_metric(model), - Node::MonthlyVirtualStorage(n) => n.default_metric(model), - Node::PiecewiseLink(n) => n.default_metric(model), - Node::Delay(n) => n.default_metric(model), - Node::PiecewiseStorage(n) => n.default_metric(model), + Node::Input(n) => n.default_metric(network), + Node::Link(n) => n.default_metric(network), + Node::Output(n) => n.default_metric(network), + Node::Storage(n) => n.default_metric(network), + Node::Catchment(n) => n.default_metric(network), + Node::RiverGauge(n) => n.default_metric(network), + Node::LossLink(n) => n.default_metric(network), + Node::River(n) => n.default_metric(network), + Node::RiverSplitWithGauge(n) => n.default_metric(network), + Node::WaterTreatmentWorks(n) => n.default_metric(network), + Node::Aggregated(n) => n.default_metric(network), + Node::AggregatedStorage(n) => n.default_metric(network), + Node::VirtualStorage(n) => n.default_metric(network), + Node::AnnualVirtualStorage(n) => n.default_metric(network), + Node::MonthlyVirtualStorage(n) => n.default_metric(network), + Node::PiecewiseLink(n) => n.default_metric(network), + Node::Delay(n) => 
n.default_metric(network), + Node::PiecewiseStorage(n) => n.default_metric(network), } } } diff --git a/pywr-schema/src/nodes/monthly_virtual_storage.rs b/pywr-schema/src/nodes/monthly_virtual_storage.rs index 2c9bf47f..3c34ce83 100644 --- a/pywr-schema/src/nodes/monthly_virtual_storage.rs +++ b/pywr-schema/src/nodes/monthly_virtual_storage.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::{ConstraintValue, StorageInitialVolume}; use pywr_core::virtual_storage::VirtualStorageReset; use pywr_v1_schema::nodes::MonthlyVirtualStorageNode as MonthlyVirtualStorageNodeV1; @@ -36,9 +38,11 @@ pub struct MonthlyVirtualStorageNode { impl MonthlyVirtualStorageNode { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { let initial_volume = if let Some(iv) = self.initial_volume { StorageInitialVolume::Absolute(iv) @@ -49,24 +53,30 @@ impl MonthlyVirtualStorageNode { }; let cost = match &self.cost { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let min_volume = match &self.min_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? 
+ .into(), None => ConstraintValue::Scalar(0.0), }; let max_volume = match &self.max_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::None, }; let node_idxs = self .nodes .iter() - .map(|name| model.get_node_index_by_name(name.as_str(), None)) + .map(|name| network.get_node_index_by_name(name.as_str(), None)) .collect::, _>>()?; let reset = VirtualStorageReset::NumberOfMonths { @@ -74,7 +84,7 @@ impl MonthlyVirtualStorageNode { }; // TODO this should be an annual virtual storage! - model.add_virtual_storage_node( + network.add_virtual_storage_node( self.meta.name.as_str(), None, node_idxs.as_ref(), @@ -96,8 +106,8 @@ impl MonthlyVirtualStorageNode { vec![] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::VirtualStorageVolume(idx)) } } diff --git a/pywr-schema/src/nodes/piecewise_link.rs b/pywr-schema/src/nodes/piecewise_link.rs index 2b141fff..599bc713 100644 --- a/pywr-schema/src/nodes/piecewise_link.rs +++ b/pywr-schema/src/nodes/piecewise_link.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_v1_schema::nodes::PiecewiseLinkNode as PiecewiseLinkNodeV1; use std::path::Path; @@ -47,36 +49,38 @@ impl PiecewiseLinkNode { Some(format!("step-{i:02}")) } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { + pub fn 
add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { // create a link node for each step for (i, _) in self.steps.iter().enumerate() { - model.add_link_node(self.meta.name.as_str(), Self::step_sub_name(i).as_deref())?; + network.add_link_node(self.meta.name.as_str(), Self::step_sub_name(i).as_deref())?; } Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { for (i, step) in self.steps.iter().enumerate() { let sub_name = Self::step_sub_name(i); if let Some(cost) = &step.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; } if let Some(max_flow) = &step.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; + let value = max_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; } if let Some(min_flow) = &step.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; } } @@ -98,12 +102,12 @@ impl PiecewiseLinkNode { .collect() } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { + pub fn default_metric(&self, network: 
&pywr_core::network::Network) -> Result { let indices = self .steps .iter() .enumerate() - .map(|(i, _)| model.get_node_index_by_name(self.meta.name.as_str(), Self::step_sub_name(i).as_deref())) + .map(|(i, _)| network.get_node_index_by_name(self.meta.name.as_str(), Self::step_sub_name(i).as_deref())) .collect::, _>>()?; Ok(Metric::MultiNodeInFlow { @@ -159,7 +163,6 @@ mod tests { use pywr_core::metric::Metric; use pywr_core::recorders::AssertionRecorder; use pywr_core::test_utils::run_all_solvers; - use pywr_core::timestep::Timestepper; fn model_str() -> &'static str { include_str!("../test_models/piecewise_link1.json") @@ -169,28 +172,29 @@ mod tests { fn test_model_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let mut model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 5); - assert_eq!(model.edges.len(), 6); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 5); + assert_eq!(network.edges().len(), 6); // TODO put this assertion data in the test model file. 
- let idx = model.get_node_by_name("link1", Some("step-00")).unwrap().index(); + let idx = network.get_node_by_name("link1", Some("step-00")).unwrap().index(); let expected = Array2::from_elem((366, 1), 1.0); let recorder = AssertionRecorder::new("link1-s0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("link1", Some("step-01")).unwrap().index(); + let idx = network.get_node_by_name("link1", Some("step-01")).unwrap().index(); let expected = Array2::from_elem((366, 1), 3.0); let recorder = AssertionRecorder::new("link1-s0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("link1", Some("step-02")).unwrap().index(); + let idx = network.get_node_by_name("link1", Some("step-02")).unwrap().index(); let expected = Array2::from_elem((366, 1), 0.0); let recorder = AssertionRecorder::new("link1-s0-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-schema/src/nodes/piecewise_storage.rs b/pywr-schema/src/nodes/piecewise_storage.rs index d9554fd0..75bf2e9b 100644 --- a/pywr-schema/src/nodes/piecewise_storage.rs +++ b/pywr-schema/src/nodes/piecewise_storage.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::DynamicFloatValue; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::{ConstraintValue, StorageInitialVolume}; use pywr_core::parameters::VolumeBetweenControlCurvesParameter; use std::path::Path; @@ 
-59,12 +61,16 @@ impl PiecewiseStorageNode { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { // These are the min and max volume of the overall node - let max_volume = self.max_volume.load(model, tables, data_path)?; + let max_volume = self + .max_volume + .load(network, domain, tables, data_path, inter_network_transfers)?; let mut store_node_indices = Vec::new(); @@ -73,12 +79,20 @@ impl PiecewiseStorageNode { // The volume of this step is the proportion between the last control curve // (or zero if first) and this control curve. let lower = if i > 0 { - Some(self.steps[i - 1].control_curve.load(model, tables, data_path)?) + Some(self.steps[i - 1].control_curve.load( + network, + domain, + tables, + data_path, + inter_network_transfers, + )?) } else { None }; - let upper = step.control_curve.load(model, tables, data_path)?; + let upper = step + .control_curve + .load(network, domain, tables, data_path, inter_network_transfers)?; let max_volume_parameter = VolumeBetweenControlCurvesParameter::new( format!("{}-{}-max-volume", self.meta.name, Self::step_sub_name(i).unwrap()).as_str(), @@ -86,7 +100,7 @@ impl PiecewiseStorageNode { Some(upper), lower, ); - let max_volume_parameter_idx = model.add_parameter(Box::new(max_volume_parameter))?; + let max_volume_parameter_idx = network.add_parameter(Box::new(max_volume_parameter))?; let max_volume = ConstraintValue::Metric(Metric::ParameterValue(max_volume_parameter_idx)); // Each store has min volume of zero @@ -94,7 +108,7 @@ impl PiecewiseStorageNode { // Assume each store is full to start with let initial_volume = StorageInitialVolume::Proportional(1.0); - let idx = model.add_storage_node( + let idx = network.add_storage_node( self.meta.name.as_str(), Self::step_sub_name(i).as_deref(), 
initial_volume, @@ -104,8 +118,8 @@ impl PiecewiseStorageNode { if let Some(prev_idx) = store_node_indices.last() { // There was a lower store; connect to it in both directions - model.connect_nodes(idx, *prev_idx)?; - model.connect_nodes(*prev_idx, idx)?; + network.connect_nodes(idx, *prev_idx)?; + network.connect_nodes(*prev_idx, idx)?; } store_node_indices.push(idx); @@ -113,7 +127,10 @@ impl PiecewiseStorageNode { // The volume of this store the remain proportion above the last control curve let lower = match self.steps.last() { - Some(step) => Some(step.control_curve.load(model, tables, data_path)?), + Some(step) => Some( + step.control_curve + .load(network, domain, tables, data_path, inter_network_transfers)?, + ), None => None, }; @@ -130,7 +147,7 @@ impl PiecewiseStorageNode { upper, lower, ); - let max_volume_parameter_idx = model.add_parameter(Box::new(max_volume_parameter))?; + let max_volume_parameter_idx = network.add_parameter(Box::new(max_volume_parameter))?; let max_volume = ConstraintValue::Metric(Metric::ParameterValue(max_volume_parameter_idx)); // Each store has min volume of zero @@ -139,7 +156,7 @@ impl PiecewiseStorageNode { let initial_volume = StorageInitialVolume::Proportional(1.0); // And one for the residual part above the less step - let idx = model.add_storage_node( + let idx = network.add_storage_node( self.meta.name.as_str(), Self::step_sub_name(self.steps.len()).as_deref(), initial_volume, @@ -149,30 +166,32 @@ impl PiecewiseStorageNode { if let Some(prev_idx) = store_node_indices.last() { // There was a lower store; connect to it in both directions - model.connect_nodes(idx, *prev_idx)?; - model.connect_nodes(*prev_idx, idx)?; + network.connect_nodes(idx, *prev_idx)?; + network.connect_nodes(*prev_idx, idx)?; } store_node_indices.push(idx); // Finally, add an aggregate storage node covering all the individual stores - model.add_aggregated_storage_node(self.meta.name.as_str(), None, store_node_indices)?; + 
network.add_aggregated_storage_node(self.meta.name.as_str(), None, store_node_indices)?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { for (i, step) in self.steps.iter().enumerate() { let sub_name = Self::step_sub_name(i); if let Some(cost) = &step.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), sub_name.as_deref(), value.into())?; } } @@ -186,8 +205,8 @@ impl PiecewiseStorageNode { vec![(self.meta.name.as_str(), Self::step_sub_name(self.steps.len()))] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_aggregated_storage_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_aggregated_storage_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::AggregatedNodeVolume(idx)) } } @@ -199,7 +218,6 @@ mod tests { use pywr_core::metric::{IndexMetric, Metric}; use pywr_core::recorders::{AssertionRecorder, IndexAssertionRecorder}; use pywr_core::test_utils::run_all_solvers; - use pywr_core::timestep::Timestepper; fn piecewise_storage1_str() -> &'static str { include_str!("../test_models/piecewise_storage1.json") @@ -214,13 +232,14 @@ mod tests { fn test_piecewise_storage1() { let data = piecewise_storage1_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let mut model = schema.build_model(None, None).unwrap(); - 
assert_eq!(model.nodes.len(), 5); - assert_eq!(model.edges.len(), 6); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 5); + assert_eq!(network.edges().len(), 6); // TODO put this assertion data in the test model file. - let idx = model + let idx = network .get_aggregated_storage_node_index_by_name("storage1", None) .unwrap(); @@ -244,10 +263,10 @@ mod tests { None, None, ); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } /// Test running `piecewise_storage2.json` @@ -255,13 +274,14 @@ mod tests { fn test_piecewise_storage2() { let data = piecewise_storage2_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let mut model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 5); - assert_eq!(model.edges.len(), 6); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 5); + assert_eq!(network.edges().len(), 6); // TODO put this assertion data in the test model file. 
- let idx = model + let idx = network .get_aggregated_storage_node_index_by_name("storage1", None) .unwrap(); @@ -307,9 +327,9 @@ mod tests { None, None, ); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model + let idx = network .get_index_parameter_index_by_name("storage1-drought-index") .unwrap(); @@ -318,9 +338,9 @@ mod tests { IndexMetric::IndexParameterValue(idx), expected_drought_index, ); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-schema/src/nodes/river.rs b/pywr-schema/src/nodes/river.rs index fed80e7c..fd78a6c7 100644 --- a/pywr-schema/src/nodes/river.rs +++ b/pywr-schema/src/nodes/river.rs @@ -16,8 +16,8 @@ impl RiverNode { HashMap::new() } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_link_node(self.meta.name.as_str(), None)?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_link_node(self.meta.name.as_str(), None)?; Ok(()) } @@ -28,8 +28,8 @@ impl RiverNode { vec![(self.meta.name.as_str(), None)] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::NodeOutFlow(idx)) } } diff --git a/pywr-schema/src/nodes/river_gauge.rs b/pywr-schema/src/nodes/river_gauge.rs index dac0f0f7..ea61eb8c 100644 --- a/pywr-schema/src/nodes/river_gauge.rs +++ b/pywr-schema/src/nodes/river_gauge.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use 
crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_v1_schema::nodes::RiverGaugeNode as RiverGaugeNodeV1; use std::path::Path; @@ -38,28 +40,30 @@ impl RiverGaugeNode { Some("bypass") } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - model.add_link_node(self.meta.name.as_str(), Self::mrf_sub_name())?; - model.add_link_node(self.meta.name.as_str(), Self::bypass_sub_name())?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + network.add_link_node(self.meta.name.as_str(), Self::mrf_sub_name())?; + network.add_link_node(self.meta.name.as_str(), Self::bypass_sub_name())?; Ok(()) } pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { // MRF applies as a maximum on the MRF node. 
if let Some(cost) = &self.mrf_cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; } if let Some(mrf) = &self.mrf { - let value = mrf.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; + let value = mrf.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; } Ok(()) @@ -79,10 +83,10 @@ impl RiverGaugeNode { ] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { let indices = vec![ - model.get_node_index_by_name(self.meta.name.as_str(), Self::mrf_sub_name())?, - model.get_node_index_by_name(self.meta.name.as_str(), Self::bypass_sub_name())?, + network.get_node_index_by_name(self.meta.name.as_str(), Self::mrf_sub_name())?, + network.get_node_index_by_name(self.meta.name.as_str(), Self::bypass_sub_name())?, ]; Ok(Metric::MultiNodeInFlow { @@ -119,7 +123,6 @@ impl TryFrom for RiverGaugeNode { mod tests { use crate::model::PywrModel; use pywr_core::test_utils::run_all_solvers; - use pywr_core::timestep::Timestepper; fn model_str() -> &'static str { r#" @@ -134,43 +137,45 @@ mod tests { "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "catchment1", - "type": "Catchment", - "flow": 15 - }, - { - "name": "gauge1", - "type": "RiverGauge", - "mrf": 5.0, - "mrf_cost": -20.0 - }, - { - "name": "term1", - "type": "Output" - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 15.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "catchment1", - "to_node": "gauge1" - }, - { - "from_node": "gauge1", - "to_node": 
"term1" - }, - { - "from_node": "gauge1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "catchment1", + "type": "Catchment", + "flow": 15 + }, + { + "name": "gauge1", + "type": "RiverGauge", + "mrf": 5.0, + "mrf_cost": -20.0 + }, + { + "name": "term1", + "type": "Output" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 15.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "catchment1", + "to_node": "gauge1" + }, + { + "from_node": "gauge1", + "to_node": "term1" + }, + { + "from_node": "gauge1", + "to_node": "demand1" + } + ] + } } "# } @@ -180,21 +185,22 @@ mod tests { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 4); - assert_eq!(schema.edges.len(), 3); + assert_eq!(schema.network.nodes.len(), 4); + assert_eq!(schema.network.edges.len(), 3); } #[test] fn test_model_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 5); - assert_eq!(model.edges.len(), 6); + let network = model.network(); + assert_eq!(network.nodes().len(), 5); + assert_eq!(network.edges().len(), 6); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); // TODO assert the results! 
} diff --git a/pywr-schema/src/nodes/river_split_with_gauge.rs b/pywr-schema/src/nodes/river_split_with_gauge.rs index cf0f4e83..53d62033 100644 --- a/pywr-schema/src/nodes/river_split_with_gauge.rs +++ b/pywr-schema/src/nodes/river_split_with_gauge.rs @@ -1,9 +1,11 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::aggregated_node::Factors; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::NodeIndex; use pywr_v1_schema::nodes::RiverSplitWithGaugeNode as RiverSplitWithGaugeNodeV1; use std::path::Path; @@ -55,17 +57,17 @@ impl RiverSplitWithGaugeNode { Some(format!("split-agg-{i}")) } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { // TODO do this properly - model.add_link_node(self.meta.name.as_str(), Self::mrf_sub_name())?; - let bypass_idx = model.add_link_node(self.meta.name.as_str(), Self::bypass_sub_name())?; + network.add_link_node(self.meta.name.as_str(), Self::mrf_sub_name())?; + let bypass_idx = network.add_link_node(self.meta.name.as_str(), Self::bypass_sub_name())?; for (i, _) in self.splits.iter().enumerate() { // Each split has a link node and an aggregated node to enforce the factors - let split_idx = model.add_link_node(self.meta.name.as_str(), Self::split_sub_name(i).as_deref())?; + let split_idx = network.add_link_node(self.meta.name.as_str(), Self::split_sub_name(i).as_deref())?; // The factors will be set during the `set_constraints` method - model.add_aggregated_node( + network.add_aggregated_node( self.meta.name.as_str(), Self::split_agg_sub_name(i).as_deref(), &[bypass_idx, split_idx], @@ -78,25 +80,33 @@ impl RiverSplitWithGaugeNode { pub fn set_constraints( 
&self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { // MRF applies as a maximum on the MRF node. if let Some(cost) = &self.mrf_cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; } if let Some(mrf) = &self.mrf { - let value = mrf.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; + let value = mrf.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), Self::mrf_sub_name(), value.into())?; } for (i, (factor, _)) in self.splits.iter().enumerate() { // Set the factors for each split - let factors = Factors::Proportion(vec![factor.load(model, tables, data_path)?]); - model.set_aggregated_node_factors( + let factors = Factors::Proportion(vec![factor.load( + network, + domain, + tables, + data_path, + inter_network_transfers, + )?]); + network.set_aggregated_node_factors( self.meta.name.as_str(), Self::split_agg_sub_name(i).as_deref(), Some(factors), @@ -142,17 +152,17 @@ impl RiverSplitWithGaugeNode { } } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { let mut indices = vec![ - model.get_node_index_by_name(self.meta.name.as_str(), Self::mrf_sub_name())?, - model.get_node_index_by_name(self.meta.name.as_str(), Self::bypass_sub_name())?, + network.get_node_index_by_name(self.meta.name.as_str(), Self::mrf_sub_name())?, + 
network.get_node_index_by_name(self.meta.name.as_str(), Self::bypass_sub_name())?, ]; let split_idx: Vec = self .splits .iter() .enumerate() - .map(|(i, _)| model.get_node_index_by_name(self.meta.name.as_str(), Self::split_sub_name(i).as_deref())) + .map(|(i, _)| network.get_node_index_by_name(self.meta.name.as_str(), Self::split_sub_name(i).as_deref())) .collect::>()?; indices.extend(split_idx.into_iter()); @@ -209,7 +219,6 @@ impl TryFrom for RiverSplitWithGaugeNode { mod tests { use crate::model::PywrModel; use pywr_core::test_utils::run_all_solvers; - use pywr_core::timestep::Timestepper; fn model_str() -> &'static str { include_str!("../test_models/river_split_with_gauge1.json") @@ -220,21 +229,22 @@ mod tests { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 4); - assert_eq!(schema.edges.len(), 3); + assert_eq!(schema.network.nodes.len(), 4); + assert_eq!(schema.network.edges.len(), 3); } #[test] fn test_model_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (model, timestepper): (pywr_core::model::Model, Timestepper) = schema.build_model(None, None).unwrap(); + let model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 5); - assert_eq!(model.edges.len(), 6); + let network = model.network(); + assert_eq!(network.nodes().len(), 5); + assert_eq!(network.edges().len(), 6); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); // TODO assert the results! 
} diff --git a/pywr-schema/src/nodes/virtual_storage.rs b/pywr-schema/src/nodes/virtual_storage.rs index 30da54cb..ccd31192 100644 --- a/pywr-schema/src/nodes/virtual_storage.rs +++ b/pywr-schema/src/nodes/virtual_storage.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::{DynamicFloatValue, TryIntoV2Parameter}; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use pywr_core::node::{ConstraintValue, StorageInitialVolume}; use pywr_core::virtual_storage::VirtualStorageReset; use pywr_v1_schema::nodes::VirtualStorageNode as VirtualStorageNodeV1; @@ -24,9 +26,11 @@ pub struct VirtualStorageNode { impl VirtualStorageNode { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { let initial_volume = if let Some(iv) = self.initial_volume { StorageInitialVolume::Absolute(iv) @@ -37,30 +41,36 @@ impl VirtualStorageNode { }; let cost = match &self.cost { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let min_volume = match &self.min_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? + .into(), None => ConstraintValue::Scalar(0.0), }; let max_volume = match &self.max_volume { - Some(v) => v.load(model, tables, data_path)?.into(), + Some(v) => v + .load(network, domain, tables, data_path, inter_network_transfers)? 
+ .into(), None => ConstraintValue::None, }; let node_idxs = self .nodes .iter() - .map(|name| model.get_node_index_by_name(name.as_str(), None)) + .map(|name| network.get_node_index_by_name(name.as_str(), None)) .collect::, _>>()?; // Standard virtual storage node never resets. let reset = VirtualStorageReset::Never; - model.add_virtual_storage_node( + network.add_virtual_storage_node( self.meta.name.as_str(), None, &node_idxs, @@ -82,8 +92,8 @@ impl VirtualStorageNode { vec![] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_virtual_storage_node_index_by_name(self.meta.name.as_str(), None)?; Ok(Metric::VirtualStorageVolume(idx)) } } diff --git a/pywr-schema/src/nodes/water_treatment_works.rs b/pywr-schema/src/nodes/water_treatment_works.rs index df548ecc..ca96337a 100644 --- a/pywr-schema/src/nodes/water_treatment_works.rs +++ b/pywr-schema/src/nodes/water_treatment_works.rs @@ -1,10 +1,12 @@ use crate::data_tables::LoadedTableCollection; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::nodes::NodeMeta; use crate::parameters::DynamicFloatValue; use num::Zero; use pywr_core::aggregated_node::Factors; use pywr_core::metric::Metric; +use pywr_core::models::ModelDomain; use std::path::Path; #[doc = svgbobdoc::transform!( @@ -74,20 +76,20 @@ impl WaterTreatmentWorks { Some("net_above_soft_min_flow") } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result<(), SchemaError> { - let idx_net = model.add_link_node(self.meta.name.as_str(), Self::net_sub_name())?; - let idx_soft_min_flow = model.add_link_node(self.meta.name.as_str(), Self::net_soft_min_flow_sub_name())?; + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result<(), SchemaError> { + let idx_net = 
network.add_link_node(self.meta.name.as_str(), Self::net_sub_name())?; + let idx_soft_min_flow = network.add_link_node(self.meta.name.as_str(), Self::net_soft_min_flow_sub_name())?; let idx_above_soft_min_flow = - model.add_link_node(self.meta.name.as_str(), Self::net_above_soft_min_flow_sub_name())?; + network.add_link_node(self.meta.name.as_str(), Self::net_above_soft_min_flow_sub_name())?; // Create the internal connections - model.connect_nodes(idx_net, idx_soft_min_flow)?; - model.connect_nodes(idx_net, idx_above_soft_min_flow)?; + network.connect_nodes(idx_net, idx_soft_min_flow)?; + network.connect_nodes(idx_net, idx_above_soft_min_flow)?; if self.loss_factor.is_some() { - let idx_loss = model.add_output_node(self.meta.name.as_str(), Self::loss_sub_name())?; + let idx_loss = network.add_output_node(self.meta.name.as_str(), Self::loss_sub_name())?; // This aggregated node will contain the factors to enforce the loss - model.add_aggregated_node( + network.add_aggregated_node( self.meta.name.as_str(), Self::agg_sub_name(), &[idx_net, idx_loss], @@ -100,38 +102,40 @@ impl WaterTreatmentWorks { pub fn set_constraints( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result<(), SchemaError> { if let Some(cost) = &self.cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; } if let Some(max_flow) = &self.max_flow { - let value = max_flow.load(model, tables, data_path)?; - model.set_node_max_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = max_flow.load(network, domain, tables, data_path, 
inter_network_transfers)?; + network.set_node_max_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; } if let Some(min_flow) = &self.min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_min_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_min_flow(self.meta.name.as_str(), Self::net_sub_name(), value.into())?; } // soft min flow constraints; This typically applies a negative cost upto a maximum // defined by the `soft_min_flow` if let Some(cost) = &self.soft_min_flow_cost { - let value = cost.load(model, tables, data_path)?; - model.set_node_cost( + let value = cost.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_cost( self.meta.name.as_str(), Self::net_soft_min_flow_sub_name(), value.into(), )?; } if let Some(min_flow) = &self.soft_min_flow { - let value = min_flow.load(model, tables, data_path)?; - model.set_node_max_flow( + let value = min_flow.load(network, domain, tables, data_path, inter_network_transfers)?; + network.set_node_max_flow( self.meta.name.as_str(), Self::net_soft_min_flow_sub_name(), value.into(), @@ -141,7 +145,7 @@ impl WaterTreatmentWorks { if let Some(loss_factor) = &self.loss_factor { // Handle the case where we a given a zero loss factor // The aggregated node does not support zero loss factors so filter them here. - let lf = match loss_factor.load(model, tables, data_path)? { + let lf = match loss_factor.load(network, domain, tables, data_path, inter_network_transfers)? { Metric::Constant(f) => { if f.is_zero() { None @@ -156,7 +160,7 @@ impl WaterTreatmentWorks { // Set the factors for the loss // TODO allow for configuring as proportion of gross. 
let factors = Factors::Ratio(vec![Metric::Constant(1.0), lf]); - model.set_aggregated_node_factors(self.meta.name.as_str(), Self::agg_sub_name(), Some(factors))?; + network.set_aggregated_node_factors(self.meta.name.as_str(), Self::agg_sub_name(), Some(factors))?; } } @@ -187,8 +191,8 @@ impl WaterTreatmentWorks { ] } - pub fn default_metric(&self, model: &pywr_core::model::Model) -> Result { - let idx = model.get_node_index_by_name(self.meta.name.as_str(), Self::net_sub_name().as_deref())?; + pub fn default_metric(&self, network: &pywr_core::network::Network) -> Result { + let idx = network.get_node_index_by_name(self.meta.name.as_str(), Self::net_sub_name().as_deref())?; Ok(Metric::NodeOutFlow(idx)) } } @@ -262,35 +266,37 @@ mod tests { "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "input1", - "type": "Input", - "flow": 15 - }, - { - "name": "wtw1", - "type": "WaterTreatmentWorks", - "max_flow": 10.0, - "loss_factor": 0.1 - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 15.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "input1", - "to_node": "wtw1" - }, - { - "from_node": "wtw1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "input1", + "type": "Input", + "flow": 15 + }, + { + "name": "wtw1", + "type": "WaterTreatmentWorks", + "max_flow": 10.0, + "loss_factor": 0.1 + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 15.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "input1", + "to_node": "wtw1" + }, + { + "from_node": "wtw1", + "to_node": "demand1" + } + ] + } } "# } @@ -300,35 +306,36 @@ mod tests { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 3); - assert_eq!(schema.edges.len(), 2); + assert_eq!(schema.network.nodes.len(), 3); + assert_eq!(schema.network.edges.len(), 2); } #[test] fn test_model_run() { let data = model_str(); let schema: PywrModel = serde_json::from_str(data).unwrap(); - let (mut 
model, timestepper) = schema.build_model(None, None).unwrap(); + let mut model = schema.build_model(None, None).unwrap(); - assert_eq!(model.nodes.len(), 6); - assert_eq!(model.edges.len(), 6); + let shape = model.domain().shape(); - let scenario_indices = model.get_scenario_indices(); + let network = model.network_mut(); + assert_eq!(network.nodes().len(), 6); + assert_eq!(network.edges().len(), 6); // Setup expected results // Set-up assertion for "input" node // TODO write some helper functions for adding these assertion recorders - let idx = model.get_node_by_name("input1", None).unwrap().index(); - let expected = Array2::from_elem((timestepper.timesteps().len(), scenario_indices.len()), 11.0); + let idx = network.get_node_by_name("input1", None).unwrap().index(); + let expected = Array2::from_elem(shape, 11.0); let recorder = AssertionRecorder::new("input-flow", Metric::NodeOutFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); - let idx = model.get_node_by_name("demand1", None).unwrap().index(); - let expected = Array2::from_elem((timestepper.timesteps().len(), scenario_indices.len()), 10.0); + let idx = network.get_node_by_name("demand1", None).unwrap().index(); + let expected = Array2::from_elem(shape, 10.0); let recorder = AssertionRecorder::new("demand-flow", Metric::NodeInFlow(idx), expected, None, None); - model.add_recorder(Box::new(recorder)).unwrap(); + network.add_recorder(Box::new(recorder)).unwrap(); // Test all solvers - run_all_solvers(&model, ×tepper); + run_all_solvers(&model); } } diff --git a/pywr-schema/src/outputs/csv.rs b/pywr-schema/src/outputs/csv.rs index 8853f00e..e6e7f409 100644 --- a/pywr-schema/src/outputs/csv.rs +++ b/pywr-schema/src/outputs/csv.rs @@ -12,7 +12,7 @@ pub struct CsvOutput { impl CsvOutput { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, output_path: Option<&Path>, ) 
-> Result<(), SchemaError> { let filename = match (output_path, self.filename.is_relative()) { @@ -20,10 +20,10 @@ impl CsvOutput { _ => self.filename.to_path_buf(), }; - let metric_set_idx = model.get_metric_set_index_by_name(&self.metric_set)?; + let metric_set_idx = network.get_metric_set_index_by_name(&self.metric_set)?; let recorder = CSVRecorder::new(&self.name, filename, metric_set_idx); - model.add_recorder(Box::new(recorder))?; + network.add_recorder(Box::new(recorder))?; Ok(()) } @@ -44,9 +44,9 @@ mod tests { let data = model_str(); let schema = PywrModel::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 3); - assert_eq!(schema.edges.len(), 2); - assert!(schema.outputs.is_some_and(|o| o.len() == 1)); + assert_eq!(schema.network.nodes.len(), 3); + assert_eq!(schema.network.edges.len(), 2); + assert!(schema.network.outputs.is_some_and(|o| o.len() == 1)); } #[test] @@ -56,11 +56,9 @@ mod tests { let temp_dir = TempDir::new().unwrap(); - let (model, timestepper) = schema.build_model(None, Some(temp_dir.path())).unwrap(); + let model = schema.build_model(None, Some(temp_dir.path())).unwrap(); - model - .run::(×tepper, &ClpSolverSettings::default()) - .unwrap(); + model.run::(&ClpSolverSettings::default()).unwrap(); // After model run there should be an output file. 
let expected_path = temp_dir.path().join("outputs.csv"); diff --git a/pywr-schema/src/outputs/hdf.rs b/pywr-schema/src/outputs/hdf.rs index a48ab6fd..4cccc664 100644 --- a/pywr-schema/src/outputs/hdf.rs +++ b/pywr-schema/src/outputs/hdf.rs @@ -13,7 +13,7 @@ pub struct Hdf5Output { impl Hdf5Output { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, output_path: Option<&Path>, ) -> Result<(), SchemaError> { let filename = match (output_path, self.filename.is_relative()) { @@ -21,11 +21,11 @@ impl Hdf5Output { _ => self.filename.to_path_buf(), }; - let metric_set_idx = model.get_metric_set_index_by_name(&self.metric_set)?; + let metric_set_idx = network.get_metric_set_index_by_name(&self.metric_set)?; let recorder = HDF5Recorder::new(&self.name, filename, metric_set_idx); - model.add_recorder(Box::new(recorder))?; + network.add_recorder(Box::new(recorder))?; Ok(()) } @@ -46,9 +46,9 @@ mod tests { let data = model_str(); let schema = PywrModel::from_str(data).unwrap(); - assert_eq!(schema.nodes.len(), 3); - assert_eq!(schema.edges.len(), 2); - assert!(schema.outputs.is_some_and(|o| o.len() == 1)); + assert_eq!(schema.network.nodes.len(), 3); + assert_eq!(schema.network.edges.len(), 2); + assert!(schema.network.outputs.is_some_and(|o| o.len() == 1)); } #[test] @@ -58,11 +58,9 @@ mod tests { let temp_dir = TempDir::new().unwrap(); - let (model, timestepper) = schema.build_model(None, Some(temp_dir.path())).unwrap(); + let model = schema.build_model(None, Some(temp_dir.path())).unwrap(); - model - .run::<ClpSolver>(&timestepper, &ClpSolverSettings::default()) - .unwrap(); + model.run::<ClpSolver>(&ClpSolverSettings::default()).unwrap(); // After model run there should be an output file.
let expected_path = temp_dir.path().join("outputs.h5"); diff --git a/pywr-schema/src/outputs/mod.rs b/pywr-schema/src/outputs/mod.rs index 38b623a3..3363cb6e 100644 --- a/pywr-schema/src/outputs/mod.rs +++ b/pywr-schema/src/outputs/mod.rs @@ -16,12 +16,12 @@ pub enum Output { impl Output { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, output_path: Option<&Path>, ) -> Result<(), SchemaError> { match self { - Self::CSV(o) => o.add_to_model(model, output_path), - Self::HDF5(o) => o.add_to_model(model, output_path), + Self::CSV(o) => o.add_to_model(network, output_path), + Self::HDF5(o) => o.add_to_model(network, output_path), } } } diff --git a/pywr-schema/src/parameters/aggregated.rs b/pywr-schema/src/parameters/aggregated.rs index 0be50de7..928cc6f5 100644 --- a/pywr-schema/src/parameters/aggregated.rs +++ b/pywr-schema/src/parameters/aggregated.rs @@ -1,9 +1,11 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ DynamicFloatValue, DynamicFloatValueType, DynamicIndexValue, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::{IndexParameterIndex, ParameterIndex}; use pywr_v1_schema::parameters::{ AggFunc as AggFuncV1, AggregatedIndexParameter as AggregatedIndexParameterV1, @@ -49,7 +51,7 @@ impl From for AggFunc { /// Each time-step the aggregation is updated using the current values of the referenced metrics. /// The available aggregation functions are defined by the [`AggFunc`] enum. /// -/// This parameter definition is applied to a model using [`crate::parameters::AggregatedParameter`]. +/// This parameter definition is applied to a network using [`crate::parameters::AggregatedParameter`]. /// /// See also [`AggregatedIndexParameter`] for aggregation of integer values. 
/// @@ -91,19 +93,21 @@ impl AggregatedParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let metrics = self .metrics .iter() - .map(|v| v.load(model, tables, data_path)) + .map(|v| v.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?; let p = pywr_core::parameters::AggregatedParameter::new(&self.meta.name, &metrics, self.agg_func.into()); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } @@ -195,19 +199,21 @@ impl AggregatedIndexParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let parameters = self .parameters .iter() - .map(|v| v.load(model, tables, data_path)) + .map(|v| v.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?; let p = pywr_core::parameters::AggregatedIndexParameter::new(&self.meta.name, parameters, self.agg_func.into()); - Ok(model.add_index_parameter(Box::new(p))?) + Ok(network.add_index_parameter(Box::new(p))?) 
} } diff --git a/pywr-schema/src/parameters/asymmetric_switch.rs b/pywr-schema/src/parameters/asymmetric_switch.rs index e27aad0b..46c5eeff 100644 --- a/pywr-schema/src/parameters/asymmetric_switch.rs +++ b/pywr-schema/src/parameters/asymmetric_switch.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ DynamicFloatValueType, DynamicIndexValue, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::IndexParameterIndex; use pywr_v1_schema::parameters::AsymmetricSwitchIndexParameter as AsymmetricSwitchIndexParameterV1; use std::collections::HashMap; @@ -26,12 +28,18 @@ impl AsymmetricSwitchIndexParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let on_index_parameter = self.on_index_parameter.load(model, tables, data_path)?; - let off_index_parameter = self.off_index_parameter.load(model, tables, data_path)?; + let on_index_parameter = + self.on_index_parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; + let off_index_parameter = + self.off_index_parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::AsymmetricSwitchIndexParameter::new( &self.meta.name, @@ -39,7 +47,7 @@ impl AsymmetricSwitchIndexParameter { off_index_parameter, ); - Ok(model.add_index_parameter(Box::new(p))?) + Ok(network.add_index_parameter(Box::new(p))?) 
} } diff --git a/pywr-schema/src/parameters/control_curves.rs b/pywr-schema/src/parameters/control_curves.rs index c5570f3d..c700eb12 100644 --- a/pywr-schema/src/parameters/control_curves.rs +++ b/pywr-schema/src/parameters/control_curves.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::{IndexParameterIndex, ParameterIndex}; use pywr_v1_schema::parameters::{ ControlCurveIndexParameter as ControlCurveIndexParameterV1, @@ -38,22 +40,24 @@ impl ControlCurveInterpolatedParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = model.get_storage_node_metric(&self.storage_node, None, true)?; + let metric = network.get_storage_node_metric(&self.storage_node, None, true)?; let control_curves = self .control_curves .iter() - .map(|cc| cc.load(model, tables, data_path)) + .map(|cc| cc.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let values = self .values .iter() - .map(|val| val.load(model, tables, data_path)) + .map(|val| val.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let p = pywr_core::parameters::ControlCurveInterpolatedParameter::new( @@ -62,7 +66,7 @@ impl ControlCurveInterpolatedParameter { control_curves, values, ); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } @@ -145,20 +149,22 @@ impl ControlCurveIndexParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = model.get_storage_node_metric(&self.storage_node, None, true)?; + let metric = network.get_storage_node_metric(&self.storage_node, None, true)?; let control_curves = self .control_curves .iter() - .map(|cc| cc.load(model, tables, data_path)) + .map(|cc| cc.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let p = pywr_core::parameters::ControlCurveIndexParameter::new(&self.meta.name, metric, control_curves); - Ok(model.add_index_parameter(Box::new(p))?) + Ok(network.add_index_parameter(Box::new(p))?) } } @@ -256,26 +262,28 @@ impl ControlCurveParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = model.get_storage_node_metric(&self.storage_node, None, true)?; + let metric = network.get_storage_node_metric(&self.storage_node, None, true)?; let control_curves = self .control_curves .iter() - .map(|cc| cc.load(model, tables, data_path)) + .map(|cc| cc.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let values = self .values .iter() - .map(|val| val.load(model, tables, data_path)) + .map(|val| val.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let p = pywr_core::parameters::ControlCurveParameter::new(&self.meta.name, metric, control_curves, values); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } @@ -354,16 +362,18 @@ impl ControlCurvePiecewiseInterpolatedParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = model.get_storage_node_metric(&self.storage_node, None, true)?; + let metric = network.get_storage_node_metric(&self.storage_node, None, true)?; let control_curves = self .control_curves .iter() - .map(|cc| cc.load(model, tables, data_path)) + .map(|cc| cc.load(network, domain, tables, data_path, inter_network_transfers)) .collect::>()?; let values = match &self.values { @@ -379,7 +389,7 @@ impl ControlCurvePiecewiseInterpolatedParameter { self.maximum.unwrap_or(1.0), self.minimum.unwrap_or(0.0), ); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/core.rs b/pywr-schema/src/parameters/core.rs index 2c4e4288..324a69b6 100644 --- a/pywr-schema/src/parameters/core.rs +++ b/pywr-schema/src/parameters/core.rs @@ -1,9 +1,11 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ ConstantValue, DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::{ ConstantParameter as ConstantParameterV1, DivisionParameter as DivisionParameterV1, MaxParameter as MaxParameterV1, @@ -15,7 +17,7 @@ use std::path::Path; /// Activation function or transformation to apply to variable value. /// /// These different functions are used to specify how a variable value is transformed -/// before being used in a model. 
These transformations can be useful for optimisation +/// before being used in a network. These transformations can be useful for optimisation /// algorithms to represent a, for example, binary-like variable in a continuous domain. Each /// activation function requires different data to parameterize the function's behaviour. /// @@ -146,7 +148,7 @@ pub struct ConstantParameter { pub meta: ParameterMeta, /// The value the parameter should return. /// - /// In the simple case this will be the value used by the model. However, if an activation + /// In the simple case this will be the value used by the network. However, if an activation /// function is specified this value will be the `x` value for that activation function. pub value: ConstantValue, /// Definition of optional variable settings. @@ -164,7 +166,7 @@ impl ConstantParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, tables: &LoadedTableCollection, ) -> Result { let variable = match &self.variable { @@ -180,7 +182,7 @@ impl ConstantParameter { }; let p = pywr_core::parameters::ConstantParameter::new(&self.meta.name, self.value.load(tables)?, variable); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } @@ -229,15 +231,19 @@ impl MaxParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let idx = self.parameter.load(model, tables, data_path)?; + let idx = self + .parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; let threshold = self.threshold.unwrap_or(0.0); let p = pywr_core::parameters::MaxParameter::new(&self.meta.name, idx, threshold); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } @@ -299,15 +305,21 @@ impl DivisionParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let n = self.numerator.load(model, tables, data_path)?; - let d = self.denominator.load(model, tables, data_path)?; + let n = self + .numerator + .load(network, domain, tables, data_path, inter_network_transfers)?; + let d = self + .denominator + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::DivisionParameter::new(&self.meta.name, n, d); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } @@ -367,15 +379,19 @@ impl MinParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let idx = self.parameter.load(model, tables, data_path)?; + let idx = self + .parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; let threshold = self.threshold.unwrap_or(0.0); let p = pywr_core::parameters::MinParameter::new(&self.meta.name, idx, threshold); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } @@ -419,14 +435,18 @@ impl NegativeParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let idx = self.parameter.load(model, tables, data_path)?; + let idx = self + .parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::NegativeParameter::new(&self.meta.name, idx); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/data_frame.rs b/pywr-schema/src/parameters/data_frame.rs index 532f3b9a..57be6a7d 100644 --- a/pywr-schema/src/parameters/data_frame.rs +++ b/pywr-schema/src/parameters/data_frame.rs @@ -9,6 +9,7 @@ use pyo3::prelude::PyModule; use pyo3::types::{PyDict, PyTuple}; use pyo3::{IntoPy, PyErr, PyObject, Python, ToPyObject}; use pyo3_polars::PyDataFrame; +use pywr_core::models::ModelDomain; use pywr_core::parameters::{Array1Parameter, Array2Parameter, ParameterIndex}; use pywr_v1_schema::parameters::DataFrameParameter as DataFrameParameterV1; use std::collections::HashMap; @@ -71,7 +72,8 @@ impl DataFrameParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, data_path: Option<&Path>, ) -> Result { // Handle the case of an optional data path with a relative url. @@ -126,10 +128,14 @@ impl DataFrameParameter { // 3. Create an ArrayParameter using the loaded array. 
match &self.columns { DataFrameColumns::Scenario(scenario) => { - let scenario_group = model.get_scenario_group_index_by_name(scenario)?; + let scenario_group_index = domain + .scenarios() + .group_index(scenario) + .ok_or(SchemaError::ScenarioGroupNotFound(scenario.to_string()))?; + let array: Array2 = df.to_ndarray::(IndexOrder::default()).unwrap(); - let p = Array2Parameter::new(&self.meta.name, array, scenario_group, self.timestep_offset); - Ok(model.add_parameter(Box::new(p))?) + let p = Array2Parameter::new(&self.meta.name, array, scenario_group_index, self.timestep_offset); + Ok(network.add_parameter(Box::new(p))?) } DataFrameColumns::Column(column) => { let series = df.column(column).unwrap(); @@ -143,7 +149,7 @@ impl DataFrameParameter { .to_owned(); let p = Array1Parameter::new(&self.meta.name, array, self.timestep_offset); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } } diff --git a/pywr-schema/src/parameters/delay.rs b/pywr-schema/src/parameters/delay.rs index 706b0a0e..ca656837 100644 --- a/pywr-schema/src/parameters/delay.rs +++ b/pywr-schema/src/parameters/delay.rs @@ -1,11 +1,13 @@ use crate::data_tables::LoadedTableCollection; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{DynamicFloatValue, DynamicFloatValueType, ParameterMeta}; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use std::collections::HashMap; use std::path::Path; -/// A parameter that delays a value from the model by a number of time-steps. +/// A parameter that delays a value from the network by a number of time-steps. 
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] pub struct DelayParameter { #[serde(flatten)] @@ -31,12 +33,16 @@ impl DelayParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = self.metric.load(model, tables, data_path)?; + let metric = self + .metric + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::DelayParameter::new(&self.meta.name, metric, self.delay, self.initial_value); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/discount_factor.rs b/pywr-schema/src/parameters/discount_factor.rs index 6b7b1ed1..5124a561 100644 --- a/pywr-schema/src/parameters/discount_factor.rs +++ b/pywr-schema/src/parameters/discount_factor.rs @@ -1,7 +1,9 @@ use crate::data_tables::LoadedTableCollection; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter}; use crate::ConversionError; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::DiscountFactorParameter as DiscountFactorParameterV1; use std::collections::HashMap; @@ -32,13 +34,17 @@ impl DiscountFactorParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let discount_rate = self.discount_rate.load(model, tables, data_path)?; + let discount_rate = self + .discount_rate + .load(network, domain, tables, data_path, inter_network_transfers)?; 
let p = pywr_core::parameters::DiscountFactorParameter::new(&self.meta.name, discount_rate, self.base_year); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/indexed_array.rs b/pywr-schema/src/parameters/indexed_array.rs index ce37a416..cadf1aeb 100644 --- a/pywr-schema/src/parameters/indexed_array.rs +++ b/pywr-schema/src/parameters/indexed_array.rs @@ -1,9 +1,11 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ DynamicFloatValue, DynamicFloatValueType, DynamicIndexValue, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::IndexedArrayParameter as IndexedArrayParameterV1; use std::collections::HashMap; @@ -34,21 +36,25 @@ impl IndexedArrayParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let index_parameter = self.index_parameter.load(model, tables, data_path)?; + let index_parameter = self + .index_parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; let metrics = self .metrics .iter() - .map(|v| v.load(model, tables, data_path)) + .map(|v| v.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?; let p = pywr_core::parameters::IndexedArrayParameter::new(&self.meta.name, index_parameter, &metrics); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } diff --git a/pywr-schema/src/parameters/interpolated.rs b/pywr-schema/src/parameters/interpolated.rs index 9b80de49..3c97890f 100644 --- a/pywr-schema/src/parameters/interpolated.rs +++ b/pywr-schema/src/parameters/interpolated.rs @@ -1,10 +1,12 @@ use crate::data_tables::LoadedTableCollection; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ - DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, MetricFloatValue, NodeReference, ParameterMeta, - TryFromV1Parameter, TryIntoV2Parameter, + DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, MetricFloatReference, MetricFloatValue, NodeReference, + ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; use crate::ConversionError; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::{ InterpolatedFlowParameter as InterpolatedFlowParameterV1, @@ -50,11 +52,15 @@ impl InterpolatedParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let x = self.x.load(model, tables, data_path)?; + let x = self + .x + .load(network, domain, tables, data_path, inter_network_transfers)?; // Sense check the points if self.xp.len() != self.fp.len() { @@ -67,12 +73,12 @@ impl InterpolatedParameter { let xp = self .xp .iter() - .map(|p| p.load(model, tables, data_path)) + .map(|p| p.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?; let fp = self .fp .iter() - .map(|p| p.load(model, tables, data_path)) + .map(|p| p.load(network, domain, tables, data_path, inter_network_transfers)) .collect::, _>>()?; let points = xp @@ -87,7 +93,7 @@ impl InterpolatedParameter { points, self.error_on_bounds.unwrap_or(true), ); - Ok(model.add_parameter(Box::new(p))?) 
+ Ok(network.add_parameter(Box::new(p))?) } } @@ -107,7 +113,7 @@ impl TryFromV1Parameter for InterpolatedParameter { sub_name: None, }; // This defaults to the node's inflow; not sure if we can do better than that. - let x = DynamicFloatValue::Dynamic(MetricFloatValue::NodeInFlow(node_ref)); + let x = DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::NodeInFlow(node_ref))); let xp = v1 .flows @@ -170,7 +176,7 @@ impl TryFromV1Parameter for InterpolatedParameter sub_name: None, }; // This defaults to the node's inflow; not sure if we can do better than that. - let x = DynamicFloatValue::Dynamic(MetricFloatValue::NodeVolume(node_ref)); + let x = DynamicFloatValue::Dynamic(MetricFloatValue::Reference(MetricFloatReference::NodeInFlow(node_ref))); let xp = v1 .volumes diff --git a/pywr-schema/src/parameters/mod.rs b/pywr-schema/src/parameters/mod.rs index 0795f8a8..4843d445 100644 --- a/pywr-schema/src/parameters/mod.rs +++ b/pywr-schema/src/parameters/mod.rs @@ -1,9 +1,9 @@ //! Parameter schema definitions. //! //! The enum [`Parameter`] contains all of the valid Pywr parameter schemas. The parameter -//! variants define separate schemas for different parameter types. When a model is generated -//! from a schema the parameter schemas are added to the model using [`Parameter::add_to_model`]. -//! This typically adds a struct from [`crate::parameters`] to the model using the data +//! variants define separate schemas for different parameter types. When a network is generated +//! from a schema the parameter schemas are added to the network using [`Parameter::add_to_model`]. +//! This typically adds a struct from [`crate::parameters`] to the network using the data //! defined in the schema. //! //! Serializing and deserializing is accomplished using [`serde`]. 
@@ -45,12 +45,14 @@ pub use super::parameters::python::PythonParameter; pub use super::parameters::tables::TablesArrayParameter; pub use super::parameters::thresholds::ParameterThresholdParameter; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::core::DivisionParameter; pub use crate::parameters::data_frame::DataFrameParameter; use crate::parameters::interpolated::InterpolatedParameter; pub use offset::OffsetParameter; use pywr_core::derived_metric::DerivedMetric; use pywr_core::metric::Metric; +use pywr_core::models::{ModelDomain, MultiNetworkTransferIndex}; use pywr_core::node::NodeIndex; use pywr_core::parameters::{IndexParameterIndex, IndexValue, ParameterType}; use pywr_v1_schema::parameters::{ @@ -287,39 +289,73 @@ impl Parameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let ty = match self { - Self::Constant(p) => ParameterType::Parameter(p.add_to_model(model, tables)?), - Self::ControlCurveInterpolated(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Aggregated(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::AggregatedIndex(p) => ParameterType::Index(p.add_to_model(model, tables, data_path)?), - Self::AsymmetricSwitchIndex(p) => ParameterType::Index(p.add_to_model(model, tables, data_path)?), + Self::Constant(p) => ParameterType::Parameter(p.add_to_model(network, tables)?), + Self::ControlCurveInterpolated(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Aggregated(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) 
+ } + Self::AggregatedIndex(p) => { + ParameterType::Index(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::AsymmetricSwitchIndex(p) => { + ParameterType::Index(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } Self::ControlCurvePiecewiseInterpolated(p) => { - ParameterType::Parameter(p.add_to_model(model, tables, data_path)?) + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::ControlCurveIndex(p) => { + ParameterType::Index(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::ControlCurve(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::DailyProfile(p) => ParameterType::Parameter(p.add_to_model(network, tables)?), + Self::IndexedArray(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::MonthlyProfile(p) => ParameterType::Parameter(p.add_to_model(network, tables)?), + Self::UniformDrawdownProfile(p) => ParameterType::Parameter(p.add_to_model(network, tables)?), + Self::Max(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Min(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Negative(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Polynomial1D(p) => ParameterType::Parameter(p.add_to_model(network)?), + Self::ParameterThreshold(p) => { + ParameterType::Index(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) 
} - Self::ControlCurveIndex(p) => ParameterType::Index(p.add_to_model(model, tables, data_path)?), - Self::ControlCurve(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::DailyProfile(p) => ParameterType::Parameter(p.add_to_model(model, tables)?), - Self::IndexedArray(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::MonthlyProfile(p) => ParameterType::Parameter(p.add_to_model(model, tables)?), - Self::UniformDrawdownProfile(p) => ParameterType::Parameter(p.add_to_model(model, tables)?), - Self::Max(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Min(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Negative(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Polynomial1D(p) => ParameterType::Parameter(p.add_to_model(model)?), - Self::ParameterThreshold(p) => ParameterType::Index(p.add_to_model(model, tables, data_path)?), - Self::TablesArray(p) => ParameterType::Parameter(p.add_to_model(model, data_path)?), - Self::Python(p) => p.add_to_model(model, tables, data_path)?, - Self::DataFrame(p) => ParameterType::Parameter(p.add_to_model(model, data_path)?), - Self::Delay(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Division(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Offset(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::DiscountFactor(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::Interpolated(p) => ParameterType::Parameter(p.add_to_model(model, tables, data_path)?), - Self::RbfProfile(p) => ParameterType::Parameter(p.add_to_model(model)?), + Self::TablesArray(p) => ParameterType::Parameter(p.add_to_model(network, domain, data_path)?), + Self::Python(p) => p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?, + Self::DataFrame(p) => 
ParameterType::Parameter(p.add_to_model(network, domain, data_path)?), + Self::Delay(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Division(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Offset(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::DiscountFactor(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::Interpolated(p) => { + ParameterType::Parameter(p.add_to_model(network, domain, tables, data_path, inter_network_transfers)?) + } + Self::RbfProfile(p) => ParameterType::Parameter(p.add_to_model(network)?), }; Ok(ty) @@ -508,54 +544,84 @@ pub struct NodeReference { } impl NodeReference { - fn get_node_index(&self, model: &pywr_core::model::Model) -> Result { - Ok(model.get_node_index_by_name(&self.name, self.sub_name.as_deref())?) + fn get_node_index(&self, network: &pywr_core::network::Network) -> Result { + Ok(network.get_node_index_by_name(&self.name, self.sub_name.as_deref())?) } } -/// A floating-point(f64) value from a metric in the model. +/// A floating-point(f64) value from a metric in the network. #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] #[serde(tag = "type")] -pub enum MetricFloatValue { +pub enum MetricFloatReference { NodeInFlow(NodeReference), NodeOutFlow(NodeReference), NodeVolume(NodeReference), NodeProportionalVolume(NodeReference), Parameter { name: String, key: Option }, - InlineParameter { definition: Box }, + InterNetworkTransfer { name: String }, } -impl MetricFloatValue { +impl MetricFloatReference { /// Load the metric definition into a `Metric` containing the appropriate internal references. 
pub fn load( &self, - model: &mut pywr_core::model::Model, - tables: &LoadedTableCollection, - data_path: Option<&Path>, + network: &mut pywr_core::network::Network, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { match self { - Self::NodeInFlow(node_ref) => Ok(Metric::NodeInFlow(node_ref.get_node_index(model)?)), - Self::NodeOutFlow(node_ref) => Ok(Metric::NodeOutFlow(node_ref.get_node_index(model)?)), - Self::NodeVolume(node_ref) => Ok(Metric::NodeVolume(node_ref.get_node_index(model)?)), + Self::NodeInFlow(node_ref) => Ok(Metric::NodeInFlow(node_ref.get_node_index(network)?)), + Self::NodeOutFlow(node_ref) => Ok(Metric::NodeOutFlow(node_ref.get_node_index(network)?)), + Self::NodeVolume(node_ref) => Ok(Metric::NodeVolume(node_ref.get_node_index(network)?)), Self::NodeProportionalVolume(node_ref) => { - let dm = DerivedMetric::NodeProportionalVolume(node_ref.get_node_index(model)?); - Ok(Metric::DerivedMetric(model.add_derived_metric(dm))) + let dm = DerivedMetric::NodeProportionalVolume(node_ref.get_node_index(network)?); + Ok(Metric::DerivedMetric(network.add_derived_metric(dm))) } Self::Parameter { name, key } => { match key { Some(key) => { // Key given; this should be a multi-valued parameter Ok(Metric::MultiParameterValue(( - model.get_multi_valued_parameter_index_by_name(name)?, + network.get_multi_valued_parameter_index_by_name(name)?, key.clone(), ))) } None => { // This should be an existing parameter - Ok(Metric::ParameterValue(model.get_parameter_index_by_name(name)?)) + Ok(Metric::ParameterValue(network.get_parameter_index_by_name(name)?)) } } } + Self::InterNetworkTransfer { name } => { + // Find the matching inter model transfer + match inter_network_transfers.iter().position(|t| &t.name == name) { + Some(idx) => Ok(Metric::InterNetworkTransfer(MultiNetworkTransferIndex(idx))), + None => Err(SchemaError::InterNetworkTransferNotFound(name.to_string())), + } + } + } + } +} + +/// A floating-point(f64) value from a metric in the 
network. +#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] +#[serde(untagged)] +pub enum MetricFloatValue { + Reference(MetricFloatReference), + InlineParameter { definition: Box }, +} + +impl MetricFloatValue { + /// Load the metric definition into a `Metric` containing the appropriate internal references. + pub fn load( + &self, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, + tables: &LoadedTableCollection, + data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], + ) -> Result { + match self { + Self::Reference(reference) => Ok(reference.load(network, inter_network_transfers)?), Self::InlineParameter { definition } => { // This inline parameter could already have been loaded on a previous attempt // Let's see if exists first. @@ -564,14 +630,14 @@ impl MetricFloatValue { // assume it is the correct one for future references to that name. This could be // improved by checking the parameter returned by name matches the definition here. - match model.get_parameter_index_by_name(definition.name()) { + match network.get_parameter_index_by_name(definition.name()) { Ok(p) => { // Found a parameter with the name; assume it is the right one! Ok(Metric::ParameterValue(p)) } Err(_) => { // An error retrieving a parameter with this name; assume it needs creating. - match definition.add_to_model(model, tables, data_path)? { + match definition.add_to_model(network, domain, tables, data_path, inter_network_transfers)? 
{ ParameterType::Parameter(idx) => Ok(Metric::ParameterValue(idx)), ParameterType::Index(_) => Err(SchemaError::UnexpectedParameterType(format!( "Found index parameter of type '{}' with name '{}' where an float parameter was expected.", @@ -602,18 +668,20 @@ pub enum ParameterIndexValue { impl ParameterIndexValue { pub fn load( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { match self { Self::Reference(name) => { // This should be an existing parameter - Ok(model.get_index_parameter_index_by_name(name)?) + Ok(network.get_index_parameter_index_by_name(name)?) } Self::Inline(parameter) => { // Inline parameter needs to be added - match parameter.add_to_model(model, tables, data_path)? { + match parameter.add_to_model(network, domain, tables, data_path, inter_network_transfers)? { ParameterType::Index(idx) => Ok(idx), ParameterType::Parameter(_) => Err(SchemaError::UnexpectedParameterType(format!( "Found float parameter of type '{}' with name '{}' where an index parameter was expected.", @@ -655,13 +723,15 @@ impl DynamicFloatValue { pub fn load( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let parameter_ref = match self { DynamicFloatValue::Constant(v) => Metric::Constant(v.load(tables)?), - DynamicFloatValue::Dynamic(v) => v.load(model, tables, data_path)?, + DynamicFloatValue::Dynamic(v) => v.load(network, domain, tables, data_path, inter_network_transfers)?, }; Ok(parameter_ref) } @@ -677,10 +747,12 @@ impl TryFromV1Parameter for DynamicFloatValue { ) -> Result { let p = match v1 { ParameterValueV1::Constant(v) => Self::Constant(ConstantValue::Literal(v)), - 
ParameterValueV1::Reference(p_name) => Self::Dynamic(MetricFloatValue::Parameter { - name: p_name, - key: None, - }), + ParameterValueV1::Reference(p_name) => { + Self::Dynamic(MetricFloatValue::Reference(MetricFloatReference::Parameter { + name: p_name, + key: None, + })) + } ParameterValueV1::Table(tbl) => Self::Constant(ConstantValue::Table(tbl.try_into()?)), ParameterValueV1::Inline(param) => Self::Dynamic(MetricFloatValue::InlineParameter { definition: Box::new((*param).try_into_v2_parameter(parent_node, unnamed_count)?), @@ -709,13 +781,17 @@ impl DynamicIndexValue { /// pub fn load( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let parameter_ref = match self { DynamicIndexValue::Constant(v) => IndexValue::Constant(v.load(tables)?), - DynamicIndexValue::Dynamic(v) => IndexValue::Dynamic(v.load(model, tables, data_path)?), + DynamicIndexValue::Dynamic(v) => { + IndexValue::Dynamic(v.load(network, domain, tables, data_path, inter_network_transfers)?) + } }; Ok(parameter_ref) } diff --git a/pywr-schema/src/parameters/offset.rs b/pywr-schema/src/parameters/offset.rs index 97951408..0d808929 100644 --- a/pywr-schema/src/parameters/offset.rs +++ b/pywr-schema/src/parameters/offset.rs @@ -3,6 +3,8 @@ use crate::parameters::{ConstantValue, DynamicFloatValue, DynamicFloatValueType, use pywr_core::parameters::ParameterIndex; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; +use pywr_core::models::ModelDomain; use std::collections::HashMap; use std::path::Path; @@ -29,7 +31,7 @@ pub struct OffsetParameter { pub meta: ParameterMeta, /// The offset value applied to the metric. /// - /// In the simple case this will be the value used by the model. However, if an activation + /// In the simple case this will be the value used by the network. 
However, if an activation /// function is specified this value will be the `x` value for that activation function. pub offset: ConstantValue, /// The metric from which to apply the offset. @@ -49,9 +51,11 @@ impl OffsetParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { let variable = match &self.variable { None => None, @@ -65,9 +69,11 @@ impl OffsetParameter { } }; - let idx = self.metric.load(model, tables, data_path)?; + let idx = self + .metric + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::OffsetParameter::new(&self.meta.name, idx, self.offset.load(tables)?, variable); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/polynomial.rs b/pywr-schema/src/parameters/polynomial.rs index 05a71839..377058b3 100644 --- a/pywr-schema/src/parameters/polynomial.rs +++ b/pywr-schema/src/parameters/polynomial.rs @@ -23,9 +23,9 @@ impl Polynomial1DParameter { HashMap::new() } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result { + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result { let metric = - model.get_storage_node_metric(&self.storage_node, None, self.use_proportional_volume.unwrap_or(true))?; + network.get_storage_node_metric(&self.storage_node, None, self.use_proportional_volume.unwrap_or(true))?; let p = pywr_core::parameters::Polynomial1DParameter::new( &self.meta.name, @@ -34,7 +34,7 @@ impl Polynomial1DParameter { self.scale.unwrap_or(1.0), self.offset.unwrap_or(0.0), ); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } diff --git a/pywr-schema/src/parameters/profiles.rs b/pywr-schema/src/parameters/profiles.rs index 6ece1d48..8bb3dc7d 100644 --- a/pywr-schema/src/parameters/profiles.rs +++ b/pywr-schema/src/parameters/profiles.rs @@ -28,12 +28,12 @@ impl DailyProfileParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, tables: &LoadedTableCollection, ) -> Result { let values = &self.values.load(tables)?[..366]; let p = pywr_core::parameters::DailyProfileParameter::new(&self.meta.name, values.try_into().expect("")); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } @@ -98,7 +98,7 @@ impl MonthlyProfileParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, tables: &LoadedTableCollection, ) -> Result { let values = &self.values.load(tables)?[..12]; @@ -107,7 +107,7 @@ impl MonthlyProfileParameter { values.try_into().expect(""), self.interp_day.map(|id| id.into()), ); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } @@ -172,7 +172,7 @@ impl UniformDrawdownProfileParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, tables: &LoadedTableCollection, ) -> Result { let reset_day = match &self.reset_day { @@ -194,7 +194,7 @@ impl UniformDrawdownProfileParameter { reset_month, residual_days, ); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) 
} } @@ -364,7 +364,7 @@ impl RbfProfileParameter { HashMap::new() } - pub fn add_to_model(&self, model: &mut pywr_core::model::Model) -> Result { + pub fn add_to_model(&self, network: &mut pywr_core::network::Network) -> Result { let variable = match self.variable { None => None, Some(v) => { @@ -381,7 +381,7 @@ impl RbfProfileParameter { let p = pywr_core::parameters::RbfProfileParameter::new(&self.meta.name, self.points.clone(), function, variable); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } diff --git a/pywr-schema/src/parameters/python.rs b/pywr-schema/src/parameters/python.rs index 426153c6..91087369 100644 --- a/pywr-schema/src/parameters/python.rs +++ b/pywr-schema/src/parameters/python.rs @@ -1,9 +1,11 @@ use crate::data_tables::{make_path, LoadedTableCollection}; use crate::error::SchemaError; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{DynamicFloatValue, DynamicFloatValueType, DynamicIndexValue, ParameterMeta}; use pyo3::prelude::PyModule; use pyo3::types::{PyDict, PyTuple}; use pyo3::{IntoPy, PyErr, PyObject, Python, ToPyObject}; +use pywr_core::models::ModelDomain; use pywr_core::parameters::{ParameterType, PyParameter}; use serde_json::Value; use std::collections::HashMap; @@ -25,9 +27,9 @@ pub enum PythonModule { /// is initialised with user provided positional and/or keyword arguments that can be provided /// here. /// -/// In additions `metrics` and `indices` can be specified. These dependent values from the model +/// In additions `metrics` and `indices` can be specified. These dependent values from the network /// are provided to the calculation method of the Python object. This allows a custom Python -/// parameter to use information from the current model simulation (e.g. current storage volume, +/// parameter to use information from the current network simulation (e.g. current storage volume, /// other parameter value or index). 
/// /// ``` @@ -122,9 +124,11 @@ impl PythonParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { pyo3::prepare_freethreaded_python(); @@ -163,7 +167,12 @@ impl PythonParameter { let metrics = match &self.metrics { Some(metrics) => metrics .iter() - .map(|(k, v)| Ok((k.to_string(), v.load(model, tables, data_path)?))) + .map(|(k, v)| { + Ok(( + k.to_string(), + v.load(network, domain, tables, data_path, inter_network_transfers)?, + )) + }) .collect::, SchemaError>>()?, None => HashMap::new(), }; @@ -171,16 +180,21 @@ impl PythonParameter { let indices = match &self.indices { Some(indices) => indices .iter() - .map(|(k, v)| Ok((k.to_string(), v.load(model, tables, data_path)?))) + .map(|(k, v)| { + Ok(( + k.to_string(), + v.load(network, domain, tables, data_path, inter_network_transfers)?, + )) + }) .collect::, SchemaError>>()?, None => HashMap::new(), }; let p = PyParameter::new(&self.meta.name, object, args, kwargs, &metrics, &indices); let pt = if self.multi { - ParameterType::Multi(model.add_multi_value_parameter(Box::new(p))?) + ParameterType::Multi(network.add_multi_value_parameter(Box::new(p))?) } else { - ParameterType::Parameter(model.add_parameter(Box::new(p))?) + ParameterType::Parameter(network.add_parameter(Box::new(p))?) }; Ok(pt) @@ -191,7 +205,9 @@ impl PythonParameter { mod tests { use crate::data_tables::LoadedTableCollection; use crate::parameters::python::PythonParameter; - use pywr_core::model::Model; + use pywr_core::models::ModelDomain; + use pywr_core::network::Network; + use pywr_core::test_utils::default_time_domain; use serde_json::json; use std::fs::File; use std::io::Write; @@ -234,10 +250,11 @@ class MyParameter: pyo3::prepare_freethreaded_python(); // Load the schema ... 
let param: PythonParameter = serde_json::from_str(data.as_str()).unwrap(); - // ... add it to an empty model + // ... add it to an empty network // this should trigger loading the module and extracting the class - let mut model = Model::default(); + let domain: ModelDomain = default_time_domain().into(); + let mut network = Network::default(); let tables = LoadedTableCollection::from_schema(None, None).unwrap(); - param.add_to_model(&mut model, &tables, None).unwrap(); + param.add_to_model(&mut network, &domain, &tables, None, &[]).unwrap(); } } diff --git a/pywr-schema/src/parameters/tables.rs b/pywr-schema/src/parameters/tables.rs index f9dcca19..6fe16f8b 100644 --- a/pywr-schema/src/parameters/tables.rs +++ b/pywr-schema/src/parameters/tables.rs @@ -1,6 +1,7 @@ use crate::error::{ConversionError, SchemaError}; use crate::parameters::{DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter}; use ndarray::s; +use pywr_core::models::ModelDomain; use pywr_core::parameters::ParameterIndex; use pywr_v1_schema::parameters::TablesArrayParameter as TablesArrayParameterV1; use std::collections::HashMap; @@ -29,7 +30,8 @@ impl TablesArrayParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, data_path: Option<&Path>, ) -> Result { // 1. Load the file from the HDF5 file (NB this is not Pandas format). @@ -60,18 +62,22 @@ impl TablesArrayParameter { // 3. Create an ArrayParameter using the loaded array. if let Some(scenario) = &self.scenario { - let scenario_group = model.get_scenario_group_index_by_name(scenario)?; + let scenario_group_index = domain + .scenarios() + .group_index(scenario) + .ok_or(SchemaError::ScenarioGroupNotFound(scenario.to_string()))?; + let p = pywr_core::parameters::Array2Parameter::new( &self.meta.name, array, - scenario_group, + scenario_group_index, self.timestep_offset, ); - Ok(model.add_parameter(Box::new(p))?) 
+ Ok(network.add_parameter(Box::new(p))?) } else { let array = array.slice_move(s![.., 0]); let p = pywr_core::parameters::Array1Parameter::new(&self.meta.name, array, self.timestep_offset); - Ok(model.add_parameter(Box::new(p))?) + Ok(network.add_parameter(Box::new(p))?) } } } diff --git a/pywr-schema/src/parameters/thresholds.rs b/pywr-schema/src/parameters/thresholds.rs index f1310ab4..d610f832 100644 --- a/pywr-schema/src/parameters/thresholds.rs +++ b/pywr-schema/src/parameters/thresholds.rs @@ -1,8 +1,10 @@ use crate::data_tables::LoadedTableCollection; use crate::error::{ConversionError, SchemaError}; +use crate::model::PywrMultiNetworkTransfer; use crate::parameters::{ DynamicFloatValue, DynamicFloatValueType, IntoV2Parameter, ParameterMeta, TryFromV1Parameter, TryIntoV2Parameter, }; +use pywr_core::models::ModelDomain; use pywr_core::parameters::IndexParameterIndex; use pywr_v1_schema::parameters::{ ParameterThresholdParameter as ParameterThresholdParameterV1, Predicate as PredicateV1, @@ -69,12 +71,18 @@ impl ParameterThresholdParameter { pub fn add_to_model( &self, - model: &mut pywr_core::model::Model, + network: &mut pywr_core::network::Network, + domain: &ModelDomain, tables: &LoadedTableCollection, data_path: Option<&Path>, + inter_network_transfers: &[PywrMultiNetworkTransfer], ) -> Result { - let metric = self.parameter.load(model, tables, data_path)?; - let threshold = self.threshold.load(model, tables, data_path)?; + let metric = self + .parameter + .load(network, domain, tables, data_path, inter_network_transfers)?; + let threshold = self + .threshold + .load(network, domain, tables, data_path, inter_network_transfers)?; let p = pywr_core::parameters::ThresholdParameter::new( &self.meta.name, @@ -83,7 +91,7 @@ impl ParameterThresholdParameter { self.predicate.into(), self.ratchet, ); - Ok(model.add_index_parameter(Box::new(p))?) + Ok(network.add_index_parameter(Box::new(p))?) 
} } diff --git a/pywr-schema/src/test_models/csv1.json b/pywr-schema/src/test_models/csv1.json index a98017b5..d971866a 100644 --- a/pywr-schema/src/test_models/csv1.json +++ b/pywr-schema/src/test_models/csv1.json @@ -9,51 +9,59 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "supply1", - "type": "Input", - "max_flow": 15 - }, - { - "name": "link1", - "type": "Link" - }, - { - "name": "demand1", - "type": "Output", - "max_flow": { - "type": "Parameter", - "name": "demand" + "network": { + "nodes": [ + { + "name": "supply1", + "type": "Input", + "max_flow": 15 }, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "supply1", - "to_node": "link1" - }, - { - "from_node": "link1", - "to_node": "demand1" - } - ], - "parameters": [{"name": "demand", "type": "Constant", "value": 10.0}], - "metric_sets": [ - { - "name": "nodes", - "metrics": [ - "demand1" - ] - } - ], - "outputs": [ - { - "name": "my-outputs", - "type": "CSV", - "filename": "outputs.csv", - "metric_set": "nodes" - } - ] + { + "name": "link1", + "type": "Link" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 10.0 + } + ], + "metric_sets": [ + { + "name": "nodes", + "metrics": [ + "demand1" + ] + } + ], + "outputs": [ + { + "name": "my-outputs", + "type": "CSV", + "filename": "outputs.csv", + "metric_set": "nodes" + } + ] + } } diff --git a/pywr-schema/src/test_models/delay1.json b/pywr-schema/src/test_models/delay1.json index 5cda1499..fa445c0a 100644 --- a/pywr-schema/src/test_models/delay1.json +++ b/pywr-schema/src/test_models/delay1.json @@ -9,33 +9,35 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "input1", - "type": "Catchment", - "flow": 15 - }, - { - "name": "link1", - 
"type": "Delay", - "delay": 3, - "initial_value": 0.0 - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 20.0, - "cost": 1.0 - } - ], - "edges": [ - { - "from_node": "input1", - "to_node": "link1" - }, - { - "from_node": "link1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "input1", + "type": "Catchment", + "flow": 15 + }, + { + "name": "link1", + "type": "Delay", + "delay": 3, + "initial_value": 0.0 + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 20.0, + "cost": 1.0 + } + ], + "edges": [ + { + "from_node": "input1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ] + } } diff --git a/pywr-schema/src/test_models/hdf1.json b/pywr-schema/src/test_models/hdf1.json index 9b97fd91..95a53771 100644 --- a/pywr-schema/src/test_models/hdf1.json +++ b/pywr-schema/src/test_models/hdf1.json @@ -9,51 +9,59 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "supply1", - "type": "Input", - "max_flow": 15 - }, - { - "name": "link1", - "type": "Link" - }, - { - "name": "demand1", - "type": "Output", - "max_flow": { - "type": "Parameter", - "name": "demand" + "network": { + "nodes": [ + { + "name": "supply1", + "type": "Input", + "max_flow": 15 }, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "supply1", - "to_node": "link1" - }, - { - "from_node": "link1", - "to_node": "demand1" - } - ], - "parameters": [{"name": "demand", "type": "Constant", "value": 10.0}], - "metric_sets": [ - { - "name": "nodes", - "metrics": [ - "demand1" - ] - } - ], - "outputs": [ - { - "name": "my-outputs", - "type": "HDF5", - "filename": "outputs.h5", - "metric_set": "nodes" - } - ] + { + "name": "link1", + "type": "Link" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ], + 
"parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 10.0 + } + ], + "metric_sets": [ + { + "name": "nodes", + "metrics": [ + "demand1" + ] + } + ], + "outputs": [ + { + "name": "my-outputs", + "type": "HDF5", + "filename": "outputs.h5", + "metric_set": "nodes" + } + ] + } } diff --git a/pywr-schema/src/test_models/multi1/model.json b/pywr-schema/src/test_models/multi1/model.json new file mode 100644 index 00000000..77a24a96 --- /dev/null +++ b/pywr-schema/src/test_models/multi1/model.json @@ -0,0 +1,34 @@ +{ + "metadata": { + "title": "Multi-model 1", + "description": "A simple multi-model that passes data from sub-model1 to sub-model2.", + "minimum_version": "0.1" + }, + "timestepper": { + "start": "2015-01-01", + "end": "2015-12-31", + "timestep": 1 + }, + "networks": [ + { + "name": "network1", + "network": "network1.json", + "transfers": [] + }, + { + "name": "network2", + "network": "network2.json", + "transfers": [ + { + "from_network": "network1", + "metric": { + "type": "NodeInFlow", + "name": "demand1" + }, + "name": "inflow" + } + ] + } + ] + +} diff --git a/pywr-schema/src/test_models/multi1/network1.json b/pywr-schema/src/test_models/multi1/network1.json new file mode 100644 index 00000000..b08b2905 --- /dev/null +++ b/pywr-schema/src/test_models/multi1/network1.json @@ -0,0 +1,39 @@ +{ + "nodes": [ + { + "name": "supply1", + "type": "Input", + "max_flow": 15 + }, + { + "name": "link1", + "type": "Link" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 10.0 + } + ] +} diff --git a/pywr-schema/src/test_models/multi1/network2.json b/pywr-schema/src/test_models/multi1/network2.json new file mode 100644 index 00000000..ef47e9b1 --- /dev/null +++ 
b/pywr-schema/src/test_models/multi1/network2.json @@ -0,0 +1,42 @@ +{ + "nodes": [ + { + "name": "supply2", + "type": "Input", + "max_flow": { + "type": "InterNetworkTransfer", + "name": "inflow" + } + }, + { + "name": "link2", + "type": "Link" + }, + { + "name": "demand2", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply2", + "to_node": "link2" + }, + { + "from_node": "link2", + "to_node": "demand2" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 20.0 + } + ] +} diff --git a/pywr-schema/src/test_models/multi2/model.json b/pywr-schema/src/test_models/multi2/model.json new file mode 100644 index 00000000..bd246a95 --- /dev/null +++ b/pywr-schema/src/test_models/multi2/model.json @@ -0,0 +1,44 @@ +{ + "metadata": { + "title": "Multi-model 2", + "description": "A simple multi-model that passes data from sub-model1 to sub-model2, and back again.", + "minimum_version": "0.1" + }, + "timestepper": { + "start": "2015-01-01", + "end": "2015-12-31", + "timestep": 1 + }, + "networks": [ + { + "name": "network1", + "network": "network1.json", + "transfers": [ + { + "from_network": "network2", + "metric": { + "type": "Parameter", + "name": "demand" + }, + "name": "inflow", + "initial_value": 10.0 + } + ] + }, + { + "name": "network2", + "network": "network2.json", + "transfers": [ + { + "from_network": "network1", + "metric": { + "type": "NodeInFlow", + "name": "demand1" + }, + "name": "inflow" + } + ] + } + ] + +} diff --git a/pywr-schema/src/test_models/multi2/network1.json b/pywr-schema/src/test_models/multi2/network1.json new file mode 100644 index 00000000..c5328072 --- /dev/null +++ b/pywr-schema/src/test_models/multi2/network1.json @@ -0,0 +1,42 @@ +{ + "nodes": [ + { + "name": "supply1", + "type": "Input", + "max_flow": { + "type": "InterNetworkTransfer", + "name": "inflow" + } + }, + { + "name": "link1", + "type": "Link" + }, + { 
"name": "demand1", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 10.0 + } + ] +} diff --git a/pywr-schema/src/test_models/multi2/network2.json b/pywr-schema/src/test_models/multi2/network2.json new file mode 100644 index 00000000..ef47e9b1 --- /dev/null +++ b/pywr-schema/src/test_models/multi2/network2.json @@ -0,0 +1,42 @@ +{ + "nodes": [ + { + "name": "supply2", + "type": "Input", + "max_flow": { + "type": "InterNetworkTransfer", + "name": "inflow" + } + }, + { + "name": "link2", + "type": "Link" + }, + { + "name": "demand2", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply2", + "to_node": "link2" + }, + { + "from_node": "link2", + "to_node": "demand2" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 20.0 + } + ] +} diff --git a/pywr-schema/src/test_models/piecewise_link1.json b/pywr-schema/src/test_models/piecewise_link1.json index 8bcb0a77..1d7174d0 100644 --- a/pywr-schema/src/test_models/piecewise_link1.json +++ b/pywr-schema/src/test_models/piecewise_link1.json @@ -9,44 +9,46 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "input1", - "type": "Input", - "max_flow": 15 - }, - { - "name": "link1", - "type": "PiecewiseLink", - "steps": [ - { - "cost": 1.0, - "max_flow": 1.0 - }, - { - "cost": 5.0, - "max_flow": 3.0 - }, - { - "cost": 15.0 - } - ] - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 15.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "input1", - "to_node": "link1" - }, - { - "from_node": "link1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "input1", + "type": "Input", + "max_flow": 15 + }, 
+ { + "name": "link1", + "type": "PiecewiseLink", + "steps": [ + { + "cost": 1.0, + "max_flow": 1.0 + }, + { + "cost": 5.0, + "max_flow": 3.0 + }, + { + "cost": 15.0 + } + ] + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 15.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "input1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ] + } } diff --git a/pywr-schema/src/test_models/piecewise_storage1.json b/pywr-schema/src/test_models/piecewise_storage1.json index f746d552..4e326778 100644 --- a/pywr-schema/src/test_models/piecewise_storage1.json +++ b/pywr-schema/src/test_models/piecewise_storage1.json @@ -9,43 +9,45 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "input1", - "type": "Input", - "max_flow": 5, - "cost": 2.0 - }, - { - "name": "storage1", - "type": "PiecewiseStorage", - "max_volume": 1000.0, - "steps": [ - { - "cost": -15.0, - "control_curve": 0.25 - }, - { - "cost": -5.0, - "control_curve": 0.5 - } - ] - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 15.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "input1", - "to_node": "storage1" - }, - { - "from_node": "storage1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "input1", + "type": "Input", + "max_flow": 5, + "cost": 2.0 + }, + { + "name": "storage1", + "type": "PiecewiseStorage", + "max_volume": 1000.0, + "steps": [ + { + "cost": -15.0, + "control_curve": 0.25 + }, + { + "cost": -5.0, + "control_curve": 0.5 + } + ] + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 15.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "input1", + "to_node": "storage1" + }, + { + "from_node": "storage1", + "to_node": "demand1" + } + ] + } } diff --git a/pywr-schema/src/test_models/piecewise_storage2.json b/pywr-schema/src/test_models/piecewise_storage2.json index a081341a..0ea27ac5 100644 --- a/pywr-schema/src/test_models/piecewise_storage2.json +++ 
b/pywr-schema/src/test_models/piecewise_storage2.json @@ -9,68 +9,83 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "input1", - "type": "Input", - "max_flow": 3.0, - "cost": 2.0 - }, - { - "name": "storage1", - "type": "PiecewiseStorage", - "max_volume": 1000.0, - "steps": [ - { - "cost": -15.0, - "control_curve": 0.25 - }, - { - "cost": -5.0, - "control_curve": { - "type": "InlineParameter", - "definition": { - "type": "MonthlyProfile", - "name": "storage1-control-curve", - "values": [0.75, 0.75, 0.75, 0.5, 0.5, 0.5, 0.3, 0.3, 0.3, 0.5, 0.5, 0.5] + "network": { + "nodes": [ + { + "name": "input1", + "type": "Input", + "max_flow": 3.0, + "cost": 2.0 + }, + { + "name": "storage1", + "type": "PiecewiseStorage", + "max_volume": 1000.0, + "steps": [ + { + "cost": -15.0, + "control_curve": 0.25 + }, + { + "cost": -5.0, + "control_curve": { + "type": "InlineParameter", + "definition": { + "type": "MonthlyProfile", + "name": "storage1-control-curve", + "values": [ + 0.75, + 0.75, + 0.75, + 0.5, + 0.5, + 0.5, + 0.3, + 0.3, + 0.3, + 0.5, + 0.5, + 0.5 + ] + } } } - } - ] - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 5.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "input1", - "to_node": "storage1" - }, - { - "from_node": "storage1", - "to_node": "demand1" - } - ], - "parameters": [ - { - "name": "storage1-drought-curve", - "type": "Constant", - "value": 0.5 - }, - { - "name": "storage1-drought-index", - "type": "ControlCurveIndex", - "storage_node": "storage1", - "control_curves": [ - { - "type": "Parameter", - "name": "storage1-drought-curve" - } - ] - } - ] + ] + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 5.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "input1", + "to_node": "storage1" + }, + { + "from_node": "storage1", + "to_node": "demand1" + } + ], + "parameters": [ + { + "name": "storage1-drought-curve", + "type": "Constant", + "value": 0.5 + }, + { + "name": "storage1-drought-index", + 
"type": "ControlCurveIndex", + "storage_node": "storage1", + "control_curves": [ + { + "type": "Parameter", + "name": "storage1-drought-curve" + } + ] + } + ] + } } diff --git a/pywr-schema/src/test_models/river_split_with_gauge1.json b/pywr-schema/src/test_models/river_split_with_gauge1.json index c3030cd5..0184250b 100644 --- a/pywr-schema/src/test_models/river_split_with_gauge1.json +++ b/pywr-schema/src/test_models/river_split_with_gauge1.json @@ -9,41 +9,43 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "catchment1", - "type": "Catchment", - "flow": 15 - }, - { - "name": "gauge1", - "type": "RiverGauge", - "mrf": 5.0, - "mrf_cost": -20.0 - }, - { - "name": "term1", - "type": "Output" - }, - { - "name": "demand1", - "type": "Output", - "max_flow": 15.0, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "catchment1", - "to_node": "gauge1" - }, - { - "from_node": "gauge1", - "to_node": "term1" - }, - { - "from_node": "gauge1", - "to_node": "demand1" - } - ] + "network": { + "nodes": [ + { + "name": "catchment1", + "type": "Catchment", + "flow": 15 + }, + { + "name": "gauge1", + "type": "RiverGauge", + "mrf": 5.0, + "mrf_cost": -20.0 + }, + { + "name": "term1", + "type": "Output" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": 15.0, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "catchment1", + "to_node": "gauge1" + }, + { + "from_node": "gauge1", + "to_node": "term1" + }, + { + "from_node": "gauge1", + "to_node": "demand1" + } + ] + } } diff --git a/pywr-schema/src/test_models/simple1.json b/pywr-schema/src/test_models/simple1.json index 1ea65300..646df996 100644 --- a/pywr-schema/src/test_models/simple1.json +++ b/pywr-schema/src/test_models/simple1.json @@ -9,35 +9,43 @@ "end": "2015-12-31", "timestep": 1 }, - "nodes": [ - { - "name": "supply1", - "type": "Input", - "max_flow": 15 - }, - { - "name": "link1", - "type": "Link" - }, - { - "name": "demand1", - "type": "Output", - "max_flow": { - "type": "Parameter", 
- "name": "demand" + "network": { + "nodes": [ + { + "name": "supply1", + "type": "Input", + "max_flow": 15 }, - "cost": -10 - } - ], - "edges": [ - { - "from_node": "supply1", - "to_node": "link1" - }, - { - "from_node": "link1", - "to_node": "demand1" - } - ], - "parameters": [{"name": "demand", "type": "Constant", "value": 10.0}] + { + "name": "link1", + "type": "Link" + }, + { + "name": "demand1", + "type": "Output", + "max_flow": { + "type": "Parameter", + "name": "demand" + }, + "cost": -10 + } + ], + "edges": [ + { + "from_node": "supply1", + "to_node": "link1" + }, + { + "from_node": "link1", + "to_node": "demand1" + } + ], + "parameters": [ + { + "name": "demand", + "type": "Constant", + "value": 10.0 + } + ] + } }