[Minor] Move max_lags, prediction_freq to config_model and n_lags to config_ar (#1644)

* updated dataset get_item

* fixed linting issues

* make targets contiguous

* fixed ruff warnings

* Unpack incrementally when needed

* adjust forecaster

* separate unpacking logic

* added featureExtractor class

* separate packing logic

* fixed linting issues

* fixed covariates

* remove lagged_reg_layers from model_config

* remove n_lags from forecaster

* remove model.n_lags references

* fix typo

* fix 2

* fixes

* do not init max_lags

* set max lags in add_lagged_reg

* fix test

* fix testz

* fix testz2

* fix predict_seasonality

* improve predict_seasonal_components

* uncomment None configs

* save previous settings

* fix failing test

* move prediction_frequency to model_config

* remove lagged layers merge issue

* remove packer

* rm Extractor

* rm Extractor2

* fix stacker in forecaster

* fix test

* fix tests

* remove commented code

* cleanup

* ruff

* remove unused fourier funcs and document new func

* fix new func

* retain OG fourier

* move fourier helper to plotting utils

* ruff

---------

Co-authored-by: MaiBe-ctrl <[email protected]>
Co-authored-by: Maisa Ben Salah <[email protected]>
3 people authored Sep 3, 2024
1 parent 9c4963c commit ca0952f
Showing 13 changed files with 241 additions and 206 deletions.
32 changes: 30 additions & 2 deletions neuralprophet/configure.py
@@ -5,7 +5,7 @@
 import types
 from collections import OrderedDict
 from dataclasses import dataclass, field
-from typing import Callable, List, Optional
+from typing import Callable, Dict, List, Optional
 from typing import OrderedDict as OrderedDictType
 from typing import Type, Union

@@ -23,8 +23,10 @@
 @dataclass
 class Model:
     features_map: dict

[GitHub Actions / pyright] Check failure on line 25 in neuralprophet/configure.py: Declaration "features_map" is obscured by a declaration of the same name (reportRedeclaration)

-    lagged_reg_layers: Optional[List[int]]
     quantiles: Optional[List[float]] = None
+    prediction_frequency: Optional[Dict[str]] = None

[GitHub Actions / pyright] Check failure on line 27 in neuralprophet/configure.py: Too few type arguments provided for "Dict"; expected 2 but received 1 (reportInvalidTypeArguments)

+    features_map: Optional[dict] = field(default_factory=dict)
+    max_lags: Optional[int] = field(init=False)

     def setup_quantiles(self):
         # convert quantiles to empty list [] if None
@@ -43,6 +45,32 @@ def setup_quantiles(self):
         # 0 is the median quantile index
         self.quantiles.insert(0, 0.5)

+    def set_max_num_lags(self, n_lags: int, config_lagged_regressors: Optional[ConfigLaggedRegressors] = None) -> int:

[GitHub Actions / pyright] Check failure on line 48 in neuralprophet/configure.py: Function with declared return type "int" must return value on all code paths; "None" is not assignable to "int" (reportReturnType)

+        """Get the greatest number of lags between the autoregression lags and the covariates lags.
+        Parameters
+        ----------
+            n_lags : int
+                number of autoregressive lagged values of series to include as model inputs
+            config_lagged_regressors : configure.ConfigLaggedRegressors
+                Configurations for lagged regressors
+        Returns
+        -------
+            int
+                Maximum number of lags between the autoregression lags and the covariates lags.
+        """
+        if (
+            config_lagged_regressors is not None
+            and config_lagged_regressors.regressors is not None
+            and len(config_lagged_regressors.regressors) > 0
+        ):
+            lagged_regressor_lags = [val.n_lags for key, val in config_lagged_regressors.regressors.items()]
+            max_lagged_regressor_lags = max(lagged_regressor_lags)
+            self.max_lags = max(n_lags, max_lagged_regressor_lags)
+        else:
+            self.max_lags = n_lags


 ConfigModel = Model

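For orientation, here is a minimal, self-contained sketch of the behaviour the new Model.set_max_num_lags method implements. The _LaggedRegressor and _ModelConfig classes below are illustrative stand-ins, not the real neuralprophet config objects:

from dataclasses import dataclass, field
from typing import Dict, Optional


@dataclass
class _LaggedRegressor:
    # stand-in for one lagged-regressor entry carrying its own number of lags
    n_lags: int


@dataclass
class _ModelConfig:
    # stand-in for configure.Model; max_lags is filled in later rather than at init time
    max_lags: Optional[int] = field(default=None)

    def set_max_num_lags(self, n_lags: int, regressors: Optional[Dict[str, _LaggedRegressor]] = None) -> None:
        # store the greatest lag count among autoregression and all covariates
        if regressors:
            self.max_lags = max(n_lags, max(reg.n_lags for reg in regressors.values()))
        else:
            self.max_lags = n_lags


cfg = _ModelConfig()
cfg.set_max_num_lags(n_lags=3, regressors={"temperature": _LaggedRegressor(n_lags=7)})
assert cfg.max_lags == 7  # covariate lags dominate here

cfg.set_max_num_lags(n_lags=3)
assert cfg.max_lags == 3  # no lagged regressors: fall back to the AR lags

Unlike the removed df_utils.get_max_num_lags, the result is stored on the config rather than returned, which is what the pyright annotation above is flagging about the declared int return type.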
8 changes: 4 additions & 4 deletions neuralprophet/data/process.py
@@ -276,12 +276,12 @@ def _prepare_dataframe_to_predict(model, df: pd.DataFrame, max_lags: int, freq:
                 raise ValueError("only datestamps provided but y values needed for auto-regression.")
             df_i = _check_dataframe(model, df_i, check_y=False, exogenous=False)
         else:
-            df_i = _check_dataframe(model, df_i, check_y=model.max_lags > 0, exogenous=False)
+            df_i = _check_dataframe(model, df_i, check_y=model.config_model.max_lags > 0, exogenous=False)
             # fill in missing nans except for nans at end
             df_i = _handle_missing_data(
                 df=df_i,
                 freq=freq,
-                n_lags=model.n_lags,
+                n_lags=model.config_ar.n_lags,
                 n_forecasts=model.n_forecasts,
                 config_missing=model.config_missing,
                 config_regressors=model.config_regressors,
@@ -401,7 +401,7 @@ def _check_dataframe(
     pd.DataFrame
         checked dataframe
     """
-    if len(df) < (model.n_forecasts + model.n_lags) and not future:
+    if len(df) < (model.n_forecasts + model.config_ar.n_lags) and not future:
         raise ValueError(
             "Dataframe has less than n_forecasts + n_lags rows. "
             "Forecasting not possible. Please either use a larger dataset, or adjust the model parameters."
@@ -616,7 +616,7 @@ def _create_dataset(model, df, predict_mode, prediction_frequency=None, componen
     return time_dataset.GlobalTimeDataset(
         df,
         predict_mode=predict_mode,
-        n_lags=model.n_lags,
+        n_lags=model.config_ar.n_lags,
         n_forecasts=model.n_forecasts,
         prediction_frequency=prediction_frequency,
         predict_steps=model.predict_steps,
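The three hunks above all follow the same pattern: values that used to be read directly off the forecaster (model.n_lags, model.max_lags) now come from its sub-configs. A hedged sketch of that access pattern, with the forecaster mocked via SimpleNamespace rather than the real NeuralProphet object:

from types import SimpleNamespace

# mocked forecaster with the attribute layout this commit assumes (not the real class)
model = SimpleNamespace(
    n_forecasts=7,
    config_ar=SimpleNamespace(n_lags=14),       # previously accessed as model.n_lags
    config_model=SimpleNamespace(max_lags=21),  # previously accessed as model.max_lags
)

# e.g. the minimum-length check in _check_dataframe now reads:
min_rows = model.n_forecasts + model.config_ar.n_lags
needs_y = model.config_model.max_lags > 0
print(min_rows, needs_y)  # 21 True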
23 changes: 0 additions & 23 deletions neuralprophet/df_utils.py
@@ -88,29 +88,6 @@ def return_df_in_original_format(df, received_ID_col=False, received_single_time
     return new_df


-def get_max_num_lags(n_lags: int, config_lagged_regressors: Optional[ConfigLaggedRegressors]) -> int:
-    """Get the greatest number of lags between the autoregression lags and the covariates lags.
-    Parameters
-    ----------
-        n_lags : int
-            number of lagged values of series to include as model inputs
-        config_lagged_regressors : configure.ConfigLaggedRegressors
-            Configurations for lagged regressors
-    Returns
-    -------
-        int
-            Maximum number of lags between the autoregression lags and the covariates lags.
-    """
-    if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
-        # log.debug("config_lagged_regressors exists")
-        return max([n_lags] + [val.n_lags for key, val in config_lagged_regressors.regressors.items()])
-    else:
-        # log.debug("config_lagged_regressors.regressors does not exist")
-        return n_lags


 def merge_dataframes(df: pd.DataFrame) -> pd.DataFrame:
     """Join dataframes for procedures such as splitting data, set auto seasonalities, and others.
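For comparison with the new Model.set_max_num_lags above, a stand-alone version of the rule the deleted helper encoded. For brevity the covariate lags are passed here as a plain name-to-n_lags dict; the real function took a ConfigLaggedRegressors object and read each entry's n_lags:

from typing import Dict, Optional


def get_max_num_lags(n_lags: int, covariate_lags: Optional[Dict[str, int]] = None) -> int:
    # greatest lag count among autoregression and all covariates, as in the removed helper
    if covariate_lags:
        return max([n_lags] + list(covariate_lags.values()))
    return n_lags


assert get_max_num_lags(3) == 3
assert get_max_num_lags(3, {"sales": 10}) == 10

After this commit the same value is computed once by config_model.set_max_num_lags(...) and read back as config_model.max_lags, rather than being recomputed by callers.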
(Diffs for the remaining changed files are not shown here.)
