fixed covariates
MaiBe-ctrl committed Aug 30, 2024
1 parent c0bfe97 commit 2f5bb4a
Showing 4 changed files with 12 additions and 9 deletions.
6 changes: 3 additions & 3 deletions neuralprophet/forecaster.py
@@ -1070,7 +1070,7 @@ def fit(
or any(value != 1 for value in self.num_seasonalities_modelled_dict.values())
)

- ##### Data Setup, and Training Setup #####
+ # Data Setup, and Training Setup
# Train Configuration: overwrite self.config_train with user provided values
if learning_rate is not None:
self.config_train.learning_rate = learning_rate
@@ -1213,8 +1213,8 @@ def fit(
if self.config_train.learning_rate is None:
assert not self.fitted, "Learning rate must be provided for re-training a fitted model."

- ## Init a separate Model, Loader and Trainer copy for LR finder (optional, done for safety)
- ## Note Leads to a CUDA issue. Needs to be fixed before enabling this feature.
+ # Init a separate Model, Loader and Trainer copy for LR finder (optional, done for safety)
+ # Note Leads to a CUDA issue. Needs to be fixed before enabling this feature.
# model_lr_finder = self._init_model()
# loader_lr_finder = DataLoader(
# dataset,
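The commented-out block above keeps separate model and loader copies for the learning-rate finder, disabled until the noted CUDA issue is resolved. For reference, a minimal, self-contained sketch of that pattern using PyTorch Lightning's Tuner (assuming the ≥ 2.0 API; `TinyModel` and the data are illustrative, not NeuralProphet code):

```python
import torch
from torch.utils.data import DataLoader, TensorDataset
import pytorch_lightning as pl
from pytorch_lightning.tuner import Tuner

class TinyModel(pl.LightningModule):
    def __init__(self, lr: float = 1e-3):
        super().__init__()
        self.lr = lr  # attribute the LR finder will tune
        self.net = torch.nn.Linear(1, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.net(x), y)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=self.lr)

x = torch.randn(1024, 1)
dataset = TensorDataset(x, 2.0 * x)

# Separate model/loader copies so the LR search never touches the
# state used for the real training run.
model_lr_finder = TinyModel()
loader_lr_finder = DataLoader(dataset, batch_size=8, shuffle=True)

trainer = pl.Trainer(max_epochs=1, logger=False, enable_checkpointing=False)
lr_finder = Tuner(trainer).lr_find(model_lr_finder, train_dataloaders=loader_lr_finder)
print(lr_finder.suggestion())  # suggested learning rate
```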
1 change: 1 addition & 0 deletions neuralprophet/time_dataset.py
@@ -599,6 +599,7 @@ def sort_regressor_names(self, config):
multiplicative_regressors_names.append(reg)
return additive_regressors_names, multiplicative_regressors_names

+
class GlobalTimeDataset(TimeDataset):
def __init__(
self,
2 changes: 1 addition & 1 deletion neuralprophet/time_net.py
@@ -609,7 +609,7 @@ def forward(

# Unpack and process covariates
covariates_input = None
- if self.config_lagged_regressors:
+ if self.config_lagged_regressors and self.config_lagged_regressors.regressors is not None:
covariates_input = self.features_extractor.extract_component(component_name="lagged_regressors")
covariates = self.forward_covar_net(covariates=covariates_input)
additive_components += covariates
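The guard change above accounts for the refactored lagged-regressor config, where the config object can exist while its nested `regressors` field is still None. A minimal sketch of the pattern, using `types.SimpleNamespace` as a hypothetical stand-in for NeuralProphet's actual config class:

```python
from types import SimpleNamespace

# Hypothetical stand-ins for the lagged-regressor config: after the
# refactor, entries live under a `regressors` attribute that may be None.
cfg_empty = SimpleNamespace(regressors=None)
cfg_full = SimpleNamespace(regressors={"temperature": "lagged_regressor_cfg"})

def has_lagged_regressors(config) -> bool:
    # Checking truthiness of the config alone is no longer sufficient;
    # the nested `regressors` field must be checked too, as in time_net.py.
    return config is not None and config.regressors is not None

print(has_lagged_regressors(None))       # False
print(has_lagged_regressors(cfg_empty))  # False
print(has_lagged_regressors(cfg_full))   # True
```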
12 changes: 7 additions & 5 deletions neuralprophet/utils_time_dataset.py
@@ -95,8 +95,8 @@ def extract_lags(self):

def extract_lagged_regressors(self):
lagged_regressors = OrderedDict()
- if self.lagged_regressor_config:
- for name, lagged_regressor in self.lagged_regressor_config.items():
+ if self.lagged_regressor_config is not None and self.lagged_regressor_config.regressors is not None:
+ for name, lagged_regressor in self.lagged_regressor_config.regressors.items():
lagged_regressor_key = f"lagged_regressor_{name}"
if lagged_regressor_key in self.feature_indices:
lagged_regressor_start_idx, _ = self.feature_indices[lagged_regressor_key]
@@ -217,12 +217,14 @@ def pack_lagged_regerssors_component(df_tensors, feature_list, feature_indices,
"""
Stack the lagged regressor features.
"""
- if config_lagged_regressors:
- lagged_regressor_tensors = [df_tensors[name].unsqueeze(-1) for name in config_lagged_regressors.keys()]
+ if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
+ lagged_regressor_tensors = [
+ df_tensors[name].unsqueeze(-1) for name in config_lagged_regressors.regressors.keys()
+ ]
stacked_lagged_regressor_tensor = torch.cat(lagged_regressor_tensors, dim=-1)
feature_list.append(stacked_lagged_regressor_tensor)
num_features = stacked_lagged_regressor_tensor.size(-1)
- for i, name in enumerate(config_lagged_regressors.keys()):
+ for i, name in enumerate(config_lagged_regressors.regressors.keys()):
feature_indices[f"lagged_regressor_{name}"] = (
current_idx + i,
current_idx + i + 1,
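Both hunks in this file follow the same change: iterate the nested `config.regressors` dict instead of the config object itself. A self-contained sketch of the packing step after the fix, with dummy tensors standing in for the real `df_tensors` (all names and shapes here are illustrative):

```python
from collections import OrderedDict
from types import SimpleNamespace

import torch

# Illustrative config: regressor entries now live under `.regressors`.
config_lagged_regressors = SimpleNamespace(
    regressors=OrderedDict(temperature=None, humidity=None)
)

# Dummy per-regressor series standing in for df_tensors in the real code.
df_tensors = {name: torch.arange(4.0) for name in config_lagged_regressors.regressors}

feature_list, feature_indices, current_idx = [], {}, 0
if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
    # Stack one column per lagged regressor: shape (n_samples, n_regressors).
    lagged = [df_tensors[name].unsqueeze(-1) for name in config_lagged_regressors.regressors.keys()]
    stacked = torch.cat(lagged, dim=-1)
    feature_list.append(stacked)
    # Record where each regressor's column sits in the packed feature tensor.
    for i, name in enumerate(config_lagged_regressors.regressors.keys()):
        feature_indices[f"lagged_regressor_{name}"] = (current_idx + i, current_idx + i + 1)

print(stacked.shape)    # torch.Size([4, 2])
print(feature_indices)  # {'lagged_regressor_temperature': (0, 1), 'lagged_regressor_humidity': (1, 2)}
```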
