Ricardo Barros Lourenço
05/14/2024, 8:18 PM
[rank: 0] Seed set to 1
---------------------------------------------------------------------------
MemoryError Traceback (most recent call last)
Cell In[18], line 3
1 # Fit and predict with NBEATS and NHITS models
2 horizon = len(df_long_test)
----> 3 models = [NBEATS(input_size=2 * horizon, h=horizon, max_steps=50, batch_size=1),
4 # NHITS(input_size=2 * horizon, h=horizon, max_steps=50)
5 ]
6 nf = NeuralForecast(models=models, freq='M')
7 nf.fit(df=df_long_train)
File /opt/conda/envs/pytorch_env/lib/python3.11/site-packages/neuralforecast/models/nbeats.py:310, in NBEATS.__init__(self, h, input_size, n_harmonics, n_polynomials, stack_types, n_blocks, mlp_units, dropout_prob_theta, activation, shared_weights, loss, valid_loss, max_steps, learning_rate, num_lr_decays, early_stop_patience_steps, val_check_steps, batch_size, valid_batch_size, windows_batch_size, inference_windows_batch_size, start_padding_enabled, step_size, scaler_type, random_seed, num_workers_loader, drop_last_loader, optimizer, optimizer_kwargs, **trainer_kwargs)
284 super(NBEATS, self).__init__(
285 h=h,
286 input_size=input_size,
(...)
306 **trainer_kwargs,
307 )
309 # Architecture
--> 310 blocks = self.create_stack(
311 h=h,
312 input_size=input_size,
313 stack_types=stack_types,
314 n_blocks=n_blocks,
315 mlp_units=mlp_units,
316 dropout_prob_theta=dropout_prob_theta,
317 activation=activation,
318 shared_weights=shared_weights,
319 n_polynomials=n_polynomials,
320 n_harmonics=n_harmonics,
321 )
322 self.blocks = torch.nn.ModuleList(blocks)
File /opt/conda/envs/pytorch_env/lib/python3.11/site-packages/neuralforecast/models/nbeats.py:352, in NBEATS.create_stack(self, stack_types, n_blocks, input_size, h, mlp_units, dropout_prob_theta, activation, shared_weights, n_polynomials, n_harmonics)
346 if stack_types[i] == "seasonality":
347 n_theta = (
348 2
349 * (self.loss.outputsize_multiplier + 1)
350 * int(np.ceil(n_harmonics / 2 * h) - (n_harmonics - 1))
351 )
--> 352 basis = SeasonalityBasis(
353 harmonics=n_harmonics,
354 backcast_size=input_size,
355 forecast_size=h,
356 out_features=self.loss.outputsize_multiplier,
357 )
359 elif stack_types[i] == "trend":
360 n_theta = (self.loss.outputsize_multiplier + 1) * (
361 n_polynomials + 1
362 )
File /opt/conda/envs/pytorch_env/lib/python3.11/site-packages/neuralforecast/models/nbeats.py:99, in SeasonalityBasis.__init__(self, harmonics, backcast_size, forecast_size, out_features)
92 self.out_features = out_features
93 frequency = np.append(
94 np.zeros(1, dtype=float),
95 np.arange(harmonics, harmonics / 2 * forecast_size, dtype=float)
96 / harmonics,
97 )[None, :]
98 backcast_grid = (
---> 99 -2
100 * np.pi
101 * (np.arange(backcast_size, dtype=float)[:, None] / forecast_size)
102 * frequency
103 )
104 forecast_grid = (
105 2
106 * np.pi
107 * (np.arange(forecast_size, dtype=float)[:, None] / forecast_size)
108 * frequency
109 )
111 backcast_cos_template = torch.tensor(
112 np.transpose(np.cos(backcast_grid)), dtype=torch.float32
113 )
MemoryError: Unable to allocate 3.71 TiB for an array with shape (1009884, 504941) and data type float64
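
For context, the failed allocation is the float64 seasonality-basis grid built inside NBEATS, whose size scales with input_size × h. In NeuralForecast, h is the number of future steps to forecast per series; if df_long_test is a long-format frame, len(df_long_test) counts every row across all series and timestamps, which is usually far larger than the intended horizon. A quick back-of-envelope check (assuming the default harmonics=2) reproduces the 3.71 TiB figure from the traceback:

    h = 504_942                    # horizon = len(df_long_test), taken from the traceback
    input_size = 2 * h             # 1_009_884, the backcast length passed to NBEATS
    n_freq = h - 1                 # 504_941 frequencies in SeasonalityBasis with harmonics=2

    grid_bytes = input_size * n_freq * 8   # float64 -> 8 bytes per element
    print(grid_bytes / 2**40)              # ~3.71 TiB, matching the (1009884, 504941) array above
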
Marco
05/14/2024, 8:39 PM
Have you tried reducing val_batch_size too? It might help! Otherwise, you can also try to reduce the precision of your data, like float32 instead of float64.
I hope this helps!
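
As an illustration of those suggestions (not a snippet from the thread), one way to rewrite the failing cell is sketched below. The constructor argument is spelled valid_batch_size; the horizon of 12 and the y target column are assumptions for a monthly long-format dataset.

    from neuralforecast import NeuralForecast
    from neuralforecast.models import NBEATS

    # Cast the target to float32 to halve the in-memory size of the data
    # (assumes the standard long-format columns unique_id, ds, y).
    df_long_train = df_long_train.astype({"y": "float32"})

    horizon = 12  # assumed per-series forecast length, not len(df_long_test)

    models = [
        NBEATS(
            input_size=2 * horizon,
            h=horizon,
            max_steps=50,
            batch_size=1,
            valid_batch_size=1,      # smaller validation batches, per the suggestion above
            windows_batch_size=128,  # caps how many training windows are sampled per batch
        ),
    ]
    nf = NeuralForecast(models=models, freq="M")
    nf.fit(df=df_long_train)

Note that the 3.71 TiB allocation happens while the model is being constructed (inside NBEATS.__init__), so shrinking h and input_size is what actually removes it; the batch-size and float32 changes mainly reduce memory later, during training.
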
Ricardo Barros Lourenço
05/14/2024, 8:47 PM

Marco
05/14/2024, 8:49 PM

Marco
05/14/2024, 8:51 PM

Ricardo Barros Lourenço
05/14/2024, 8:54 PM