Hi everyone, I get the following error when running the code below with the hierarchicalforecast package. How do I resolve the exception?
Exception: Please include `NBEATS-median` prediction intervals in `Y_hat_df`
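
The message suggests the reconciler is looking for an `NBEATS-median` column (alongside the `-lo-`/`-hi-` interval columns the evaluation further down relies on) in whatever is passed as `Y_hat_df`. One quick way to see what `nf.predict()` actually returned, assuming the script has run up to that point, is to list the NBEATS columns:

print([col for col in Y_hat_nf.columns if col.startswith('NBEATS')])  # diagnostic only: list the NBEATS-* columns in the forecast frame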
if __name__ == '__main__':
    import numpy as np
    import pandas as pd

    from datasetsforecast.hierarchical import HierarchicalData

    from neuralforecast import NeuralForecast
    from neuralforecast.models import NBEATS
    from neuralforecast.losses.pytorch import GMM

    from mlforecast import MLForecast
    from window_ops.expanding import expanding_mean
    from mlforecast.utils import PredictionIntervals
    from mlforecast.target_transforms import Differences
    import xgboost as xgb

    # obtain hierarchical reconciliation methods and evaluation
    from hierarchicalforecast.methods import BottomUp, MinTrace
    from hierarchicalforecast.utils import HierarchicalPlot
    from hierarchicalforecast.core import HierarchicalReconciliation
    from hierarchicalforecast.evaluation import scaled_crps

    Y_df, S_df, tags = HierarchicalData.load('./data', 'TourismLarge')
    Y_df['ds'] = pd.to_datetime(Y_df['ds'])
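    # Assumed layout from HierarchicalData.load: Y_df is the long-format panel with
    # columns (unique_id, ds, y) for every node of the hierarchy, S_df the summing
    # matrix mapping bottom-level series to aggregates, tags a dict of hierarchy
    # level -> series ids.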

    print(Y_df.head())

    hplot = HierarchicalPlot(S=S_df, tags=tags)
    hplot.plot_summing_matrix()


    def sort_hier_df(Y_df, S_df):
        # order unique_id to match the row order of the summing matrix S_df
        Y_df.unique_id = Y_df.unique_id.astype('category')
        Y_df.unique_id = Y_df.unique_id.cat.set_categories(S_df.index)
        Y_df = Y_df.sort_values(by=['unique_id', 'ds'])
        return Y_df


    Y_df = sort_hier_df(Y_df, S_df)

    horizon = 12
    Y_test_df = Y_df.groupby('unique_id').tail(horizon)
    Y_train_df = Y_df.drop(Y_test_df.index)
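    # Y_test_df now holds the last `horizon` observations of every series; Y_train_df the rest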

    level = np.arange(0, 100, 2)
    qs = [[50 - lv / 2, 50 + lv / 2] for lv in level]
    quantiles = np.sort(np.concatenate(qs) / 100)
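    # level covers 0, 2, ..., 98 (50 coverage levels); each level lv contributes the pair
    # (50 - lv/2, 50 + lv/2), so `quantiles` now holds 100 values spanning 0.01..0.99
    # (level 0 adds the median twice)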

    # fit/predict NBEATS from NeuralForecast
    nbeats = NBEATS(h=horizon,
                    input_size=2 * horizon,
                    loss=GMM(n_components=10, quantiles=quantiles),
                    scaler_type='robust',
                    max_steps=2000)
    nf = NeuralForecast(models=[nbeats], freq='MS')
    nf.fit(df=Y_train_df)
    Y_hat_nf = nf.predict()
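    # Y_hat_nf is the frame later passed to the reconciler as Y_hat_df, i.e. where the
    # 'NBEATS-median' / interval columns the exception asks for would have to live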

    # fit/predict XGBRegressor from MLForecast
    mf = MLForecast(models=[xgb.XGBRegressor()],
                    freq='MS',
                    lags=[1, 2, 12, 24],
                    date_features=['month'],
                    )
    mf.fit(Y_train_df, prediction_intervals=PredictionIntervals(n_windows=10, window_size=horizon))
    Y_hat_mf = mf.predict(horizon, level=level).set_index('unique_id')
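    # Quick look at the interval columns MLForecast produced for XGBRegressor
    print([col for col in Y_hat_mf.columns if col.startswith('XGBRegressor')])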

    print(Y_hat_nf.head())

    print(Y_hat_mf.head())

    reconcilers = [
        BottomUp(),
        MinTrace('ols')
    ]
    hrec = HierarchicalReconciliation(reconcilers=reconcilers)

    Y_rec_nf = hrec.reconcile(Y_hat_df=Y_hat_nf, Y_df=Y_train_df, S=S_df, tags=tags, level=level)
    Y_rec_mf = hrec.reconcile(Y_hat_df=Y_hat_mf, Y_df=Y_train_df, S=S_df, tags=tags, level=level)
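    # Passing `level` asks each reconciler for interval outputs as well, i.e. columns such
    # as 'NBEATS/BottomUp-lo-80' that the sCRPS loops below collect; the exception quoted
    # at the top of the post is presumably raised by the first of these two calls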

    rec_model_names_nf = ['NBEATS/BottomUp', 'NBEATS/MinTrace_method-ols']
    rec_model_names_mf = ['XGBRegressor/BottomUp', 'XGBRegressor/MinTrace_method-ols']

    n_quantiles = len(quantiles)
    n_series = len(S_df)
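    # The loops below gather each reconciled model's -lo-/-hi- columns, reshape them to
    # (n_series, horizon, n_quantiles) and the test values to (n_series, horizon), and
    # score them with scaled_crps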

    for name in rec_model_names_nf:
        quantile_columns = [col for col in Y_rec_nf.columns if (name + '-lo') in col or (name + '-hi') in col]
        y_rec = Y_rec_nf[quantile_columns].values
        y_test = Y_test_df['y'].values

        y_rec = y_rec.reshape(n_series, horizon, n_quantiles)
        y_test = y_test.reshape(n_series, horizon)
        scrps = scaled_crps(y=y_test, y_hat=y_rec, quantiles=quantiles)
        print("{:<40} {:.5f}".format(name + ":", scrps))

    for name in rec_model_names_mf:
        quantile_columns = [col for col in Y_rec_mf.columns if (name + '-lo') in col or (name + '-hi') in col]
        y_rec = Y_rec_mf[quantile_columns].values
        y_test = Y_test_df['y'].values

        y_rec = y_rec.reshape(n_series, horizon, n_quantiles)
        y_test = y_test.reshape(n_series, horizon)
        scrps = scaled_crps(y=y_test, y_hat=y_rec, quantiles=quantiles)
        print("{:<40} {:.5f}".format(name + ":", scrps))

    plot_nf = pd.concat([Y_df.set_index(['unique_id', 'ds']),
                         Y_rec_nf.set_index('ds', append=True)], axis=1)
    plot_nf = plot_nf.reset_index('ds')

    plot_mf = pd.concat([Y_df.set_index(['unique_id', 'ds']),
                         Y_rec_mf.set_index('ds', append=True)], axis=1)
    plot_mf = plot_mf.reset_index('ds')
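    # plot_nf / plot_mf join actuals and reconciled forecasts on (unique_id, ds) so that
    # plot_series can overlay 'y' against each model's point forecast and its 80% band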

    hplot.plot_series(
        series='TotalVis',
        Y_df=plot_nf,
        models=['y', 'NBEATS', 'NBEATS/BottomUp', 'NBEATS/MinTrace_method-ols'],
        level=[80]
    )

    hplot.plot_series(
        series='TotalVis',
        Y_df=plot_mf,
        models=['y', 'XGBRegressor', 'XGBRegressor/BottomUp', 'XGBRegressor/MinTrace_method-ols'],
        level=[80]
    )