Bersu T
03/11/2025, 7:12 AM
jan rathfelder
# 03/11/2025, 11:51 AM
import lightgbm as lgb

# Train a LightGBM quantile regressor. With alpha=0.5 the pinball loss
# is minimised at the conditional median (i.e. MAE-like behaviour).
params = dict(
    objective='quantile',
    alpha=0.5,          # 0.5 corresponds to the median (pinball loss at 50% quantile)
    learning_rate=0.1,
    num_leaves=31,
    metric='quantile',
)

# Wrap the training matrices in LightGBM's native Dataset container.
train_data = lgb.Dataset(X_train, label=y_train)

# Fit the booster for a fixed 100 boosting rounds.
model = lgb.train(params, train_data, num_boost_round=100)
Bersu T
# 03/11/2025, 4:46 PM
def loss_fn(df, train_df):
    """Cross-validation loss: mean MAE of the 'model' column.

    ``train_df`` is accepted to satisfy the expected callback signature
    but is not used by this loss.
    """
    scores = mae(df, models=['model'])
    return scores['model'].mean()
# Run the hyperparameter search on the aggregated series.
auto_mlf_large.fit(
    grote_agg_df,
    h=h,
    n_windows=n_windows,
    step_size=step_size,
    num_samples=40,  # optuna trials to evaluate
    fitted=True,     # keep fitted values for later inspection
    loss=loss_fn,    # custom objective: mean MAE over the CV windows
)
jan rathfelder
# 03/11/2025, 7:35 PM
def my_lgb_config(trial: optuna.Trial):
    """Optuna search space for the LightGBM model."""
    # Fixed (non-tuned) settings.
    space = {
        'learning_rate': 0.05,
        'verbosity': -1,  # silence LightGBM's per-iteration logging
    }
    # Tuned hyperparameters.
    space['num_leaves'] = trial.suggest_int('num_leaves', 2, 128, log=True)
    space['objective'] = trial.suggest_categorical('objective', ['l1', 'l2', 'mape'])
    return space
# Pair the raw regressor with its optuna search space.
my_lgb = AutoModel(
    model=lgb.LGBMRegressor(),
    config=my_lgb_config,
)

# Build the forecaster, then run a tiny tuning loop (2 CV windows,
# 2 optuna trials). AutoMLForecast.fit returns the forecaster itself.
forecaster = AutoMLForecast(
    models={'my_lgb': my_lgb},
    freq=1,            # integer-indexed series
    season_length=24,
)
auto_mlf = forecaster.fit(
    train,
    h=horizon,
    n_windows=2,
    num_samples=2,
)

# Forecast the horizon and score the predictions.
preds = auto_mlf.predict(horizon)
evaluate(preds, group)
Bersu T
# 03/11/2025, 9:50 PM
# Silence optuna's per-trial progress output.
optuna.logging.set_verbosity(optuna.logging.ERROR)

# Tune two candidate model families side by side.
candidate_models = {'lgb': AutoLightGBM(), 'ridge': AutoRidge()}
auto_mlf = AutoMLForecast(
    models=candidate_models,
    freq=1,
    season_length=24,
)
auto_mlf.fit(
    train,
    h=horizon,
    n_windows=2,
    num_samples=2,  # number of trials to run
)
jan rathfelder
03/12/2025, 6:22 AM
Bersu T
03/12/2025, 9:08 AM
jan rathfelder
03/12/2025, 10:48 AM
Bersu T
03/12/2025, 10:53 AM
season_length=12
. I already added date features myself (month, Fourier terms) in the dataset. I'll add my own lags and check again.
jan rathfelder
03/12/2025, 11:00 AM
'date_features': None,
Bersu T
03/12/2025, 12:12 PM
jan rathfelder
03/12/2025, 12:41 PM
Bersu T
03/12/2025, 1:56 PM
Bersu T
03/12/2025, 1:56 PM
jan rathfelder
03/12/2025, 2:07 PM