jan rathfelder
05/27/2024, 9:41 AM

def early_stopping(self, study, trial):
    # Optuna callback: stop the study once no sufficiently large relative
    # improvement has been seen for `self.patience` trials.
    current_value = trial.value
    if current_value is None:
        # Pruned or failed trials carry no value; skip them.
        return
    improvement = self.best_value / current_value - 1
    if improvement > self.percentage:
        # Relative improvement above the threshold: record the new best and reset patience.
        self.best_value = current_value
        self.best_step = trial.number
    elif trial.number - self.best_step >= self.patience:
        print(f"Early stopping triggered at trial {trial.number}.")
        study.stop()
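
For reference, a minimal sketch of the state this callback relies on. The class name OptunaTuner and the default values below are assumptions; only the attribute names (best_value, best_step, percentage, patience) come from the method itself:

class OptunaTuner:
    # Hypothetical skeleton, not the author's actual class.
    def __init__(self, percentage: float = 0.01, patience: int = 5):
        self.percentage = percentage    # minimum relative improvement that counts as progress
        self.patience = patience        # trials tolerated without such progress before stopping
        self.best_value = float("inf")  # lowest objective value seen so far (minimization)
        self.best_step = 0              # trial number of the last recorded improvement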
jan rathfelder
05/27/2024, 9:42 AM

def run_optuna(self, n_trials: int = 20) -> Dict[str, Any]:
    """
    Runs Optuna optimization to find the best hyperparameters.

    Parameters:
    - n_trials (int, optional): The number of optimization trials. Default is 20.

    Returns:
    - Dict[str, Any]: The best hyperparameters found during optimization.
    """
    study = optuna.create_study(
        direction="minimize", pruner=optuna.pruners.MedianPruner()
    )
    # The early_stopping callback above may end the study before n_trials is reached.
    study.optimize(self.objective, n_trials=n_trials, callbacks=[self.early_stopping])
    return study.best_params
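
Putting the two methods together, a rough usage sketch, assuming early_stopping and run_optuna are defined on the OptunaTuner skeleton above; the toy objective here is purely illustrative, since the real self.objective is not shown in the thread:

import optuna
from typing import Any, Dict


class MyTuner(OptunaTuner):
    # Toy subclass for illustration only; assumes early_stopping and run_optuna
    # above are methods of OptunaTuner.
    def objective(self, trial: optuna.Trial) -> float:
        x = trial.suggest_float("x", -10.0, 10.0)
        return (x - 2.0) ** 2  # minimum at x = 2


tuner = MyTuner(percentage=0.01, patience=5)
best_params: Dict[str, Any] = tuner.run_optuna(n_trials=50)
print(best_params)  # best parameters found once early stopping kicks in or trials run out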