From e66d325de840a46b8d8cc4421a9387bc56fa7552 Mon Sep 17 00:00:00 2001
From: klemen1999
Date: Mon, 13 May 2024 22:33:44 +0200
Subject: [PATCH] added logging to Tuner

---
 luxonis_train/core/tuner.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/luxonis_train/core/tuner.py b/luxonis_train/core/tuner.py
index b5e61632..cdad5cc2 100644
--- a/luxonis_train/core/tuner.py
+++ b/luxonis_train/core/tuner.py
@@ -1,5 +1,6 @@
 import os.path as osp
 from typing import Any
+from logging import getLogger
 
 import lightning.pytorch as pl
 import optuna
@@ -13,6 +14,8 @@
 
 from .core import Core
 
+logger = getLogger(__name__)
+
 
 class Tuner(Core):
     def __init__(self, cfg: str | dict, args: list[str] | tuple[str, ...] | None):
@@ -32,6 +35,7 @@ def __init__(self, cfg: str | dict, args: list[str] | tuple[str, ...] | None):
 
     def tune(self) -> None:
         """Runs Optuna tunning of hyperparameters."""
+        logger.info("Starting tuning...")
 
         pruner = (
             optuna.pruners.MedianPruner()
@@ -66,6 +70,8 @@ def tune(self) -> None:
             timeout=self.tune_cfg.timeout,
         )
 
+        logger.info(f"Best study parameters: {study.best_params}")
+
    def _objective(self, trial: optuna.trial.Trial) -> float:
        """Objective function used to optimize Optuna study."""
        rank = rank_zero_only.rank
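
A minimal sketch (not part of the patch) of how the module-level logger added in tuner.py surfaces its messages, assuming the application configures Python's logging; the tune() stub, logger name, and parameter values below are hypothetical stand-ins, not Tuner internals.

    import logging
    from logging import getLogger

    # Stands in for the module-level logger the patch creates via getLogger(__name__).
    logger = getLogger("luxonis_train.core.tuner")


    def tune() -> None:
        """Hypothetical stand-in for Tuner.tune() showing the added log calls."""
        logger.info("Starting tuning...")
        best_params = {"train.optimizer.lr": 1e-3}  # placeholder, not a real study result
        logger.info(f"Best study parameters: {best_params}")


    if __name__ == "__main__":
        # Without a configured handler the INFO records are not shown;
        # basicConfig is the simplest way to emit them to stderr.
        logging.basicConfig(level=logging.INFO)
        tune()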