Skip to content

Commit

Permalink
updated optuna to newer version, added continue_existing_study parame…
Browse files Browse the repository at this point in the history
…ter to config
  • Loading branch information
klemen1999 committed May 7, 2024
1 parent 2e82131 commit 7f6aa1f
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 5 deletions.
1 change: 1 addition & 0 deletions configs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,7 @@ Here you can specify options for tuning.
| Key | Type | Default value | Description |
| ---------- | ----------------- | ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| study_name | str | "test-study" | Name of the study. |
| continue_existing_study | bool | True | Whether to continue an existing study if `study_name` already exists. |
| use_pruner | bool | True | Whether to use the MedianPruner. |
| n_trials | int \| None | 15 | Number of trials for each process. `None` represents no limit in terms of number of trials. |
| timeout | int \| None | None | Stop study after the given number of seconds. |
Expand Down
6 changes: 2 additions & 4 deletions luxonis_train/core/tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def tune(self) -> None:
storage=storage,
direction="minimize",
pruner=pruner,
load_if_exists=True,
load_if_exists=self.tune_cfg.continue_existing_study,
)

study.optimize(
Expand Down Expand Up @@ -94,9 +94,7 @@ def _objective(self, trial: optuna.trial.Trial) -> float:
save_dir=run_save_dir,
input_shape=self.loader_train.input_shape,
)
pruner_callback = PyTorchLightningPruningCallback(
trial, monitor="val_loss/loss"
)
pruner_callback = PyTorchLightningPruningCallback(trial, monitor="val/loss")
callbacks: list[pl.Callback] = (
[LuxonisProgressBar()] if self.cfg.use_rich_text else []
)
Expand Down
1 change: 1 addition & 0 deletions luxonis_train/utils/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,7 @@ class StorageConfig(BaseModel):

class TunerConfig(BaseModel):
study_name: str = "test-study"
continue_existing_study: bool = True
use_pruner: bool = True
n_trials: int | None = 15
timeout: int | None = None
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@ luxonis-ml[all]>=0.0.1
onnx>=1.12.0
onnxruntime>=1.13.1
onnxsim>=0.4.10
optuna>=3.2.0
optuna>=3.6.0
optuna-integration>=3.6.0
psycopg2-binary>=2.9.1
pycocotools>=2.0.7
rich>=13.0.0
Expand Down

0 comments on commit 7f6aa1f

Please sign in to comment.