Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: refactor metrics and switch to ruff #64

Merged
merged 4 commits into from
Jan 24, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 9 additions & 30 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,49 +4,28 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-ast
- id: check-toml
- id: check-yaml
args:
- --unsafe
#- id: check-json
- id: check-case-conflict
- id: check-merge-conflict
- id: end-of-file-fixer
- id: trailing-whitespace
- id: detect-private-key
- id: requirements-txt-fixer

# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v0.991
# hooks:
# - id: mypy
# args: [--ignore-missing-imports, --pretty, --show-error-codes]
- repo: https://github.com/nbQA-dev/nbQA # run lint on notebook
rev: 1.9.1
hooks:
- id: nbqa-black
- id: nbqa-pyupgrade
# - id: nbqa-isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.19.1
hooks:
- id: pyupgrade
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
name: isort (python)
args: [--profile, black]
- repo: https://github.com/psf/black
rev: 24.10.0
hooks:
- id: black
- repo: https://github.com/kynan/nbstripout
rev: 0.6.0
hooks:
- id: nbstripout
- repo: https://github.com/hadialqattan/pycln # remove unused import
rev: v2.5.0

- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.2
hooks:
- id: pycln
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
5 changes: 4 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,8 @@
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
"python.testing.pytestEnabled": true,
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
}
11 changes: 6 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,24 +1,25 @@
.PHONY: test install install-dev install-pre-commit run-pre-commit .uv .pre-commit tox
.PHONY: venv test install install-dev install-pre-commit run-pre-commit .uv .pre-commit tox

.uv: ## Check that uv is installed
@uv --version || echo 'Please install uv: https://docs.astral.sh/uv/getting-started/installation/'

.pre-commit: ## Check that pre-commit is installed
@pre-commit -V || echo 'Please install pre-commit: https://pre-commit.com/'

venv:
@uv venv --python=python3.12

install: .uv .pre-commit
@uv venv
@uv pip install -e ".[cpu,dev]"
@pre-commit install

install-gpu: .uv .pre-commit
@uv venv
@uv pip install -e ".[dev,gpu]"
@pre-commit install

lint:
@isort ./focoos ./tests --profile=black
@black ./focoos ./tests
@ruff check ./focoos ./tests ./notebooks --fix
@ruff format ./focoos ./tests ./notebooks

run-pre-commit: .pre-commit
@pre-commit run --all-files
Expand Down
30 changes: 14 additions & 16 deletions docs/how_to/cloud_training.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ pprint(datasets)
```

##  Initiating a Cloud Training Job

To start training, configure the model, dataset, and training parameters as shown below:

```python
Expand All @@ -29,7 +28,6 @@ from focoos.ports import Hyperparameters, TrainInstance
model = focoos.get_remote_model("<YOUR-MODEL-ID>")

res = model.train(
anyma_version="0.11.1",
dataset_ref="<YOUR-DATASET-ID>",
instance_type=TrainInstance.ML_G4DN_XLARGE,
volume_size=50,
Expand All @@ -42,29 +40,19 @@ res = model.train(
resolution=640,
), # type: ignore
)
pprint(res)
```

##  Monitoring Training Progress
##  Monitoring Training Progress in a Jupyter Notebook

Once the training job is initiated, monitor its progress by polling the training status. Use the following code:

```python
import time
from pprint import pprint
from focoos.utils.logger import get_logger
from focoos import Focoos

completed_status = ["Completed", "Failed"]
logger = get_logger(__name__)
focoos = Focoos(api_key=os.getenv("FOCOOS_API_KEY"))

model = focoos.get_remote_model("<YOUR-MODEL-ID>")
status = model.train_status()

while status["main_status"] not in completed_status:
status = model.train_status()
logger.info(f"Training status: {status['main_status']}")
pprint(f"Training progress: {status['status_transitions']}")
time.sleep(30)
model.notebook_monitor_train(interval=30, plot_metrics=True)
```

##  Retrieving Training Logs
Expand All @@ -75,3 +63,13 @@ After the training process is complete, retrieve the logs for detailed insights:
logs = model.train_logs()
pprint(logs)
```

## Retrieve and Visualize Training Metrics

```python
from focoos.utils.metrics import MetricsVisualizer

metrics = model.metrics()
visualizer = MetricsVisualizer(metrics)
visualizer.log_metrics()
```
6 changes: 4 additions & 2 deletions focoos/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@
OnnxEngineOpts,
RuntimeTypes,
SystemInfo,
TraininingInfo,
TrainingInfo,
TrainInstance,
)
from .remote_model import RemoteModel
from .runtime import ONNXRuntime, get_runtime
from .utils.logger import get_logger
from .utils.system import get_system_info
from .utils.vision import (
base64mask_to_mask,
Expand Down Expand Up @@ -59,7 +60,7 @@
"OnnxEngineOpts",
"RuntimeTypes",
"SystemInfo",
"TraininingInfo",
"TrainingInfo",
"TrainInstance",
"get_system_info",
"ONNXRuntime",
Expand All @@ -75,4 +76,5 @@
"image_preprocess",
"index_to_class",
"sv_to_focoos_detections",
"get_logger",
]
28 changes: 8 additions & 20 deletions focoos/focoos.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
from focoos.ports import (
DatasetMetadata,
ModelMetadata,
ModelNotFound,
ModelPreview,
Quotas,
RuntimeTypes,
User,
)
Expand Down Expand Up @@ -92,9 +92,7 @@ def __init__(
self.http_client = HttpClient(self.api_key, host_url)
self.user_info = self.get_user_info()
self.cache_dir = os.path.join(os.path.expanduser("~"), ".cache", "focoos")
logger.info(
f"Currently logged as: {self.user_info.email} environment: {host_url}"
)
logger.info(f"Currently logged as: {self.user_info.email} environment: {host_url}")

def get_user_info(self) -> User:
"""
Expand Down Expand Up @@ -160,9 +158,7 @@ def list_focoos_models(self) -> list[ModelPreview]:
res = self.http_client.get("models/focoos-models")
if res.status_code != 200:
logger.error(f"Failed to list focoos models: {res.status_code} {res.text}")
raise ValueError(
f"Failed to list focoos models: {res.status_code} {res.text}"
)
raise ValueError(f"Failed to list focoos models: {res.status_code} {res.text}")
return [ModelPreview.from_json(r) for r in res.json()]

def get_local_model(
Expand Down Expand Up @@ -207,9 +203,7 @@ def get_remote_model(self, model_ref: str) -> RemoteModel:
"""
return RemoteModel(model_ref, self.http_client)

def new_model(
self, name: str, focoos_model: str, description: str
) -> Optional[RemoteModel]:
def new_model(self, name: str, focoos_model: str, description: str) -> RemoteModel:
"""
Creates a new model in the Focoos system.

Expand Down Expand Up @@ -238,7 +232,6 @@ def new_model(
logger.warning(f"Model already exists: {name}")
return self.get_model_by_name(name, remote=True)
logger.warning(f"Failed to create new model: {res.status_code} {res.text}")
return None

def list_shared_datasets(self) -> list[DatasetMetadata]:
"""
Expand Down Expand Up @@ -291,12 +284,8 @@ def _download_model(self, model_ref: str) -> str:
logger.info("📥 Downloading model from Focoos Cloud.. ")
response = self.http_client.get_external_url(download_uri, stream=True)
if response.status_code != 200:
logger.error(
f"Failed to download model: {response.status_code} {response.text}"
)
raise ValueError(
f"Failed to download model: {response.status_code} {response.text}"
)
logger.error(f"Failed to download model: {response.status_code} {response.text}")
raise ValueError(f"Failed to download model: {response.status_code} {response.text}")
total_size = int(response.headers.get("content-length", 0))
logger.info(f"📥 Size: {total_size / (1024**2):.2f} MB")

Expand Down Expand Up @@ -339,9 +328,7 @@ def get_dataset_by_name(self, name: str) -> Optional[DatasetMetadata]:
if name_lower == dataset.name.lower():
return dataset

def get_model_by_name(
self, name: str, remote: bool = True
) -> Optional[Union[RemoteModel, LocalModel]]:
def get_model_by_name(self, name: str, remote: bool = True) -> Union[RemoteModel, LocalModel]:
"""
Retrieves a model by its name.

Expand All @@ -360,3 +347,4 @@ def get_model_by_name(
return self.get_remote_model(model.ref)
else:
return self.get_local_model(model.ref)
raise ModelNotFound(f"Model not found: {name}")
20 changes: 6 additions & 14 deletions focoos/local_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,24 +126,18 @@ def _annotate(self, im: np.ndarray, detections: sv.Detections) -> np.ndarray:
"""
classes = self.metadata.classes
labels = [
f"{classes[int(class_id)] if classes is not None else str(class_id)}: {confid*100:.0f}%"
f"{classes[int(class_id)] if classes is not None else str(class_id)}: {confid * 100:.0f}%"
for class_id, confid in zip(detections.class_id, detections.confidence) # type: ignore
]
if self.metadata.task == FocoosTask.DETECTION:
annotated_im = self.box_annotator.annotate(
scene=im.copy(), detections=detections
)
annotated_im = self.box_annotator.annotate(scene=im.copy(), detections=detections)

annotated_im = self.label_annotator.annotate(
scene=annotated_im, detections=detections, labels=labels
)
annotated_im = self.label_annotator.annotate(scene=annotated_im, detections=detections, labels=labels)
elif self.metadata.task in [
FocoosTask.SEMSEG,
FocoosTask.INSTANCE_SEGMENTATION,
]:
annotated_im = self.mask_annotator.annotate(
scene=im.copy(), detections=detections
)
annotated_im = self.mask_annotator.annotate(scene=im.copy(), detections=detections)
return annotated_im

def infer(
Expand Down Expand Up @@ -185,10 +179,8 @@ def infer(
detections = self.runtime(im1.astype(np.float32), threshold)
t2 = perf_counter()
if resize:
detections = scale_detections(
detections, (resize, resize), (im0.shape[1], im0.shape[0])
)
logger.debug(f"Inference time: {t2-t1:.3f} seconds")
detections = scale_detections(detections, (resize, resize), (im0.shape[1], im0.shape[0]))
logger.debug(f"Inference time: {t2 - t1:.3f} seconds")
im = None
if annotate:
im = self._annotate(im0, detections)
Expand Down
Loading