Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

KF-6468 feat: Adding options to start server with artifacts options #288

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -37,3 +37,13 @@ options:
description: Deploy the NodePort service for MLFlow
type: boolean
default: true
serve_artifacts:
description: |
Enables serving of artifact requests by routing these requests to the ./mlartifacts directory.
type: boolean
default: false
artifacts_destination:
description: |
Base location URI used to resolve artifact requests.
type: string
default: './mlartifacts'
9 changes: 8 additions & 1 deletion src/charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ def __init__(self, *args):

self.logger = logging.getLogger(__name__)
self._port = self.model.config["mlflow_port"]
self._serve_artifacts = self.model.config.get("serve_artifacts", False)
self._artifacts_destination = self.model.config.get("artifacts_destination", "")
self._exporter_port = self.model.config["mlflow_prometheus_exporter_port"]
self._container_name = "mlflow-server"
self._exporter_container_name = "mlflow-prometheus-exporter"
Expand Down Expand Up @@ -199,7 +201,11 @@ def _get_env_vars(self, relational_db_data, object_storage_data):

def _charmed_mlflow_layer(self, env_vars, default_artifact_root) -> Layer:
"""Create and return Pebble framework layer."""

serve_artifacts = ""
if self._serve_artifacts:
serve_artifacts = (
f"--serve-artifacts --artifacts-destination {self._artifacts_destination}"
)
layer_config = {
"summary": "mlflow-server layer",
"description": "Pebble config layer for mlflow-server",
Expand All @@ -214,6 +220,7 @@ def _charmed_mlflow_layer(self, env_vars, default_artifact_root) -> Layer:
"0.0.0.0 "
"--port "
f"{self._port} "
f"{serve_artifacts} "
"--backend-store-uri "
f"{env_vars['MLFLOW_TRACKING_URI']} "
"--default-artifact-root "
Expand Down
32 changes: 31 additions & 1 deletion tests/unit/test_operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,19 @@
"summary": "Entrypoint of mlflow-server image",
"startup": "enabled",
"override": "replace",
"command": "mlflow server --host 0.0.0.0 --port 5000 --backend-store-uri test --default-artifact-root s3:/// --expose-prometheus /metrics", # noqa: E501
"command": "mlflow server --host 0.0.0.0 --port 5000 --backend-store-uri test --default-artifact-root s3:/// --expose-prometheus /metrics", # noqa: E501
"environment": {"MLFLOW_TRACKING_URI": "test"},
},
)
}
EXPECTED_SERVICE_ARTIFACTS = {
"mlflow-server": Service(
"mlflow-server",
raw={
"summary": "Entrypoint of mlflow-server image",
"startup": "enabled",
"override": "replace",
"command": "mlflow server --host 0.0.0.0 --port 5000 --serve-artifacts --artifacts-destination s3:/// --backend-store-uri test --default-artifact-root s3:/// --expose-prometheus /metrics", # noqa: E501
"environment": {"MLFLOW_TRACKING_URI": "test"},
},
)
Expand Down Expand Up @@ -425,6 +437,24 @@ def test_update_layer_success(
)
assert harness.charm.container.get_plan().services == EXPECTED_SERVICE

@patch(
"charm.KubernetesServicePatch",
lambda x, y, service_name, service_type, refresh_event: None,
)
def test_config_artifact_success(
self,
harness: Harness,
):
harness.update_config({"serve_artifacts": True, "artifacts_destination": "s3:///"})
harness.begin()
harness.charm._update_layer(
harness.charm.container,
harness.charm._container_name,
harness.charm._charmed_mlflow_layer({"MLFLOW_TRACKING_URI": "test"}, ""),
)
updated_plan = harness.get_container_pebble_plan('mlflow-server').to_dict()
assert harness.charm.container.get_plan().services == EXPECTED_SERVICE_ARTIFACTS

@patch(
"charm.KubernetesServicePatch",
lambda x, y, service_name, service_type, refresh_event: None,
Expand Down
Loading