From d7c0d2c46f0aa3aed0c86483b273e2694db34bdf Mon Sep 17 00:00:00 2001
From: Yuan Tang
Date: Thu, 10 Oct 2024 21:08:19 -0400
Subject: [PATCH] Fix precommit

Signed-off-by: Yuan Tang
---
 docs/getting_started.md                                  | 2 +-
 .../providers/adapters/inference/databricks/__init__.py | 3 ++-
 .../providers/adapters/inference/databricks/config.py   | 2 +-
 llama_stack/providers/impls/vllm/__init__.py            | 6 ++++++
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/docs/getting_started.md b/docs/getting_started.md
index 32f4d2d15a..6c8c902c03 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -73,7 +73,7 @@ docker run -it -p 5000:5000 -v ~/.llama:/root/.llama --gpus=all llamastack-local
 ```
 
 > [!NOTE]
-> `~/.llama` should be the path containing downloaded weights of Llama models. 
+> `~/.llama` should be the path containing downloaded weights of Llama models.
 
 #### Via conda
 
diff --git a/llama_stack/providers/adapters/inference/databricks/__init__.py b/llama_stack/providers/adapters/inference/databricks/__init__.py
index 097579d250..ca2a0a1036 100644
--- a/llama_stack/providers/adapters/inference/databricks/__init__.py
+++ b/llama_stack/providers/adapters/inference/databricks/__init__.py
@@ -7,10 +7,11 @@
 from .config import DatabricksImplConfig
 from .databricks import DatabricksInferenceAdapter
 
+
 async def get_adapter_impl(config: DatabricksImplConfig, _deps):
     assert isinstance(
         config, DatabricksImplConfig
     ), f"Unexpected config type: {type(config)}"
     impl = DatabricksInferenceAdapter(config)
     await impl.initialize()
-    return impl
\ No newline at end of file
+    return impl
diff --git a/llama_stack/providers/adapters/inference/databricks/config.py b/llama_stack/providers/adapters/inference/databricks/config.py
index 927bb474c9..bb93a0a722 100644
--- a/llama_stack/providers/adapters/inference/databricks/config.py
+++ b/llama_stack/providers/adapters/inference/databricks/config.py
@@ -19,4 +19,4 @@ class DatabricksImplConfig(BaseModel):
     api_token: str = Field(
         default=None,
         description="The Databricks API token",
-    )
\ No newline at end of file
+    )
diff --git a/llama_stack/providers/impls/vllm/__init__.py b/llama_stack/providers/impls/vllm/__init__.py
index 3d5a81ad9e..aa0c4b1012 100644
--- a/llama_stack/providers/impls/vllm/__init__.py
+++ b/llama_stack/providers/impls/vllm/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
 from typing import Any
 
 from .config import VLLMConfig
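
For reference, a minimal sketch of how fixes like these could be reproduced locally with the pre-commit CLI, assuming the repository ships a .pre-commit-config.yaml that includes the standard trailing-whitespace and end-of-file-fixer hooks (the exact hook set is not shown in this patch, and the vLLM license header may have been added by hand rather than by a hook):

    # Install the git hook wrappers defined in .pre-commit-config.yaml (assumed to exist).
    pre-commit install

    # Run every configured hook against the whole tree; hooks such as
    # trailing-whitespace and end-of-file-fixer rewrite files in place,
    # producing edits like the ones in this patch.
    pre-commit run --all-files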