Fix precommit
Signed-off-by: Yuan Tang <[email protected]>
terrytangyuan committed Oct 11, 2024
1 parent 59b4a4b commit d7c0d2c
Showing 4 changed files with 10 additions and 3 deletions.
2 changes: 1 addition & 1 deletion docs/getting_started.md
@@ -73,7 +73,7 @@ docker run -it -p 5000:5000 -v ~/.llama:/root/.llama --gpus=all llamastack-local
```

> [!NOTE]
- > `~/.llama` should be the path containing downloaded weights of Llama models.
+ > `~/.llama` should be the path containing downloaded weights of Llama models.

#### Via conda
@@ -7,10 +7,11 @@
from .config import DatabricksImplConfig
from .databricks import DatabricksInferenceAdapter


async def get_adapter_impl(config: DatabricksImplConfig, _deps):
assert isinstance(
config, DatabricksImplConfig
), f"Unexpected config type: {type(config)}"
impl = DatabricksInferenceAdapter(config)
await impl.initialize()
-    return impl
+    return impl
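
For context, the hunk above is the adapter's async factory. A minimal, hypothetical sketch of exercising it directly follows; the import path and the empty deps argument are assumptions for illustration (the file path is not shown in this view), while `get_adapter_impl`, `DatabricksImplConfig`, and `DatabricksInferenceAdapter` come from the diff itself.

```python
# Hypothetical driver for the factory shown above.
# The module path is an assumption; adjust it to wherever the Databricks
# adapter package actually lives in the repository.
import asyncio

from llama_stack.providers.adapters.inference.databricks import get_adapter_impl
from llama_stack.providers.adapters.inference.databricks.config import DatabricksImplConfig


async def main() -> None:
    # The token value is a placeholder; any other fields the real config
    # class requires would need to be supplied as well.
    config = DatabricksImplConfig(api_token="dapi-placeholder")
    adapter = await get_adapter_impl(config, {})  # second argument is the deps mapping
    print(type(adapter).__name__)  # expected: DatabricksInferenceAdapter


if __name__ == "__main__":
    asyncio.run(main())
```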
@@ -19,4 +19,4 @@ class DatabricksImplConfig(BaseModel):
api_token: str = Field(
default=None,
description="The Databricks API token",
-    )
+    )
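
One small observation on the field above: it is annotated as `str` but defaults to `None`, so in practice the token has to be supplied explicitly. A hedged sketch of doing that from an environment variable; the variable name and helper are illustrative, not part of this commit.

```python
# Hypothetical helper; DATABRICKS_API_TOKEN is an assumed variable name.
import os

from pydantic import BaseModel, Field


class DatabricksImplConfig(BaseModel):
    # Mirrors the field shown in the diff above; other fields of the real
    # class, if any, are omitted here.
    api_token: str = Field(
        default=None,
        description="The Databricks API token",
    )


def config_from_env() -> DatabricksImplConfig:
    # Raises KeyError if the variable is not set, rather than passing None
    # into a field that is typed as str.
    return DatabricksImplConfig(api_token=os.environ["DATABRICKS_API_TOKEN"])
```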
6 changes: 6 additions & 0 deletions llama_stack/providers/impls/vllm/__init__.py
@@ -1,3 +1,9 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import Any

from .config import VLLMConfig