diff --git a/copilot_embedding_models.json b/copilot_embedding_models.json new file mode 100644 index 0000000..5256290 --- /dev/null +++ b/copilot_embedding_models.json @@ -0,0 +1,19 @@ +{ + "copilot_models": [ + { + "provider": "Amazon Bedrock", + "model": "amazon.titan-embed-text-v2:0", + "provider_id": "bedrock" + }, + { + "provider": "Amazon Bedrock", + "model": "cohere.embed-english-v3", + "provider_id": "bedrock" + }, + { + "provider": "Amazon Bedrock", + "model": "cohere.embed-multilingual-v3", + "provider_id": "bedrock" + } + ] +} diff --git a/copilot_models.json b/copilot_models.json index 467c7cc..ef46221 100644 --- a/copilot_models.json +++ b/copilot_models.json @@ -7,37 +7,32 @@ }, { "provider": "Amazon Bedrock", - "model": "anthropic.claude-3-sonnet-20240229-v1:0", + "model": "anthropic.claude-3-5-haiku-20241022-v1:0", "provider_id": "bedrock-chat" }, { "provider": "Amazon Bedrock", - "model": "anthropic.claude-3-haiku-20240307-v1:0", + "model": "anthropic.claude-3-5-sonnet-20241022-v2:0", "provider_id": "bedrock-chat" }, { "provider": "Amazon Bedrock", - "model": "anthropic.claude-3-opus-20240229-v1:0", - "provider_id": "bedrock-chat" - }, - { - "provider": "Amazon Bedrock", - "model": "meta.llama3-70b-instruct-v1:0", + "model": "meta.llama3-1-70b-instruct-v1:0", "provider_id": "bedrock" }, { "provider": "Amazon Bedrock", - "model": "mistral.mixtral-8x7b-instruct-v0:1", + "model": "meta.llama3-1-405b-instruct-v1:0", "provider_id": "bedrock" }, { "provider": "Amazon Bedrock", - "model": "amazon.titan-embed-text-v1", + "model": "mistral.mixtral-8x7b-instruct-v0:1", "provider_id": "bedrock" }, { "provider": "Amazon Bedrock", - "model": "amazon.titan-embed-text-v2:0", + "model": "mistral.mistral-large-2402-v1:0", "provider_id": "bedrock" } ] diff --git a/packages/cloudera-ai-inference-package/cloudera_ai_inference_package/cloudera_ai_embedding_provider.py 
b/packages/cloudera-ai-inference-package/cloudera_ai_inference_package/cloudera_ai_embedding_provider.py index 74a4fd8..68eb152 100644 --- a/packages/cloudera-ai-inference-package/cloudera_ai_inference_package/cloudera_ai_embedding_provider.py +++ b/packages/cloudera-ai-inference-package/cloudera_ai_inference_package/cloudera_ai_embedding_provider.py @@ -22,6 +22,15 @@ class ClouderaAIInferenceEmbeddingModelProvider(BaseEmbeddingsProvider, Embeddin os.getenv("COPILOT_CONFIG_DIR", ""), model_type="embedding" ) + # Read from both config files, as embedding models could still be in the old config file for an older CML version. + embedding_ai_inference_models, embedding_models = getCopilotModels( + os.getenv("COPILOT_EMBEDDING_CONFIG_DIR", ""), model_type="embedding" + ) + + # Merge the lists from both config files (note: duplicates are not removed here). + ai_inference_models = ai_inference_models + embedding_ai_inference_models + models = models + embedding_models + def __init__(self, **kwargs): super().__init__(**kwargs) self.model_endpoint = self._get_inference_endpoint() diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 3f594a4..61d9f07 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -493,6 +493,13 @@ def getConfiguredThirdPartyModels(self): if copilot_config and "thirdPartyModels" in copilot_config and copilot_config["thirdPartyModels"]: third_party_models = copilot_config["thirdPartyModels"] + copilot_embedding_config_dir = os.getenv("COPILOT_EMBEDDING_CONFIG_DIR") + if copilot_embedding_config_dir and os.path.exists(copilot_embedding_config_dir): + with open(copilot_embedding_config_dir) as f: + copilot_embedding_config = json.load(f) + if copilot_embedding_config and "thirdPartyModels" in copilot_embedding_config and copilot_embedding_config["thirdPartyModels"]: + third_party_models += copilot_embedding_config["thirdPartyModels"] + + # Fill in provider_id if it is missing.
for third_party_model in third_party_models: print(third_party_model)