From a441b82ad71b502a1d0c451f9ea45c89eb8ed339 Mon Sep 17 00:00:00 2001 From: Alex Acebo Date: Tue, 11 Jun 2024 18:08:36 -0400 Subject: [PATCH] [JS] feat: add managed identity auth support for AOAI and update AI Search sample (#1725) ## Linked issues closes: #1714 #1665 #1664 #1666 ## Details - add managed identity auth support for AOAI and update AI Search sample - validate bot tenant id on incoming activity - fix citations parsing error when title is null --- js/packages/teams-ai/src/AI.ts | 182 +++++++++--------- .../teams-ai/src/actions/HttpError.spec.ts | 10 +- js/packages/teams-ai/src/actions/HttpError.ts | 10 +- .../teams-ai/src/actions/SayCommand.ts | 8 +- .../teams-ai/src/models/OpenAIModel.ts | 72 ++++--- js/packages/teams-ai/src/prompts/Message.ts | 6 +- .../h.datasource-azureOpenAI/.gitignore | 3 +- .../h.datasource-azureOpenAI/README.md | 29 +-- .../appPackage/color.png | 4 +- .../appPackage/manifest.json | 4 +- .../env/.env.dev.user | 5 +- .../h.datasource-azureOpenAI/env/.env.local | 7 +- .../env/.env.local.user | 5 +- .../infra/azure.bicep | 22 +-- .../infra/azure.parameters.json | 13 +- .../h.datasource-azureOpenAI/package.json | 3 +- .../h.datasource-azureOpenAI/sample.env | 3 +- .../h.datasource-azureOpenAI/src/app.ts | 64 +++++- .../h.datasource-azureOpenAI/src/index.ts | 4 + .../src/prompts/chat/config.json | 15 +- .../teamsapp.local.yml | 8 +- 21 files changed, 267 insertions(+), 210 deletions(-) diff --git a/js/packages/teams-ai/src/AI.ts b/js/packages/teams-ai/src/AI.ts index 734096c91..c15d9a0cf 100644 --- a/js/packages/teams-ai/src/AI.ts +++ b/js/packages/teams-ai/src/AI.ts @@ -347,106 +347,116 @@ export class AI { * @returns {Promise} True if the plan was completely executed, otherwise false. */ public async run(context: TurnContext, state: TState, start_time?: number, step_count?: number): Promise { - // Initialize start time and action count - const { max_steps, max_time } = this._options; - if (start_time === undefined) { - start_time = Date.now(); - } - if (step_count === undefined) { - step_count = 0; - } - - // Review input on first loop - let plan: Plan | undefined = - step_count == 0 ? await this._options.moderator.reviewInput(context, state) : undefined; - - // Generate plan - if (!plan) { - if (step_count == 0) { - plan = await this._options.planner.beginTask(context, state, this); - } else { - plan = await this._options.planner.continueTask(context, state, this); + try { + // Initialize start time and action count + const { max_steps, max_time } = this._options; + if (start_time === undefined) { + start_time = Date.now(); + } + if (step_count === undefined) { + step_count = 0; } - // Review the plans output - plan = await this._options.moderator.reviewOutput(context, state, plan); - } + // Review input on first loop + let plan: Plan | undefined = + step_count == 0 ? await this._options.moderator.reviewInput(context, state) : undefined; - // Process generated plan - let completed = false; - const response = await this._actions - .get(AI.PlanReadyActionName)! 
- .handler(context, state, plan, AI.PlanReadyActionName); - if (response == AI.StopCommandName) { - return false; - } + // Generate plan + if (!plan) { + if (step_count == 0) { + plan = await this._options.planner.beginTask(context, state, this); + } else { + plan = await this._options.planner.continueTask(context, state, this); + } - // Run predicted commands - // - If the plan ends on a SAY command then the plan is considered complete, otherwise we'll loop - completed = true; - let should_loop = false; - for (let i = 0; i < plan.commands.length; i++) { - // Check for timeout - if (Date.now() - start_time! > max_time || ++step_count! > max_steps) { - completed = false; - const parameters: actions.TooManyStepsParameters = { - max_steps, - max_time, - start_time: start_time!, - step_count: step_count! - }; - await this._actions - .get(AI.TooManyStepsActionName)! - .handler(context, state, parameters, AI.TooManyStepsActionName); - break; + // Review the plans output + plan = await this._options.moderator.reviewOutput(context, state, plan); } - let output: string; - const cmd = plan.commands[i]; - switch (cmd.type) { - case 'DO': { - const { action } = cmd as PredictedDoCommand; - if (this._actions.has(action)) { - // Call action handler - const handler = this._actions.get(action)!.handler; - output = await this._actions - .get(AI.DoCommandActionName)! - .handler(context, state, { handler, ...(cmd as PredictedDoCommand) }, action); - should_loop = output.length > 0; - state.temp.actionOutputs[action] = output; - } else { - // Redirect to UnknownAction handler - output = await this._actions.get(AI.UnknownActionName)!.handler(context, state, plan, action); - } + // Process generated plan + let completed = false; + const response = await this._actions + .get(AI.PlanReadyActionName)! + .handler(context, state, plan, AI.PlanReadyActionName); + if (response == AI.StopCommandName) { + return false; + } + + // Run predicted commands + // - If the plan ends on a SAY command then the plan is considered complete, otherwise we'll loop + completed = true; + let should_loop = false; + for (let i = 0; i < plan.commands.length; i++) { + // Check for timeout + if (Date.now() - start_time! > max_time || ++step_count! > max_steps) { + completed = false; + const parameters: actions.TooManyStepsParameters = { + max_steps, + max_time, + start_time: start_time!, + step_count: step_count! + }; + await this._actions + .get(AI.TooManyStepsActionName)! + .handler(context, state, parameters, AI.TooManyStepsActionName); break; } - case 'SAY': - should_loop = false; - output = await this._actions - .get(AI.SayCommandActionName)! - .handler(context, state, cmd, AI.SayCommandActionName); + + let output: string; + const cmd = plan.commands[i]; + switch (cmd.type) { + case 'DO': { + const { action } = cmd as PredictedDoCommand; + if (this._actions.has(action)) { + // Call action handler + const handler = this._actions.get(action)!.handler; + output = await this._actions + .get(AI.DoCommandActionName)! + .handler(context, state, { handler, ...(cmd as PredictedDoCommand) }, action); + should_loop = output.length > 0; + state.temp.actionOutputs[action] = output; + } else { + // Redirect to UnknownAction handler + output = await this._actions.get(AI.UnknownActionName)!.handler(context, state, plan, action); + } + break; + } + case 'SAY': + should_loop = false; + output = await this._actions + .get(AI.SayCommandActionName)! 
+ .handler(context, state, cmd, AI.SayCommandActionName); + break; + default: + throw new Error(`AI.run(): unknown command of '${cmd.type}' predicted.`); + } + + // Check for stop command + if (output == AI.StopCommandName) { + completed = false; break; - default: - throw new Error(`AI.run(): unknown command of '${cmd.type}' predicted.`); - } + } - // Check for stop command - if (output == AI.StopCommandName) { - completed = false; - break; + // Copy the actions output to the input + state.temp.lastOutput = output; + state.temp.input = output; + state.temp.inputFiles = []; } - // Copy the actions output to the input - state.temp.lastOutput = output; - state.temp.input = output; - state.temp.inputFiles = []; - } + // Check for looping + if (completed && should_loop && this._options.allow_looping) { + return await this.run(context, state, start_time, step_count); + } - // Check for looping - if (completed && should_loop && this._options.allow_looping) { - return await this.run(context, state, start_time, step_count); - } else { return completed; + } catch (err) { + const onHttpError = this._actions.get(AI.HttpErrorActionName); + + if (onHttpError) { + await onHttpError.handler(context, state, err, AI.HttpErrorActionName); + } + + return false; } } } diff --git a/js/packages/teams-ai/src/actions/HttpError.spec.ts b/js/packages/teams-ai/src/actions/HttpError.spec.ts index 58a76f995..d22219d94 100644 --- a/js/packages/teams-ai/src/actions/HttpError.spec.ts +++ b/js/packages/teams-ai/src/actions/HttpError.spec.ts @@ -5,9 +5,15 @@ import { httpError } from './HttpError'; describe('actions.httpError', () => { const handler = httpError(); - it('should throw', async () => { + it('should throw default error', async () => { assert.rejects(async () => { - await handler(); + await handler({} as any, {} as any); }, 'An AI http request failed'); }); + + it('should throw given error', async () => { + assert.rejects(async () => { + await handler({} as any, {} as any, new Error('a given error')); + }, 'a given error'); + }); }); diff --git a/js/packages/teams-ai/src/actions/HttpError.ts b/js/packages/teams-ai/src/actions/HttpError.ts index d928861ca..e2ff2b3b8 100644 --- a/js/packages/teams-ai/src/actions/HttpError.ts +++ b/js/packages/teams-ai/src/actions/HttpError.ts @@ -6,11 +6,15 @@ * Licensed under the MIT License. 
*/ +import { TurnContext } from 'botbuilder-core'; + +import { TurnState } from '../TurnState'; + /** * @private */ -export function httpError() { - return async (): Promise => { - throw new Error(`An AI http request failed`); +export function httpError() { + return async (_context: TurnContext, _state: TState, err?: Error): Promise => { + throw err || new Error(`An AI http request failed`); }; } diff --git a/js/packages/teams-ai/src/actions/SayCommand.ts b/js/packages/teams-ai/src/actions/SayCommand.ts index 0fa316a52..106c174ac 100644 --- a/js/packages/teams-ai/src/actions/SayCommand.ts +++ b/js/packages/teams-ai/src/actions/SayCommand.ts @@ -173,15 +173,17 @@ export function sayCommand(feedbackLoopEna if (data.response.context && data.response.context.citations.length > 0) { citations = data.response.context!.citations.map((citation, i) => { - return { + const clientCitation: ClientCitation = { '@type': 'Claim', position: `${i + 1}`, appearance: { '@type': 'DigitalDocument', - name: citation.title, + name: citation.title || `Document #${i + 1}`, abstract: Utilities.snippet(citation.content, 500) } - } as ClientCitation; + }; + + return clientCitation; }); } diff --git a/js/packages/teams-ai/src/models/OpenAIModel.ts b/js/packages/teams-ai/src/models/OpenAIModel.ts index f5dd9c2ce..8486c436b 100644 --- a/js/packages/teams-ai/src/models/OpenAIModel.ts +++ b/js/packages/teams-ai/src/models/OpenAIModel.ts @@ -132,7 +132,7 @@ export interface AzureOpenAIModelOptions extends BaseOpenAIModelOptions { /** * API key to use when making requests to Azure OpenAI. */ - azureApiKey: string; + azureApiKey?: string; /** * Default name of the Azure OpenAI deployment (model) to use. @@ -148,6 +148,12 @@ export interface AzureOpenAIModelOptions extends BaseOpenAIModelOptions { * Optional. Version of the API being called. Defaults to `2023-05-15`. */ azureApiVersion?: string; + + /** + * Optional. A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory), + * which will be invoked on every request. 
+ */ + azureADTokenProvider?: () => Promise; } /** @@ -170,7 +176,7 @@ export class OpenAIModel implements PromptCompletionModel { */ public constructor(options: OpenAIModelOptions | AzureOpenAIModelOptions | OpenAILikeModelOptions) { // Check for azure config - if ((options as AzureOpenAIModelOptions).azureApiKey) { + if ('azureApiKey' in options || 'azureADTokenProvider' in options) { this._useAzure = true; this.options = Object.assign( { @@ -377,41 +383,57 @@ export class OpenAIModel implements PromptCompletionModel { // Initialize request config const requestConfig: AxiosRequestConfig = Object.assign({}, this.options.requestConfig); - // Initialize request headers if (!requestConfig.headers) { requestConfig.headers = {}; } + if (!requestConfig.headers['Content-Type']) { requestConfig.headers['Content-Type'] = 'application/json'; } + if (!requestConfig.headers['User-Agent']) { requestConfig.headers['User-Agent'] = this.UserAgent; } - if (this._useAzure) { - const options = this.options as AzureOpenAIModelOptions; - requestConfig.headers['api-key'] = options.azureApiKey; - } else if ((this.options as OpenAIModelOptions).apiKey) { - const options = this.options as OpenAIModelOptions; - requestConfig.headers['Authorization'] = `Bearer ${options.apiKey}`; - if (options.organization) { - requestConfig.headers['OpenAI-Organization'] = options.organization; + + if ('apiKey' in this.options) { + requestConfig.headers['api-key'] = this.options.apiKey || ''; + } + + if ('azureApiKey' in this.options || 'azureADTokenProvider' in this.options) { + let apiKey = this.options.azureApiKey; + + if (!apiKey && this.options.azureADTokenProvider) { + apiKey = await this.options.azureADTokenProvider(); } + + requestConfig.headers['Authorization'] = `Bearer ${apiKey}`; } - // Send request - const response = await this._httpClient.post(url, body, requestConfig); - - // Check for rate limit error - if ( - response.status == 429 && - Array.isArray(this.options.retryPolicy) && - retryCount < this.options.retryPolicy.length - ) { - const delay = this.options.retryPolicy[retryCount]; - await new Promise((resolve) => setTimeout(resolve, delay)); - return this.post(url, body, retryCount + 1); - } else { - return response; + if ('organization' in this.options && this.options.organization) { + requestConfig.headers['OpenAI-Organization'] = this.options.organization; + } + + try { + const res = await this._httpClient.post(url, body, requestConfig); + + // Check for rate limit error + if ( + res.status == 429 && + Array.isArray(this.options.retryPolicy) && + retryCount < this.options.retryPolicy.length + ) { + const delay = this.options.retryPolicy[retryCount]; + await new Promise((resolve) => setTimeout(resolve, delay)); + return this.post(url, body, retryCount + 1); + } + + return res; + } catch (err) { + if (this.options.logRequests) { + console.error(Colorize.error(err as Error)); + } + + throw err; } } } diff --git a/js/packages/teams-ai/src/prompts/Message.ts b/js/packages/teams-ai/src/prompts/Message.ts index d21545900..1d3082328 100644 --- a/js/packages/teams-ai/src/prompts/Message.ts +++ b/js/packages/teams-ai/src/prompts/Message.ts @@ -90,17 +90,17 @@ export interface Citation { /** * The title of the citation. */ - title: string; + title: string | null; /** * The URL of the citation. */ - url: string; + url: string | null; /** * The filepath of the document. 
*/ - filepath: string; + filepath: string | null; } export interface MessageContext { diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/.gitignore b/js/samples/04.ai-apps/h.datasource-azureOpenAI/.gitignore index f4db1f72b..7a768f27b 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/.gitignore +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/.gitignore @@ -112,4 +112,5 @@ env/.env.*.user env/.env.local appPackage/build .deployment -devTools/ \ No newline at end of file +devTools/ +build \ No newline at end of file diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/README.md b/js/samples/04.ai-apps/h.datasource-azureOpenAI/README.md index 10ebc5b54..30b740285 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/README.md +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/README.md @@ -1,6 +1,7 @@ # Azure OpenAI On Your Data -The following is a conversational bot that uses the Azure OpenAI Chat Completions API `Azure OpenAI on Your Data` feature to facilitate RAG (retrieval augmentation) using Azure AI Search as the Azure data source. +The following is a custom copilot that uses the Azure OpenAI Chat Completions API ‘Azure OpenAI On Your Data’ feature to facilitate RAG (retrieval augmented generation). +You can chat with your data in Azure AI Search, Azure Blob Storage, URL/web address, Azure Cosmos DB for MongoDB vCore, uploaded files, and Elasticsearch. @@ -17,7 +18,9 @@ The following is a conversational bot that uses the Azure OpenAI Chat Completion ## Summary -This sample shows how to integrate your Azure AI Search index as a data source into prompt templates through the Azure Chat Completions API. +This sample shows how to integrate your search index as a data source into prompt templates through the Azure Chat Completions API. + +> Note: this sample uses managed identity, ensure your Azure OpenAI and AI Search services are configured properly https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/use-your-data-securely ### Example Interaction @@ -62,33 +65,15 @@ This sample shows how to integrate your Azure AI Search index as a data source i > [!NOTE] > Please note that at this time, this sample is only supported with Azure OpenAI. -1. Fill the `AZURE_OPENAI_KEY`, `AZURE_OPENAI_ENDPOINT` variables appropriately. +1. Fill the `AZURE_OPENAI_ENDPOINT`, `AZURE_SEARCH_ENDPOINT`, and `AZURE_SEARCH_INDEX` variables appropriately. 1. Follow the [use your data quickstart instructions](https://learn.microsoft.com/en-us/azure/ai-services/openai/use-your-data-quickstart?tabs=command-line%2Cpython-new&pivots=programming-language-studio#add-your-data-using-azure-openai-studio) to add your data using Azure OpenAI Studio. Select `Upload files` as the data source. You can upload the `nba.pdf` file. Take note of the index name. -1. Update `prompts/chat/config.json` by adding the missing fields in the `data_sources` property: - -```json -"data_sources": [ - { - "type": "azure_search", - "parameters": { - "endpoint": "AZURE-AI-SEARCH-ENDPOINT", - "index_name": "nba", - "authentication": { - "type": "api_key", - "key": "AZURE-AI-SEARCH-KEY" - } - } - } -] -``` - ## Testing the sample The easiest and fastest way to get up and running is with Teams Toolkit as your development guide. To use Teams Toolkit to automate setup and debugging, please [continue below](#using-teams-toolkit-for-visual-studio-code). -1. 1. Fill the `AZURE_OPENAI_KEY`, `AZURE_OPENAI_ENDPOINT` in the `./env/.env.local.user` file. +1. 
Fill the `AZURE_OPENAI_ENDPOINT`, `AZURE_SEARCH_ENDPOINT`, and `AZURE_SEARCH_INDEX` in the `./env/.env.local.user` file.
 1. Ensure you have downloaded and installed [Visual Studio Code](https://code.visualstudio.com/docs/setup/setup-overview)
 1. Install the [Teams Toolkit extension](https://marketplace.visualstudio.com/items?itemName=TeamsDevApp.ms-teams-vscode-extension)
 1. Copy this sample into a new folder outside of teams-ai
diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/color.png b/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/color.png
index 8a9a4a005..ee7960032 100644
--- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/color.png
+++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/color.png
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57b4440be2e604a74c52f0f38caac7191b9741ef0c48c9fcb2db2dc7feb8b000
-size 5098
+oid sha256:91325633e84d222c2d65ada9b58155394a444628391de9c6b594fd20a293f220
+size 15811
diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/manifest.json b/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/manifest.json
index eb24c681c..40c882f9b 100644
--- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/manifest.json
+++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/appPackage/manifest.json
@@ -16,8 +16,8 @@
         "termsOfUseUrl": "https://www.microsoft.com/legal/terms-of-use"
     },
     "description": {
-        "short": "Sample bot that can do RAG through Azure AI Search",
-        "full": "Sample bot that can do RAG through Azure AI Search"
+        "short": "Custom copilot to chat with your data via RAG through Azure OpenAI On Your Data",
+        "full": "Custom copilot to chat with your data via RAG (retrieval augmented generation) through Azure OpenAI On Your Data. Supported data sources: Azure AI Search, Azure Blob Storage, URL/web address, Azure Cosmos DB for MongoDB vCore, uploaded files, and Elasticsearch."
}, "icons": { "outline": "outline.png", diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.dev.user b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.dev.user index abb5a3f43..6e29825fe 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.dev.user +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.dev.user @@ -1,3 +1,4 @@ SECRET_BOT_PASSWORD= -SECRET_AZURE_OPENAI_KEY= -AZURE_OPENAI_ENDPOINT= \ No newline at end of file +AZURE_OPENAI_ENDPOINT= +AZURE_SEARCH_ENDPOINT= +AZURE_SEARCH_INDEX= \ No newline at end of file diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local index afd6eddd6..ecb89b391 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local @@ -1,7 +1,10 @@ BOT_ENDPOINT= BOT_DOMAIN= TEAMSFX_ENV=local -APP_NAME_SUFFIXlocal +APP_NAME_SUFFIX=local TEAMS_APP_ID= TEAMS_APP_TENANT_ID= -BOT_ID= \ No newline at end of file +BOT_ID= +AZURE_OPENAI_ENDPOINT= +AZURE_SEARCH_ENDPOINT= +AZURE_SEARCH_INDEX= \ No newline at end of file diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local.user b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local.user index abb5a3f43..6e29825fe 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local.user +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/env/.env.local.user @@ -1,3 +1,4 @@ SECRET_BOT_PASSWORD= -SECRET_AZURE_OPENAI_KEY= -AZURE_OPENAI_ENDPOINT= \ No newline at end of file +AZURE_OPENAI_ENDPOINT= +AZURE_SEARCH_ENDPOINT= +AZURE_SEARCH_INDEX= \ No newline at end of file diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.bicep b/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.bicep index ba1d116f7..c11f44a1c 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.bicep +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.bicep @@ -6,17 +6,9 @@ param resourceBaseName string @description('Required when create Azure Bot service') param botAadAppClientId string -@secure() -param azureOpenAIKey string = '' - -@secure() param azureOpenAIEndpoint string = '' - -@secure() -param azureSearchKey string = '' - -@secure() param azureSearchEndpoint string = '' +param azureSearchIndex string = '' @secure() @description('Required by Bot Framework package in your bot project') @@ -72,22 +64,18 @@ resource webApp 'Microsoft.Web/sites@2021-02-01' = { name: 'BOT_PASSWORD' value: botAadAppClientSecret } - { - name: 'AZURE_OPENAI_KEY' - value: azureOpenAIKey - } { name: 'AZURE_OPENAI_ENDPOINT' value: azureOpenAIEndpoint } - { - name: 'AZURE_SEARCH_KEY' - value: azureSearchKey - } { name: 'AZURE_SEARCH_ENDPOINT' value: azureSearchEndpoint } + { + name: 'AZURE_SEARCH_INDEX' + value: azureSearchIndex + } ] ftpsState: 'FtpsOnly' } diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.parameters.json b/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.parameters.json index 4b6d3d439..0e2eda3a6 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.parameters.json +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/infra/azure.parameters.json @@ -17,17 +17,14 @@ "botDisplayName": { "value": "TeamsAzureOpenAI" }, - "azureOpenAIKey": { - "value": "${{SECRET_AZURE_OPENAI_KEY}}" - }, "azureOpenAIEndpoint": { - "value": "${{SECRET_AZURE_OPENAI_ENDPOINT}}" - }, - "azureSearchKey": { - "value": 
"${{SECRET_AZURE_SEARCH_KEY}}" + "value": "${{AZURE_OPENAI_ENDPOINT}}" }, "azureSearchEndpoint": { - "value": "${{SECRET_AZURE_SEARCH_ENDPOINT}}" + "value": "${{AZURE_SEARCH_ENDPOINT}}" + }, + "azureSearchIndex": { + "value": "${{AZURE_SEARCH_INDEX}}" } } } diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/package.json b/js/samples/04.ai-apps/h.datasource-azureOpenAI/package.json index 6182434bd..8eb262741 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/package.json +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/package.json @@ -25,6 +25,7 @@ "url": "https://github.com/microsoft/teams-ai" }, "dependencies": { + "@azure/identity": "^4.2.1", "@azure/search-documents": "12.0.0", "@microsoft/teams-ai": "~1.2.1", "@microsoft/teamsfx": "^2.3.2", @@ -46,7 +47,7 @@ "env-cmd": "^10.1.0", "eslint": "^8.57.0", "nodemon": "~3.0.1", - "prettier": "^3.3.1", + "prettier": "^3.3.2", "rimraf": "^5.0.7", "ts-node": "^10.9.2", "typescript": "^5.4.5" diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/sample.env b/js/samples/04.ai-apps/h.datasource-azureOpenAI/sample.env index 7cb80aa15..d315e6223 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/sample.env +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/sample.env @@ -1,5 +1,6 @@ # This is an example file of how to set up environment variables. You can duplicate this file and add the appropriate keys. -AZURE_OPENAI_KEY= AZURE_OPENAI_ENDPOINT= +AZURE_SEARCH_ENDPOINT= +AZURE_SEARCH_INDEX= DEBUG='' # set to * to enable all debugging \ No newline at end of file diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/app.ts b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/app.ts index cf2b7e217..a38b6bea0 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/app.ts +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/app.ts @@ -1,7 +1,10 @@ import { OpenAIModel, PromptManager, ActionPlanner, Application, TurnState, TeamsAdapter } from '@microsoft/teams-ai'; +import { DefaultAzureCredential, getBearerTokenProvider } from '@azure/identity'; import { ConfigurationServiceClientCredentialFactory, MemoryStorage, TurnContext } from 'botbuilder'; +import axios from 'axios'; import path from 'path'; import debug from 'debug'; +import fs from 'fs'; const error = debug('azureopenai:app:error'); error.log = console.log.bind(console); @@ -9,20 +12,20 @@ error.log = console.log.bind(console); interface ConversationState {} type ApplicationTurnState = TurnState; -if (!process.env.AZURE_OPENAI_KEY || !process.env.AZURE_OPENAI_ENDPOINT) { - throw new Error('Missing environment variables - please check that AZURE_OPENAI_KEY and AZURE_OPENAI_ENDPOINT'); -} - // Create AI components const model = new OpenAIModel({ // Azure OpenAI Support - azureApiKey: process.env.AZURE_OPENAI_KEY!, azureDefaultDeployment: 'gpt-35-turbo', azureEndpoint: process.env.AZURE_OPENAI_ENDPOINT!, azureApiVersion: '2024-02-15-preview', + azureADTokenProvider: getBearerTokenProvider( + new DefaultAzureCredential(), + 'https://cognitiveservices.azure.com/.default' + ), // Request logging - logRequests: true + logRequests: true, + useSystemMessages: true }); const prompts = new PromptManager({ @@ -32,17 +35,43 @@ const prompts = new PromptManager({ const planner = new ActionPlanner({ model, prompts, - defaultPrompt: 'chat' + defaultPrompt: async () => { + const prompt = await prompts.getPrompt('chat'); + + prompt.config.completion.model = 'gpt-4o'; + + if (process.env.AZURE_SEARCH_ENDPOINT) { + (prompt.config.completion as 
any).data_sources = [{ + type: 'azure_search', + parameters: { + endpoint: process.env.AZURE_SEARCH_ENDPOINT, + index_name: process.env.AZURE_SEARCH_INDEX, + semantic_configuration: 'default', + query_type: 'simple', + fields_mapping: { }, + in_scope: true, + strictness: 3, + top_n_documents: 5, + role_information: fs.readFileSync(path.join(__dirname, '../src/prompts/chat/skprompt.txt')).toString('utf-8'), + authentication: { + type: 'system_assigned_managed_identity' + } + } + }]; + } + + return prompt; + } }); // Define storage and application const storage = new MemoryStorage(); export const app = new Application({ + storage, ai: { planner: planner, enable_feedback_loop: true }, - storage: storage, adapter: new TeamsAdapter( {}, new ConfigurationServiceClientCredentialFactory({ @@ -53,12 +82,27 @@ export const app = new Application({ ) }); +app.conversationUpdate('membersAdded', async (context) => { + await context.sendActivity('Welcome! I\'m a conversational bot that can tell you about your data. You can also type `/clear` to clear the conversation history.'); +}); + +app.message('/clear', async (context, state) => { + state.deleteConversationState(); + await context.sendActivity('New chat session started: Previous messages won\'t be used as context for new queries.'); +}); + app.error(async (context: TurnContext, err: any) => { // This check writes out errors to console log .vs. app insights. // NOTE: In production environment, you should consider logging this to Azure // application insights. error(`[onTurnError] unhandled error: ${err}`); - error(err); + + if (err instanceof axios.AxiosError) { + error(err.toJSON()); + error(err.response?.data); + } else { + error(err); + } // Send a trace activity, which will be displayed in Bot Framework Emulator await context.sendTraceActivity( @@ -73,7 +117,7 @@ app.error(async (context: TurnContext, err: any) => { await context.sendActivity('To continue to run this bot, please fix the bot source code.'); }); -app.feedbackLoop(async (context, state, feedbackLoopData) => { +app.feedbackLoop(async (_context, _state, feedbackLoopData) => { if (feedbackLoopData.actionValue.reaction === 'like') { console.log('👍'); } else { diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/index.ts b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/index.ts index 1eed3d71f..33d7702b4 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/index.ts +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/index.ts @@ -30,6 +30,10 @@ server.listen(port, () => { server.post('/api/messages', async (req, res) => { // Route received a request to adapter for processing await (app.adapter as TeamsAdapter).process(req, res as any, async (context) => { + if (context.activity.conversation.tenantId !== process.env.BOT_TENANT_ID) { + return res.send(401, 'invalid tenant'); + } + // Dispatch to application for routing await app.run(context); }); diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/prompts/chat/config.json b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/prompts/chat/config.json index 9d2326760..2aff53237 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/prompts/chat/config.json +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/src/prompts/chat/config.json @@ -13,20 +13,7 @@ "top_p": 0.0, "presence_penalty": 0.6, "frequency_penalty": 0.0, - "stop_sequences": [], - "data_sources": [ - { - "type": "azure_search", - "parameters": { - "endpoint": "AZURE-AI-SEARCH-ENDPOINT", - "index_name": "nba", - "authentication": 
{ - "type": "api_key", - "key": "AZURE-AI-SEARCH-KEY" - } - } - } - ] + "stop_sequences": [] }, "augmentation": { "augmentation_type": "none" diff --git a/js/samples/04.ai-apps/h.datasource-azureOpenAI/teamsapp.local.yml b/js/samples/04.ai-apps/h.datasource-azureOpenAI/teamsapp.local.yml index 89a0363fe..58a736acb 100644 --- a/js/samples/04.ai-apps/h.datasource-azureOpenAI/teamsapp.local.yml +++ b/js/samples/04.ai-apps/h.datasource-azureOpenAI/teamsapp.local.yml @@ -53,7 +53,7 @@ deploy: envs: BOT_ID: ${{BOT_ID}} BOT_PASSWORD: ${{SECRET_BOT_PASSWORD}} - AZURE_OPENAI_KEY: ${{SECRET_AZURE_OPENAI_KEY}} - AZURE_OPENAI_ENDPOINT: ${{SECRET_AZURE_OPENAI_ENDPOINT}} - AZURE_SEARCH_KEY: ${{SECRET_AZURE_SEARCH_KEY}} - AZURE_SEARCH_ENDPOINT: ${{SECRET_AZURE_SEARCH_ENDPOINT}} + BOT_TENANT_ID: ${{TEAMS_APP_TENANT_ID}} + AZURE_OPENAI_ENDPOINT: ${{AZURE_OPENAI_ENDPOINT}} + AZURE_SEARCH_ENDPOINT: ${{AZURE_SEARCH_ENDPOINT}} + AZURE_SEARCH_INDEX: ${{AZURE_SEARCH_INDEX}}
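---

For reviewers, a minimal usage sketch of the managed identity path this PR adds to `OpenAIModel`, mirroring the sample's `src/app.ts` changes above: the model is constructed without `azureApiKey`, and `azureADTokenProvider` supplies a bearer token on every request via `@azure/identity`. The deployment name and scope shown are the sample's values; swap in your own endpoint and deployment.

```typescript
import { OpenAIModel } from '@microsoft/teams-ai';
import { DefaultAzureCredential, getBearerTokenProvider } from '@azure/identity';

// Managed identity / Microsoft Entra auth: no azureApiKey needed.
// The token provider is invoked per request and the token is sent
// as an Authorization: Bearer header to Azure OpenAI.
const model = new OpenAIModel({
    azureDefaultDeployment: 'gpt-35-turbo',
    azureEndpoint: process.env.AZURE_OPENAI_ENDPOINT!,
    azureApiVersion: '2024-02-15-preview',
    azureADTokenProvider: getBearerTokenProvider(
        new DefaultAzureCredential(),
        'https://cognitiveservices.azure.com/.default'
    ),
    logRequests: true
});
```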