From f75b6be87e916d12dda4ea53135c7ab0e1cd71bb Mon Sep 17 00:00:00 2001 From: Chris Rickman Date: Wed, 8 Jan 2025 14:33:15 -0800 Subject: [PATCH 1/5] Checkpoint --- dotnet/Directory.Packages.props | 11 +- dotnet/SK-dotnet.sln | 21 +- dotnet/samples/Concepts/Concepts.csproj | 1 + .../GettingStartedWithAgents.csproj | 1 + .../GettingStartedWithAgents/Step12_Azure.cs | 141 +++ .../AzureAI/AgentAIThreadCreationOptions.cs | 31 + .../src/Agents/AzureAI/Agents.AzureAI.csproj | 45 + dotnet/src/Agents/AzureAI/AzureAIAgent.cs | 317 +++++++ dotnet/src/Agents/AzureAI/AzureAIChannel.cs | 63 ++ .../Agents/AzureAI/AzureAIClientProvider.cs | 111 +++ .../AzureAI/AzureAIInvocationOptions.cs | 104 ++ .../AzureAI/Extensions/AgentExtensions.cs | 19 + .../AzureAI/Extensions/AgentRunExtensions.cs | 68 ++ .../Extensions/KernelFunctionExtensions.cs | 93 ++ .../AzureAI/Internal/AgentMessageFactory.cs | 79 ++ .../AzureAI/Internal/AgentThreadActions.cs | 894 ++++++++++++++++++ .../Agents/AzureAI/Properties/AssemblyInfo.cs | 6 + .../src/Agents/AzureAI/RunPollingOptions.cs | 73 ++ .../OpenAI/Internal/AddHeaderRequestPolicy.cs | 13 - .../OpenAI/Internal/AssistantThreadActions.cs | 17 - .../Agents/UnitTests/Agents.UnitTests.csproj | 4 + .../Azure/AddHeaderRequestPolicyTests.cs | 37 - .../azure/AzureAIUtilities.props | 5 + .../Policies/GeneratedActionPipelinePolicy.cs | 32 + .../samples/AgentUtilities/BaseAgentsTest.cs | 22 +- .../Contents/AnnotationContent.cs | 2 +- .../Contents/KernelContent.cs | 2 +- .../Contents/StreamingAnnotationContent.cs | 2 +- .../Contents/AnnotationContentTests.cs | 2 +- .../Contents/ChatMessageContentTests.cs | 2 +- .../StreamingAnnotationContentTests.cs | 2 +- 31 files changed, 2135 insertions(+), 85 deletions(-) create mode 100644 dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs create mode 100644 dotnet/src/Agents/AzureAI/AgentAIThreadCreationOptions.cs create mode 100644 dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj create mode 100644 dotnet/src/Agents/AzureAI/AzureAIAgent.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIChannel.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs create mode 100644 dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs create mode 100644 dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs create mode 100644 dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs create mode 100644 dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs create mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs create mode 100644 dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs create mode 100644 dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs create mode 100644 dotnet/src/Agents/AzureAI/RunPollingOptions.cs delete mode 100644 dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs delete mode 100644 dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs create mode 100644 dotnet/src/InternalUtilities/azure/AzureAIUtilities.props create mode 100644 dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index b59131230513..b743f25187eb 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -8,7 +8,13 @@ + + + + + + @@ -21,11 +27,6 @@ - - - - - diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 0a711f84f5f3..76bb693a61b2 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -439,6 +439,8 @@ 
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "sk-chatgpt-azure-function", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "kernel-functions-generator", "samples\Demos\CreateChatGptPlugin\MathPlugin\kernel-functions-generator\kernel-functions-generator.csproj", "{78785CB1-66CF-4895-D7E5-A440DD84BE86}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Agents.AzureAI", "src\Agents\AzureAI\Agents.AzureAI.csproj", "{EA35F1B5-9148-4189-BE34-5E00AED56D65}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1076,12 +1078,6 @@ Global {6F591D05-5F7F-4211-9042-42D8BCE60415}.Publish|Any CPU.Build.0 = Debug|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.ActiveCfg = Release|Any CPU {6F591D05-5F7F-4211-9042-42D8BCE60415}.Release|Any CPU.Build.0 = Release|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU - {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Debug|Any CPU.Build.0 = Debug|Any CPU {E82B640C-1704-430D-8D71-FD8ED3695468}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1100,6 +1096,12 @@ Global {39EAB599-742F-417D-AF80-95F90376BB18}.Publish|Any CPU.Build.0 = Publish|Any CPU {39EAB599-742F-417D-AF80-95F90376BB18}.Release|Any CPU.ActiveCfg = Release|Any CPU {39EAB599-742F-417D-AF80-95F90376BB18}.Release|Any CPU.Build.0 = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Debug|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Publish|Any CPU.Build.0 = Debug|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.ActiveCfg = Release|Any CPU + {232E1153-6366-4175-A982-D66B30AAD610}.Release|Any CPU.Build.0 = Release|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Debug|Any CPU.Build.0 = Debug|Any CPU {DAC54048-A39A-4739-8307-EA5A291F2EA0}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -1172,6 +1174,12 @@ Global {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Publish|Any CPU.Build.0 = Debug|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.ActiveCfg = Release|Any CPU {78785CB1-66CF-4895-D7E5-A440DD84BE86}.Release|Any CPU.Build.0 = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Publish|Any CPU.Build.0 = Publish|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EA35F1B5-9148-4189-BE34-5E00AED56D65}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -1333,6 +1341,7 @@ Global {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = 
{5D4C0700-BBB5-418F-A7B2-F392B9A18263} {2EB6E4C2-606D-B638-2E08-49EA2061C428} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {78785CB1-66CF-4895-D7E5-A440DD84BE86} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + {EA35F1B5-9148-4189-BE34-5E00AED56D65} = {6823CD5E-2ABE-41EB-B865-F86EC13F0CF9} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 746d5fbb73cf..ed5a4a0782c7 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -52,6 +52,7 @@ + diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj index 3a061b4fb4a0..c6c490634d76 100644 --- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj +++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj @@ -40,6 +40,7 @@ + diff --git a/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs b/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs new file mode 100644 index 000000000000..362a7bccaf02 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step12_Azure.cs @@ -0,0 +1,141 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.ComponentModel; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; +using AzureAIP = Azure.AI.Projects; + +namespace GettingStarted; + +/// +/// This example demonstrates similarity between using +/// and (see: Step 2). +/// +public class Step12_Azure(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string HostName = "Host"; + private const string HostInstructions = "Answer questions about the menu."; + + [Fact] + public async Task UseSingleAssistantAgentAsync() + { + // Define the agent + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AzureAIP.AgentsClient client = clientProvider.Client.GetAgentsClient(); + AzureAIP.Agent definition = await client.CreateAgentAsync( + this.Model, + HostName, + null, + HostInstructions); + AzureAIAgent agent = new(definition, clientProvider) + { + Kernel = new Kernel(), + }; + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new AzureAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + try + { + await InvokeAgentAsync("Hello"); + await InvokeAgentAsync("What is the special soup and its price?"); + await InvokeAgentAsync("What is the special drink and its price?"); + await InvokeAgentAsync("Thank you"); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + + // Local function to invoke agent and display the conversation messages. 
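+        // The user message is first added to the remote thread, then the agent is invoked and each response is written to the output.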
+ async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } + + [Fact] + public async Task UseTemplateForAssistantAgentAsync() + { + // Define the agent + string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml"); + PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml); + IPromptTemplate promptTemplate = new KernelPromptTemplateFactory().Create(templateConfig); + AzureAIClientProvider clientProvider = this.GetAzureProvider(); + AzureAIP.Agent definition = await clientProvider.Client.GetAgentsClient().CreateAgentAsync(this.Model); + // Instructions, Name and Description properties defined via the config. + AzureAIAgent agent = new(definition, clientProvider, promptTemplate) + { + Kernel = new Kernel(), + Arguments = new KernelArguments() + { + { "topic", "Dog" }, + { "length", "3" }, + }, + }; + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new AzureAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + try + { + // Invoke the agent with the default arguments. + await InvokeAgentAsync(); + + // Invoke the agent with the override arguments. + await InvokeAgentAsync( + new() + { + { "topic", "Cat" }, + { "length", "3" }, + }); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + + // Local function to invoke agent and display the response. + async Task InvokeAgentAsync(KernelArguments? arguments = null) + { + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments)) + { + WriteAgentChatMessage(response); + } + } + } + + private sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) => + "$9.99"; + } +} diff --git a/dotnet/src/Agents/AzureAI/AgentAIThreadCreationOptions.cs b/dotnet/src/Agents/AzureAI/AgentAIThreadCreationOptions.cs new file mode 100644 index 000000000000..5a2a40bf061b --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AgentAIThreadCreationOptions.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using AzureAIP = Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Thread creation options. +/// +public sealed class AzureAIThreadCreationOptions +{ + /// + /// Optional messages to initialize thread with... + /// + /// + /// Only supports messages with role = User or Assistant + /// + public IReadOnlyList? Messages { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + public IReadOnlyDictionary? 
Metadata { get; init; } + + /// + /// Optional file-ids made available to the code_interpreter tool, if enabled. + /// + public AzureAIP.ToolResources? ToolResources { get; init; } +} diff --git a/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj new file mode 100644 index 000000000000..6ff0ac29ff9a --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Agents.AzureAI.csproj @@ -0,0 +1,45 @@ + + + + + Microsoft.SemanticKernel.Agents.AzureAI + Microsoft.SemanticKernel.Agents.AzureAI + net8.0;netstandard2.0 + $(NoWarn);SKEXP0110 + false + alpha + + + + + + + Semantic Kernel Agents - AzureAI + Defines core a concrete Agent based on the Azure AI Agent API. + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs new file mode 100644 index 000000000000..e1231d8582c6 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs @@ -0,0 +1,317 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; +using AzureAIP = Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// A specialization based on Open AI Assistant / GPT. +/// +public sealed class AzureAIAgent : KernelAgent +{ + /// + /// Metadata key that identifies code-interpreter content. + /// + public const string CodeInterpreterMetadataKey = "code"; // %%%% RE-EVALUATE + + private readonly AzureAIClientProvider _provider; + private readonly AzureAIP.AgentsClient _client; + private readonly string[] _channelKeys; + + /// + /// The assistant definition. + /// + public AzureAIP.Agent Definition { get; private init; } + + /// + /// Set when the assistant has been deleted via . + /// An assistant removed by other means will result in an exception when invoked. + /// + public bool IsDeleted { get; private set; } + + /// + /// Defines polling behavior for run processing + /// + public RunPollingOptions PollingOptions { get; } = new(); + + /// + /// Create a new assistant thread. + /// + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public Task CreateThreadAsync(CancellationToken cancellationToken = default) // %%% STATIC + { + return AgentThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken); + } + + /// + /// Create a new assistant thread. + /// + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public Task CreateThreadAsync(AzureAIThreadCreationOptions? options, CancellationToken cancellationToken = default) // %%% STATIC + { + return AgentThreadActions.CreateThreadAsync(this._client, options, cancellationToken); + } + + /// + /// Create a new assistant thread. + /// + /// The thread identifier + /// The to monitor for cancellation requests. The default is . 
+ /// The thread identifier + public async Task DeleteThreadAsync( + string threadId, + CancellationToken cancellationToken = default) + { + // Validate input + Verify.NotNullOrWhiteSpace(threadId, nameof(threadId)); + + bool isDeleted = await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false); + + return isDeleted; + } + + /// + /// Adds a message to the specified thread. + /// + /// The thread identifier + /// A non-system message with which to append to the conversation. + /// The to monitor for cancellation requests. The default is . + /// + /// Only supports messages with role = User or Assistant: + /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + /// + public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + return AgentThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken); + } + + /// + /// Gets messages for a specified thread. + /// + /// The thread identifier + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + return AgentThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken); + } + + /// + /// Delete the assistant definition. + /// + /// The to monitor for cancellation requests. The default is . + /// True if assistant definition has been deleted + /// + /// Assistant based agent will not be useable after deletion. + /// + public async Task DeleteAsync(CancellationToken cancellationToken = default) + { + if (!this.IsDeleted) + { + bool isDeleted = await this._client.DeleteAgentAsync(this.Id, cancellationToken).ConfigureAwait(false); + this.IsDeleted = isDeleted; + } + + return this.IsDeleted; + } + + /// + /// Invoke the assistant on the specified thread. + /// + /// The thread identifier + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + return this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken); + } + + /// + /// Invoke the assistant on the specified thread. + /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public async IAsyncEnumerable InvokeAsync( + string threadId, + AzureAIInvocationOptions? options, + KernelArguments? arguments = null, + Kernel? 
kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + kernel ??= this.Kernel; + arguments = this.MergeArguments(arguments); + + await foreach ((bool isVisible, ChatMessageContent message) in AgentThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) + { + if (isVisible) + { + yield return message; + } + } + } + + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + { + return this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken); + } + + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + AzureAIInvocationOptions? options, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + kernel ??= this.Kernel; + arguments = this.MergeArguments(arguments); + + // %%% STREAMING + //return AgentThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken); + return Array.Empty().ToAsyncEnumerable(); + } + + /// + protected override IEnumerable GetChannelKeys() + { + // Distinguish from other channel types. 
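+        // Combine the channel type with the provider configuration keys so agents targeting different endpoints do not share a channel.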
+ yield return typeof(AzureAIChannel).FullName!; + + foreach (string key in this._channelKeys) + { + yield return key; + } + } + + /// + protected override async Task CreateChannelAsync(CancellationToken cancellationToken) + { + //this.Logger.LogAzureAIAgentCreatingChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel)); // %%% + + string threadId = await AgentThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken).ConfigureAwait(false); + + this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), threadId); + + AzureAIChannel channel = + new(this._client, threadId) + { + Logger = this.LoggerFactory.CreateLogger() + }; + + //this.Logger.LogAzureAIAgentCreatedChannel(nameof(CreateChannelAsync), nameof(AzureAIChannel), thread.Id); // %%% + + return channel; + } + + internal void ThrowIfDeleted() + { + if (this.IsDeleted) + { + throw new KernelException($"Agent Failure - {nameof(AzureAIAgent)} agent is deleted: {this.Id}."); + } + } + + internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) + { + return this.FormatInstructionsAsync(kernel, arguments, cancellationToken); + } + + /// + protected override async Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken) + { + string threadId = channelState; + + //this.Logger.LogAzureAIAgentRestoringChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId); + + AzureAIP.AgentThread thread = await this._client.GetThreadAsync(threadId, cancellationToken).ConfigureAwait(false); + + //this.Logger.LogAzureAIAgentRestoredChannel(nameof(RestoreChannelAsync), nameof(AzureAIChannel), threadId); + + return new AzureAIChannel(this._client, thread.Id); + } + + /// + /// Initializes a new instance of the class. + /// + public AzureAIAgent( + AzureAIP.Agent model, + AzureAIClientProvider clientProvider, + IPromptTemplate? template = null) // %%% CONFLICTS WITH model + { + this._provider = clientProvider; + this._client = clientProvider.Client.GetAgentsClient(); + this._channelKeys = [.. clientProvider.ConfigurationKeys]; + + this.Definition = model; + this.Description = this.Definition.Description; + this.Id = this.Definition.Id; + this.Name = this.Definition.Name; + this.Instructions = this.Definition.Instructions; + this.Template = template; + } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIChannel.cs b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs new file mode 100644 index 000000000000..7f6ec379a722 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIChannel.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; +using AzureAIP = Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// A specialization for use with . 
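+/// The channel wraps a single agent thread and forwards message, invocation, and history operations to it.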
+/// +internal sealed class AzureAIChannel(AzureAIP.AgentsClient client, string threadId) + : AgentChannel +{ + /// + protected override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken) + { + foreach (ChatMessageContent message in history) + { + await AgentThreadActions.CreateMessageAsync(client, threadId, message, cancellationToken).ConfigureAwait(false); + } + } + + /// + protected override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( + AzureAIAgent agent, + CancellationToken cancellationToken) + { + agent.ThrowIfDeleted(); + + // %%% + //return AgentThreadActions.InvokeAsync(agent, client, threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + return Array.Empty<(bool, ChatMessageContent)>().ToAsyncEnumerable(); + } + + /// + protected override IAsyncEnumerable InvokeStreamingAsync(AzureAIAgent agent, IList messages, CancellationToken cancellationToken = default) + { + agent.ThrowIfDeleted(); + + // %%% + //return AgentThreadActions.InvokeStreamingAsync(agent, client, threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + return Array.Empty().ToAsyncEnumerable(); + } + + /// + protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) + { + return AgentThreadActions.GetMessagesAsync(client, threadId, cancellationToken); + } + + /// + protected override Task ResetAsync(CancellationToken cancellationToken = default) + { + return client.DeleteThreadAsync(threadId, cancellationToken); + } + + /// + protected override string Serialize() { return threadId; } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs new file mode 100644 index 000000000000..ddec300f9c42 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIClientProvider.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using Azure.AI.Projects; +using Azure.Core; +using Azure.Core.Pipeline; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Provides an for use by . +/// +public sealed class AzureAIClientProvider // $$$ NEEDED ??? +{ + /// + /// An active client instance. + /// + public AIProjectClient Client { get; } + + /// + /// Configuration keys required for management. + /// + internal IReadOnlyList ConfigurationKeys { get; } + + private AzureAIClientProvider(AIProjectClient client, IEnumerable keys) + { + this.Client = client; + this.ConfigurationKeys = keys.ToArray(); + } + + /// + /// Produce a based on . + /// + /// The service endpoint + /// The credentials + /// Custom for HTTP requests. + public static AzureAIClientProvider ForAzureOpenAI(string connectionString, TokenCredential credential, HttpClient? httpClient = null) + { + Verify.NotNullOrWhiteSpace(connectionString, nameof(connectionString)); + Verify.NotNull(credential, nameof(credential)); + + AIProjectClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AIProjectClient(connectionString, credential, clientOptions), CreateConfigurationKeys(connectionString, httpClient)); + } + + /// + /// Directly provide a client instance. 
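+    /// The configuration keys are derived from the client type and instance, so agents sharing the same client instance also share channel keys.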
+ /// + public static AzureAIClientProvider FromClient(AIProjectClient client) + { + return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]); + } + + private static AIProjectClientOptions CreateAzureClientOptions(HttpClient? httpClient) + { + AIProjectClientOptions options = new() + { + Diagnostics = { // %%% LOGGING ??? + ApplicationId = HttpHeaderConstant.Values.UserAgent + }, + }; + + ConfigureClientOptions(httpClient, options); + + return options; + } + + private static void ConfigureClientOptions(HttpClient? httpClient, ClientOptions options) + { + //options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureAIAgent))), HttpPipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientTransport(httpClient); + options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. + } + } + + private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue) + { + return + new((message) => + { + if (message?.Request?.Headers.TryGetValue(headerName, out string? _) == false) + { + message.Request.Headers.Add(headerName, headerValue); + } + }); + } + + private static IEnumerable CreateConfigurationKeys(string connectionString, HttpClient? httpClient) + { + yield return connectionString; + + if (httpClient is not null) + { + if (httpClient.BaseAddress is not null) + { + yield return httpClient.BaseAddress.AbsoluteUri; + } + + foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) + { + yield return header; + } + } + } +} diff --git a/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs new file mode 100644 index 000000000000..8772baf32f7d --- /dev/null +++ b/dotnet/src/Agents/AzureAI/AzureAIInvocationOptions.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Defines per invocation execution options that override the assistant definition. +/// +/// +/// Not applicable to usage. +/// +public sealed class AzureAIInvocationOptions +{ + /// + /// Override the AI model targeted by the agent. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ModelName { get; init; } + + /// + /// Appends additional instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? AdditionalInstructions { get; init; } + + /// + /// Additional messages to add to the thread. + /// + /// + /// Only supports messages with role = User or Assistant: + /// https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-additional_messages + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? AdditionalMessages { get; init; } + + /// + /// Set if code_interpreter tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableCodeInterpreter { get; init; } + + /// + /// Set if file_search tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } + + /// + /// Set if json response-format is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? 
EnableJsonResponse { get; init; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TruncationMessageCount { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs new file mode 100644 index 000000000000..112f85dc162f --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentExtensions.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Extension methods for . +/// +internal static class AgentExtensions +{ + /// + /// Provides a name for the agent, even if it's the identifier. + /// (since allows null) + /// + /// The target agent + /// The agent name as a non-empty string + public static string GetName(this Agent agent) + { + return agent.Name ?? agent.Id; + } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs new file mode 100644 index 000000000000..d3d5ee6e3aa9 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; +using System.Threading.Tasks; +using AzureAIP = Azure.AI.Projects; +using System.Linq; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Extensions; + +/// +/// %%% +/// +/// +/// Improves testability. 
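+/// Centralizes run creation and run-step pagination for the Azure AI agent.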
+/// +internal static class AgentRunExtensions +{ + public static async IAsyncEnumerable GetStepsAsync( + this AzureAIP.AgentsClient client, + AzureAIP.ThreadRun run, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + AzureAIP.PageableList? steps = null; + do + { + steps = await client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false); + foreach (AzureAIP.RunStep step in steps) + { + yield return step; + } + } + while (steps?.HasMore ?? false); + } + + public static async Task CreateAsync( + this AzureAIP.AgentsClient client, + string threadId, + AzureAIAgent agent, + string? instructions, + AzureAIP.ToolDefinition[] tools, + bool isStreaming, + AzureAIInvocationOptions? invocationOptions, + CancellationToken cancellationToken) + { + return + await client.CreateRunAsync( + threadId, + agent.Definition.Id, + overrideModelName: invocationOptions?.ModelName, + instructions, + additionalInstructions: invocationOptions?.AdditionalInstructions, + additionalMessages: AgentMessageFactory.GetThreadMessages(invocationOptions?.AdditionalMessages).ToArray(), + overrideTools: tools, + stream: isStreaming, + temperature: invocationOptions?.Temperature, + topP: invocationOptions?.TopP, + maxPromptTokens: invocationOptions?.MaxPromptTokens, + maxCompletionTokens: invocationOptions?.MaxCompletionTokens, + truncationStrategy: null, // %%% + toolChoice: null, // %%% + responseFormat: null, // %%% + parallelToolCalls: invocationOptions?.ParallelToolCallsEnabled, + metadata: invocationOptions?.Metadata, + cancellationToken).ConfigureAwait(false); + } +} diff --git a/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs new file mode 100644 index 000000000000..d70c33275ce9 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Extensions/KernelFunctionExtensions.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using AzureAIP = Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +internal static class KernelFunctionExtensions +{ + /// + /// Convert to an OpenAI tool model. + /// + /// The source function + /// The plugin name + /// An OpenAI tool definition + public static AzureAIP.FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName) + { + var metadata = function.Metadata; + if (metadata.Parameters.Count > 0) + { + var required = new List(metadata.Parameters.Count); + var parameters = + metadata.Parameters.ToDictionary( + p => p.Name, + p => + { + if (p.IsRequired) + { + required.Add(p.Name); + } + + return + new + { + type = ConvertType(p.ParameterType), + description = p.Description, + }; + }); + + var spec = + new + { + type = "object", + properties = parameters, + required, + }; + + return new AzureAIP.FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description, BinaryData.FromObjectAsJson(spec)); + } + + return new AzureAIP.FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description); + } + + private static string ConvertType(Type? 
type) + { + if (type is null || type == typeof(string)) + { + return "string"; + } + + if (type == typeof(bool)) + { + return "boolean"; + } + + if (type.IsEnum) + { + return "enum"; + } + + if (type.IsArray) + { + return "array"; + } + + if (type == typeof(DateTime) || type == typeof(DateTimeOffset)) + { + return "date-time"; + } + + return Type.GetTypeCode(type) switch + { + TypeCode.SByte or TypeCode.Byte or + TypeCode.Int16 or TypeCode.UInt16 or + TypeCode.Int32 or TypeCode.UInt32 or + TypeCode.Int64 or TypeCode.UInt64 or + TypeCode.Single or TypeCode.Double or TypeCode.Decimal => "number", + + _ => "object", + }; + } +} diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs new file mode 100644 index 000000000000..44091a32f693 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.AI.Projects; +using Microsoft.SemanticKernel.Connectors.FunctionCalling; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +/// +/// Factory for creating based on . +/// +/// +/// Improves testability. +/// +internal static class AgentMessageFactory +{ + /// + /// %%% + /// + /// The message content. + public static Dictionary GetMetadata(ChatMessageContent message) + { + return message.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToString() ?? string.Empty) ?? []; + } + + ///// + ///// Translates into enumeration of . + ///// + ///// The message content. + //public static IEnumerable GetMessageContents(ChatMessageContent message) // %%% + //{ + // bool hasTextContent = message.Items.OfType().Any(); + // foreach (KernelContent content in message.Items) + // { + // if (content is TextContent textContent) + // { + // yield return new MessageTextContent(content.ToString()); + // } + // else if (content is ImageContent imageContent) + // { + // if (imageContent.Uri != null) + // { + // yield return MessageContent.FromImageUri(imageContent.Uri); + // } + // else if (!string.IsNullOrWhiteSpace(imageContent.DataUri)) + // { + // yield return MessageContent.FromImageUri(new(imageContent.DataUri!)); + // } + // } + // else if (content is FileReferenceContent fileContent) + // { + // yield return MessageContent.FromImageFileId(fileContent.FileId); + // } + // else if (content is FunctionResultContent resultContent && resultContent.Result != null && !hasTextContent) + // { + // // Only convert a function result when text-content is not already present + // yield return MessageContent.FromText(FunctionCallsProcessor.ProcessFunctionResult(resultContent.Result)); + // } + // } + //} + + internal static IEnumerable GetThreadMessages(IReadOnlyList? messages) + { + //if (options?.Messages is not null) + //{ + // foreach (ChatMessageContent message in options.Messages) + // { + // AzureAIP.ThreadMessageOptions threadMessage = new( + // role: message.Role == AuthorRole.User ? 
AzureAIP.MessageRole.User : AzureAIP.MessageRole.Agent, + // content: AgentMessageFactory.GetMessageContents(message)); + + // createOptions.InitialMessages.Add(threadMessage); + // } + //} + + throw new NotImplementedException(); + } +} diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs new file mode 100644 index 000000000000..931a3db6e1f2 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Internal/AgentThreadActions.cs @@ -0,0 +1,894 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.AzureAI.Extensions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.FunctionCalling; +using AzureAIP = Azure.AI.Projects; + +namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal; + +/// +/// Actions associated with an Open Assistant thread. +/// +internal static class AgentThreadActions +{ + private static readonly HashSet s_pollingStatuses = + [ + AzureAIP.RunStatus.Queued, + AzureAIP.RunStatus.InProgress, + AzureAIP.RunStatus.Cancelling, + ]; + + private static readonly HashSet s_failureStatuses = + [ + AzureAIP.RunStatus.Expired, + AzureAIP.RunStatus.Failed, + AzureAIP.RunStatus.Cancelled, + ]; + + /// + /// Create a new assistant thread. + /// + /// The assistant client + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public static async Task CreateThreadAsync(AzureAIP.AgentsClient client, AzureAIThreadCreationOptions? options, CancellationToken cancellationToken = default) + { + AzureAIP.ThreadMessageOptions[] messages = AgentMessageFactory.GetThreadMessages(options?.Messages).ToArray(); + + AzureAIP.AgentThread thread = await client.CreateThreadAsync(messages, options?.ToolResources, options?.Metadata, cancellationToken).ConfigureAwait(false); + + return thread.Id; + } + + /// + /// Create a message in the specified thread. + /// + /// The assistant client + /// The thread identifier + /// The message to add + /// The to monitor for cancellation requests. The default is . + /// if a system message is present, without taking any other action + public static async Task CreateMessageAsync(AzureAIP.AgentsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) + { + if (message.Items.Any(i => i is FunctionCallContent)) + { + return; + } + + string? content = message.Content; + if (!string.IsNullOrWhiteSpace(content)) + { + return; + } + + await client.CreateMessageAsync( + threadId, + message.Role == AuthorRole.User ? AzureAIP.MessageRole.User : AzureAIP.MessageRole.Agent, + content, + attachments: null, // %%% + AgentMessageFactory.GetMetadata(message), + cancellationToken).ConfigureAwait(false); + } + + /// + /// Retrieves the thread messages. + /// + /// The assistant client + /// The thread identifier + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. 
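+    /// Messages are enumerated newest first; agent display names are resolved once and cached for the duration of the call.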
+ public static async IAsyncEnumerable GetMessagesAsync(AzureAIP.AgentsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) + { + Dictionary agentNames = []; // Cache agent names by their identifier + + string? lastId = null; + AzureAIP.PageableList? messages = null; + do + { + messages = await client.GetMessagesAsync(threadId, runId: null, limit: null, AzureAIP.ListSortOrder.Descending, after: lastId, before: null, cancellationToken).ConfigureAwait(false); + foreach (AzureAIP.ThreadMessage message in messages) + { + lastId = message.Id; + string? assistantName = null; + if (!string.IsNullOrWhiteSpace(message.AssistantId) && + !agentNames.TryGetValue(message.AssistantId, out assistantName)) + { + AzureAIP.Agent assistant = await client.GetAgentAsync(message.AssistantId, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(assistant.Name)) + { + agentNames.Add(assistant.Id, assistant.Name); + } + } + + assistantName ??= message.AssistantId; + + ChatMessageContent content = GenerateMessageContent(assistantName, message); + + if (content.Items.Count > 0) + { + yield return content; + } + } + } while (messages?.HasMore ?? false); + } + + /// + /// Invoke the assistant on the specified thread. + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// + /// The assistant agent to interact with the thread. + /// The assistant client + /// The thread identifier + /// Options to utilize for the invocation + /// The logger to utilize (might be agent or channel scoped) + /// The plugins and other state. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( + AzureAIAgent agent, + AzureAIP.AgentsClient client, + string threadId, + AzureAIInvocationOptions? invocationOptions, + ILogger logger, + Kernel kernel, + KernelArguments? arguments, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + if (agent.IsDeleted) + { + throw new KernelException($"Agent Failure - {nameof(AzureAIAgent)} agent is deleted: {agent.Id}."); + } + + //logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); + + AzureAIP.ToolDefinition[]? tools = [.. agent.Definition.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + //RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions); + + AzureAIP.ThreadRun run = await client.CreateAsync(threadId, agent, instructions, tools, isStreaming: false, invocationOptions, cancellationToken).ConfigureAwait(false); + + //logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); + + FunctionCallsProcessor functionProcessor = new(logger); + // This matches current behavior. Will be configurable upon integrating with `FunctionChoice` (#6795/#5200) + FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true }; + + // Evaluate status and process steps and messages, as encountered. 
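+        // Track the step identifiers already processed and map each tool-call id to its function result.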
+ HashSet processedStepIds = []; + Dictionary functionSteps = []; + do + { + // Check for cancellation + cancellationToken.ThrowIfCancellationRequested(); + + // Poll run and steps until actionable + await PollRunStatusAsync().ConfigureAwait(false); + + // Is in terminal state? + if (s_failureStatuses.Contains(run.Status)) + { + throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); + } + + AzureAIP.RunStep[] steps = await client.GetStepsAsync(run, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Is tool action required? + if (run.Status == AzureAIP.RunStatus.RequiresAction) + { + //logger.LogOpenAIAssistantProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId); + + // Execute functions in parallel and post results at once. + FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + if (functionCalls.Length > 0) + { + // Emit function-call content + ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls); + yield return (IsVisible: false, Message: functionCallMessage); + + // Invoke functions for each tool-step + FunctionResultContent[] functionResults = + await functionProcessor.InvokeFunctionCallsAsync( + functionCallMessage, + (_) => true, + functionOptions, + kernel, + isStreaming: false, + cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // Capture function-call for message processing + foreach (FunctionResultContent functionCall in functionResults) + { + functionSteps.Add(functionCall.CallId!, functionCall); + } + + // Process tool output + AzureAIP.ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + + await client.SubmitToolOutputsToRunAsync(threadId, run.Id, toolOutputs, stream: false, cancellationToken).ConfigureAwait(false); + } + + //logger.LogOpenAIAssistantProcessedRunSteps(nameof(InvokeAsync), functionCalls.Length, run.Id, threadId); + } + + // Enumerate completed messages + //logger.LogOpenAIAssistantProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId); + + IEnumerable completedStepsToProcess = + steps + .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id)) + .OrderBy(s => s.CreatedAt); + + int messageCount = 0; + foreach (AzureAIP.RunStep completedStep in completedStepsToProcess) + { + if (completedStep.Type == AzureAIP.RunStepType.ToolCalls) + { + AzureAIP.RunStepToolCallDetails toolDetails = (AzureAIP.RunStepToolCallDetails)completedStep.StepDetails; + foreach (AzureAIP.RunStepToolCall toolCall in toolDetails.ToolCalls) + { + bool isVisible = false; + ChatMessageContent? 
content = null; + + // Process code-interpreter content + if (toolCall is AzureAIP.RunStepCodeInterpreterToolCall codeTool) + { + content = GenerateCodeInterpreterContent(agent.GetName(), codeTool.Input, completedStep); + isVisible = true; + } + // Process function result content + else if (toolCall is AzureAIP.RunStepFunctionToolCall functionTool) + { + FunctionResultContent functionStep = functionSteps[functionTool.Id]; // Function step always captured on invocation + content = GenerateFunctionResultContent(agent.GetName(), [functionStep], completedStep); + } + + if (content is not null) + { + ++messageCount; + + yield return (isVisible, Message: content); + } + } + } + else if (completedStep.Type == AzureAIP.RunStepType.MessageCreation) + { + // Retrieve the message + AzureAIP.RunStepMessageCreationDetails messageDetails = (AzureAIP.RunStepMessageCreationDetails)completedStep.StepDetails; + AzureAIP.ThreadMessage? message = await RetrieveMessageAsync(client, threadId, messageDetails.MessageCreation.MessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); + + if (message is not null) + { + ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, completedStep); + + if (content.Items.Count > 0) + { + ++messageCount; + + yield return (IsVisible: true, Message: content); + } + } + } + + processedStepIds.Add(completedStep.Id); + } + + //logger.LogOpenAIAssistantProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId); + } + while (AzureAIP.RunStatus.Completed != run.Status); + + //logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run.Id, threadId); + + // Local function to assist in run polling (participates in method closure). + async Task PollRunStatusAsync() + { + //logger.LogOpenAIAssistantPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId); + + int count = 0; + + do + { + cancellationToken.ThrowIfCancellationRequested(); + + if (count > 0) + { + // Reduce polling frequency after a couple attempts + await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false); + } + + ++count; + + try + { + run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false); + } + // The presence of a `Status` code means the server responded with error...always fail in that case + catch (ClientResultException clientException) when (clientException.Status <= 0) + { + // Check maximum retry count + if (count >= agent.PollingOptions.MaximumRetryCount) + { + throw; + } + + // Retry for potential transient failure + continue; + } + catch (AggregateException aggregateException) when (aggregateException.InnerException is ClientResultException innerClientException) + { + // The presence of a `Status` code means the server responded with error + if (innerClientException.Status > 0) + { + throw; + } + + // Check maximum retry count + if (count >= agent.PollingOptions.MaximumRetryCount) + { + throw; + } + + // Retry for potential transient failure + continue; + } + } + while (s_pollingStatuses.Contains(run.Status)); + + //logger.LogOpenAIAssistantPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId); + } + } + + ///// + ///// Invoke the assistant on the specified thread using streaming. + ///// + ///// The assistant agent to interact with the thread. 
+ ///// The assistant client + ///// The thread identifier + ///// The receiver for the completed messages generated + ///// Options to utilize for the invocation + ///// The logger to utilize (might be agent or channel scoped) + ///// The plugins and other state. + ///// Optional arguments to pass to the agents's invocation, including any . + ///// The to monitor for cancellation requests. The default is . + ///// Asynchronous enumeration of messages. + ///// + ///// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + ///// + //public static async IAsyncEnumerable InvokeStreamingAsync( + // OpenAIAssistantAgent agent, + // AssistantClient client, + // string threadId, + // IList? messages, + // OpenAIAssistantInvocationOptions? invocationOptions, + // ILogger logger, + // Kernel kernel, + // KernelArguments? arguments, + // [EnumeratorCancellation] CancellationToken cancellationToken) + //{ + // if (agent.IsDeleted) + // { + // throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}."); + // } + + // logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); + + // ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + // string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + // RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions); + + // options.ToolsOverride.AddRange(tools); + + // // Evaluate status and process steps and messages, as encountered. + // HashSet processedStepIds = []; + // Dictionary stepFunctionResults = []; + // List stepsToProcess = []; + // ThreadRun? run = null; + + // FunctionCallsProcessor functionProcessor = new(logger); + // // This matches current behavior. Will be configurable upon integrating with `FunctionChoice` (#6795/#5200) + // FunctionChoiceBehaviorOptions functionOptions = new() { AllowConcurrentInvocation = true, AllowParallelCalls = true }; + + // IAsyncEnumerable asyncUpdates = client.CreateRunStreamingAsync(threadId, agent.Id, options, cancellationToken); + // do + // { + // // Check for cancellation + // cancellationToken.ThrowIfCancellationRequested(); + + // stepsToProcess.Clear(); + + // await foreach (StreamingUpdate update in asyncUpdates.ConfigureAwait(false)) + // { + // if (update is RunUpdate runUpdate) + // { + // run = runUpdate.Value; + + // switch (runUpdate.UpdateKind) + // { + // case StreamingUpdateReason.RunCreated: + // logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); + // break; + // } + // } + // else if (update is MessageContentUpdate contentUpdate) + // { + // switch (contentUpdate.UpdateKind) + // { + // case StreamingUpdateReason.MessageUpdated: + // yield return GenerateStreamingMessageContent(agent.GetName(), contentUpdate); + // break; + // } + // } + // else if (update is RunStepDetailsUpdate detailsUpdate) + // { + // StreamingChatMessageContent? 
toolContent = GenerateStreamingCodeInterpreterContent(agent.GetName(), detailsUpdate); + // if (toolContent != null) + // { + // yield return toolContent; + // } + // else if (detailsUpdate.FunctionOutput != null) + // { + // yield return + // new StreamingChatMessageContent(AuthorRole.Assistant, null) + // { + // AuthorName = agent.Name, + // Items = [new StreamingFunctionCallUpdateContent(detailsUpdate.ToolCallId, detailsUpdate.FunctionName, detailsUpdate.FunctionArguments)] + // }; + // } + // } + // else if (update is RunStepUpdate stepUpdate) + // { + // switch (stepUpdate.UpdateKind) + // { + // case StreamingUpdateReason.RunStepCompleted: + // stepsToProcess.Add(stepUpdate.Value); + // break; + // default: + // break; + // } + // } + // } + + // if (run == null) + // { + // throw new KernelException($"Agent Failure - Run not created for thread: ${threadId}"); + // } + + // // Is in terminal state? + // if (run.Status.IsTerminal && run.Status != RunStatus.Completed) + // { + // throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); + // } + + // if (run.Status == RunStatus.RequiresAction) + // { + // RunStep[] activeSteps = + // await client.GetRunStepsAsync(run.ThreadId, run.Id, cancellationToken: cancellationToken) + // .Where(step => step.Status == RunStepStatus.InProgress) + // .ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // // Capture map between the tool call and its associated step + // Dictionary toolMap = []; + // foreach (RunStep step in activeSteps) + // { + // foreach (RunStepToolCall stepDetails in step.Details.ToolCalls) + // { + // toolMap[stepDetails.ToolCallId] = step.Id; + // } + // } + + // // Execute functions in parallel and post results at once. + // FunctionCallContent[] functionCalls = activeSteps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + // if (functionCalls.Length > 0) + // { + // // Emit function-call content + // ChatMessageContent functionCallMessage = GenerateFunctionCallContent(agent.GetName(), functionCalls); + // messages?.Add(functionCallMessage); + + // FunctionResultContent[] functionResults = + // await functionProcessor.InvokeFunctionCallsAsync( + // functionCallMessage, + // (_) => true, + // functionOptions, + // kernel, + // isStreaming: true, + // cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + + // // Process tool output + // ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + // asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run.ThreadId, run.Id, toolOutputs, cancellationToken); + + // foreach (RunStep step in activeSteps) + // { + // stepFunctionResults.Add(step.Id, functionResults.Where(result => step.Id == toolMap[result.CallId!]).ToArray()); + // } + // } + // } + + // if (stepsToProcess.Count > 0) + // { + // logger.LogOpenAIAssistantProcessingRunMessages(nameof(InvokeAsync), run!.Id, threadId); + + // foreach (RunStep step in stepsToProcess) + // { + // if (!string.IsNullOrEmpty(step.Details.CreatedMessageId)) + // { + // ThreadMessage? 
message = + // await RetrieveMessageAsync( + // client, + // threadId, + // step.Details.CreatedMessageId, + // agent.PollingOptions.MessageSynchronizationDelay, + // cancellationToken).ConfigureAwait(false); + + // if (message != null) + // { + // ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, step); + // messages?.Add(content); + // } + // } + // else + // { + // foreach (RunStepToolCall toolCall in step.Details.ToolCalls) + // { + // if (toolCall.ToolKind == RunStepToolCallKind.Function) + // { + // messages?.Add(GenerateFunctionResultContent(agent.GetName(), stepFunctionResults[step.Id], step)); + // stepFunctionResults.Remove(step.Id); + // break; + // } + + // if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter) + // { + // messages?.Add(GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput, step)); + // } + // } + // } + // } + + // logger.LogOpenAIAssistantProcessedRunMessages(nameof(InvokeAsync), stepsToProcess.Count, run!.Id, threadId); + // } + // } + // while (run?.Status != RunStatus.Completed); + + // logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId); + //} + + private static ChatMessageContent GenerateMessageContent(string? assistantName, AzureAIP.ThreadMessage message, AzureAIP.RunStep? completedStep = null) + { + AuthorRole role = new(message.Role.ToString()); + + Dictionary? metadata = + new() + { + { nameof(AzureAIP.ThreadMessage.CreatedAt), message.CreatedAt }, + { nameof(AzureAIP.ThreadMessage.AssistantId), message.AssistantId }, + { nameof(AzureAIP.ThreadMessage.ThreadId), message.ThreadId }, + { nameof(AzureAIP.ThreadMessage.RunId), message.RunId }, + { nameof(AzureAIP.MessageContentUpdate.MessageId), message.Id }, + }; + + if (completedStep != null) + { + metadata[nameof(AzureAIP.RunStepDetailsUpdate.StepId)] = completedStep.Id; + metadata[nameof(AzureAIP.RunStep.Usage)] = completedStep.Usage; + } + + ChatMessageContent content = + new(role, content: null) + { + AuthorName = assistantName, + Metadata = metadata, + }; + + foreach (AzureAIP.MessageContent itemContent in message.ContentItems) + { + // Process text content + if (itemContent is AzureAIP.MessageTextContent textContent) + { + content.Items.Add(new TextContent(textContent.Text)); + + foreach (AzureAIP.MessageTextAnnotation annotation in textContent.Annotations) + { + content.Items.Add(GenerateAnnotationContent(annotation)); + } + } + // Process image content + else if (itemContent is AzureAIP.MessageImageFileContent imageContent) + { + content.Items.Add(new FileReferenceContent(imageContent.FileId)); + } + } + + return content; + } + + //private static StreamingChatMessageContent GenerateStreamingMessageContent(string? 
assistantName, MessageContentUpdate update) + //{ + // StreamingChatMessageContent content = + // new(AuthorRole.Assistant, content: null) + // { + // AuthorName = assistantName, + // }; + + // // Process text content + // if (!string.IsNullOrEmpty(update.Text)) + // { + // content.Items.Add(new StreamingTextContent(update.Text)); + // } + // // Process image content + // else if (update.ImageFileId != null) + // { + // content.Items.Add(new StreamingFileReferenceContent(update.ImageFileId)); + // } + // // Process annotations + // else if (update.TextAnnotation != null) + // { + // content.Items.Add(GenerateStreamingAnnotationContent(update.TextAnnotation)); + // } + + // if (update.Role.HasValue && update.Role.Value != MessageRole.User) + // { + // content.Role = new(update.Role.Value.ToString()); + // } + + // return content; + //} + + //private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update) + //{ + // StreamingChatMessageContent content = + // new(AuthorRole.Assistant, content: null) + // { + // AuthorName = assistantName, + // }; + + // // Process text content + // if (update.CodeInterpreterInput != null) + // { + // content.Items.Add(new StreamingTextContent(update.CodeInterpreterInput)); + // content.Metadata = new Dictionary { { OpenAIAssistantAgent.CodeInterpreterMetadataKey, true } }; + // } + + // if ((update.CodeInterpreterOutputs?.Count ?? 0) > 0) + // { + // foreach (var output in update.CodeInterpreterOutputs!) + // { + // if (output.ImageFileId != null) + // { + // content.Items.Add(new StreamingFileReferenceContent(output.ImageFileId)); + // } + // } + // } + + // return content.Items.Count > 0 ? content : null; + //} + + private static AnnotationContent GenerateAnnotationContent(AzureAIP.MessageTextAnnotation annotation) + { + string? fileId = null; + + if (annotation is AzureAIP.MessageTextFileCitationAnnotation fileCitationAnnotation) + { + fileId = fileCitationAnnotation.FileId; + } + else if (annotation is AzureAIP.MessageTextFilePathAnnotation filePathAnnotation) + { + fileId = filePathAnnotation.FileId; + } + + return + new(annotation.Text) + { + Quote = annotation.Text, + FileId = fileId, + }; + } + + //private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation) + //{ + // string? fileId = null; + + // if (!string.IsNullOrEmpty(annotation.OutputFileId)) + // { + // fileId = annotation.OutputFileId; + // } + // else if (!string.IsNullOrEmpty(annotation.InputFileId)) + // { + // fileId = annotation.InputFileId; + // } + + // return + // new(annotation.TextToReplace) + // { + // StartIndex = annotation.StartIndex ?? 0, + // EndIndex = annotation.EndIndex ?? 
0, + // FileId = fileId, + // }; + //} + + private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode, AzureAIP.RunStep completedStep) + { + Dictionary metadata = GenerateToolCallMetadata(completedStep); + metadata[AzureAIAgent.CodeInterpreterMetadataKey] = true; + + return + new ChatMessageContent( + AuthorRole.Assistant, + [ + new TextContent(pythonCode) + ]) + { + AuthorName = agentName, + Metadata = metadata, + }; + } + + private static IEnumerable ParseFunctionStep(AzureAIAgent agent, AzureAIP.RunStep step) + { + if (step.Status == AzureAIP.RunStepStatus.InProgress && step.Type == AzureAIP.RunStepType.ToolCalls) + { + AzureAIP.RunStepToolCallDetails toolCallDetails = (AzureAIP.RunStepToolCallDetails)step.StepDetails; + foreach (AzureAIP.RunStepToolCall toolCall in toolCallDetails.ToolCalls) + { + if (toolCall is AzureAIP.RunStepFunctionToolCall functionCall) + { + (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(functionCall.Name, functionCall.Arguments); + + FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments); + + yield return content; + } + } + } + } + + private static (FunctionName functionName, KernelArguments arguments) ParseFunctionCall(string functionName, string? functionArguments) + { + FunctionName nameParts = FunctionName.Parse(functionName); + + KernelArguments arguments = []; + + if (!string.IsNullOrWhiteSpace(functionArguments)) + { + foreach (var argumentKvp in JsonSerializer.Deserialize>(functionArguments!)!) + { + arguments[argumentKvp.Key] = argumentKvp.Value.ToString(); + } + } + + return (nameParts, arguments); + } + + private static ChatMessageContent GenerateFunctionCallContent(string agentName, IList functionCalls) + { + ChatMessageContent functionCallContent = new(AuthorRole.Assistant, content: null) + { + AuthorName = agentName + }; + + functionCallContent.Items.AddRange(functionCalls); + + return functionCallContent; + } + + private static ChatMessageContent GenerateFunctionResultContent(string agentName, IEnumerable functionResults, AzureAIP.RunStep completedStep) + { + ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null) + { + AuthorName = agentName, + Metadata = GenerateToolCallMetadata(completedStep), + }; + + foreach (FunctionResultContent functionResult in functionResults) + { + functionResultContent.Items.Add( + new FunctionResultContent( + functionResult.FunctionName, + functionResult.PluginName, + functionResult.CallId, + functionResult.Result)); + } + + return functionResultContent; + } + + private static Dictionary GenerateToolCallMetadata(AzureAIP.RunStep completedStep) + { + return new() + { + { nameof(AzureAIP.RunStep.CreatedAt), completedStep.CreatedAt }, + { nameof(AzureAIP.RunStep.AssistantId), completedStep.AssistantId }, + { nameof(AzureAIP.RunStep.ThreadId), completedStep.ThreadId }, + { nameof(AzureAIP.RunStep.RunId), completedStep.RunId }, + { nameof(AzureAIP.RunStepDetailsUpdate.StepId), completedStep.Id }, + { nameof(AzureAIP.RunStep.Usage), completedStep.Usage }, + }; + } + + //private static Task[] ExecuteFunctionSteps(AzureAIAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken) + //{ + // Task[] functionTasks = new Task[functionCalls.Length]; + + // for (int index = 0; index < functionCalls.Length; ++index) + // { + // functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken); + // } + + // return functionTasks; + //} + 
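+ // Illustrative sketch of the function-calling round trip handled by the helpers in this class.
+ // The plugin/function name and argument values are hypothetical and shown only for illustration:
+ //   FunctionCallContent call = ParseFunctionStep(agent, step).First();    // e.g. "MyPlugin-GetWeather" with {"city":"Seattle"}
+ //   FunctionResultContent result = await call.InvokeAsync(agent.Kernel);  // invoke the mapped kernel function
+ //   AzureAIP.ToolOutput[] outputs = GenerateToolOutputs([result]);        // non-string results are JSON-serialized below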
+ //private static Task ExecuteFunctionStep(AzureAIAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken) + //{ + // return functionCall.InvokeAsync(agent.Kernel, cancellationToken); + //} + + private static AzureAIP.ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults) + { + AzureAIP.ToolOutput[] toolOutputs = new AzureAIP.ToolOutput[functionResults.Length]; + + for (int index = 0; index < functionResults.Length; ++index) + { + FunctionResultContent functionResult = functionResults[index]; + + object resultValue = functionResult.Result ?? string.Empty; + + if (resultValue is not string textResult) + { + textResult = JsonSerializer.Serialize(resultValue); + } + + toolOutputs[index] = new AzureAIP.ToolOutput(functionResult.CallId, textResult!); + } + + return toolOutputs; + } + + private static async Task RetrieveMessageAsync(AzureAIP.AgentsClient client, string threadId, string messageId, TimeSpan syncDelay, CancellationToken cancellationToken) + { + AzureAIP.ThreadMessage? message = null; + + bool retry = false; + int count = 0; + do + { + try + { + message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false); + } + catch (RequestFailedException exception) + { + // Step has provided the message-id. Retry if NotFound/404 is returned. + // Very rarely, there can be a synchronization lag between the + // assistant response and the message service. + retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3; + } + + if (retry) + { + await Task.Delay(syncDelay, cancellationToken).ConfigureAwait(false); + } + + ++count; + } + while (retry); + + return message; + } +} diff --git a/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs b/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs new file mode 100644 index 000000000000..bd1c0f58314e --- /dev/null +++ b/dotnet/src/Agents/AzureAI/Properties/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0110")] diff --git a/dotnet/src/Agents/AzureAI/RunPollingOptions.cs b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs new file mode 100644 index 000000000000..7fcb76e5cab9 --- /dev/null +++ b/dotnet/src/Agents/AzureAI/RunPollingOptions.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; + +namespace Microsoft.SemanticKernel.Agents.AzureAI; + +/// +/// Configuration and defaults associated with polling behavior for Assistant API run processing. +/// +public sealed class RunPollingOptions +{ + /// + /// The default maximum number of retries when monitoring thread-run status. + /// + public static int DefaultMaximumRetryCount { get; } = 3; + + /// + /// The default polling interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The default back-off interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); + + /// + /// The default number of polling iterations before using . + /// + public static int DefaultPollingBackoffThreshold { get; } = 2; + + /// + /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+ /// + public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The maximum retry count when polling thread-run status. + /// + /// + /// Only affects failures that have the potential to be transient. Explicit server error responses + /// will result in immediate failure. + /// + public int MaximumRetryCount { get; set; } = DefaultMaximumRetryCount; + + /// + /// The polling interval when monitoring thread-run status. + /// + public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; + + /// + /// The back-off interval when monitoring thread-run status. + /// + public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; + + /// + /// The number of polling iterations before using . + /// + public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold; + + /// + /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; + + /// + /// Gets the polling interval for the specified iteration count. + /// + /// The number of polling iterations already attempted + public TimeSpan GetPollingInterval(int iterationCount) + { + return iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval; + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs deleted file mode 100644 index d017fb403f23..000000000000 --- a/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; - -/// -/// Helper class to inject headers into Azure SDK HTTP pipeline -/// -internal sealed class AddHeaderRequestPolicy(string headerName, string headerValue) : HttpPipelineSynchronousPolicy -{ - public override void OnSendingRequest(HttpMessage message) => message.Request.Headers.Add(headerName, headerValue); -} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs index 2e066b91869f..c8434797bc50 100644 --- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs @@ -829,23 +829,6 @@ private static ChatMessageContent GenerateFunctionResultContent(string agentName }; } - private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken) - { - Task[] functionTasks = new Task[functionCalls.Length]; - - for (int index = 0; index < functionCalls.Length; ++index) - { - functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken); - } - - return functionTasks; - } - - private static Task ExecuteFunctionStep(OpenAIAssistantAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken) - { - return functionCall.InvokeAsync(agent.Kernel, cancellationToken); - } - private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults) { ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length]; diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj index b773878eb397..cc4c3d53b991 100644 --- 
a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj +++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj @@ -39,4 +39,8 @@ + + + + diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs deleted file mode 100644 index 6288c6a5aed8..000000000000 --- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System.Linq; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.SemanticKernel.Agents.OpenAI.Internal; -using Xunit; - -namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure; - -/// -/// Unit testing of . -/// -public class AddHeaderRequestPolicyTests -{ - /// - /// Verify behavior of . - /// - [Fact] - public void VerifyAddHeaderRequestPolicyExecution() - { - // Arrange - using HttpClientTransport clientTransport = new(); - HttpPipeline pipeline = new(clientTransport); - - HttpMessage message = pipeline.CreateMessage(); - AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue"); - - // Act - policy.OnSendingRequest(message); - - // Assert - Assert.Single(message.Request.Headers); - HttpHeader header = message.Request.Headers.Single(); - Assert.Equal("testname", header.Name); - Assert.Equal("testvalue", header.Value); - } -} diff --git a/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props new file mode 100644 index 000000000000..323196e5564b --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/AzureAIUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs new file mode 100644 index 000000000000..8f412aa9e930 --- /dev/null +++ b/dotnet/src/InternalUtilities/azure/Policies/GeneratedActionPipelinePolicy.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +/// +/// Generic action pipeline policy for processing messages. 
+/// +[ExcludeFromCodeCoverage] +internal sealed class GenericActionPipelinePolicy : HttpPipelinePolicy +{ + private readonly Action _processMessageAction; + + internal GenericActionPipelinePolicy(Action processMessageAction) + { + this._processMessageAction = processMessageAction; + } + + public override void Process(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + } + + public override ValueTask ProcessAsync(HttpMessage message, ReadOnlyMemory pipeline) + { + this._processMessageAction(message); + return new ValueTask(Task.CompletedTask); // .NET STD 2.0 compatibility + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs index 989005333946..c21707683a06 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -3,8 +3,11 @@ using System.ClientModel; using System.Collections.ObjectModel; using System.Diagnostics; +using Azure.Core.Pipeline; using Azure.Identity; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.AzureAI; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using OpenAI.Assistants; @@ -22,6 +25,8 @@ public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output /// protected const string AssistantSampleMetadataKey = "sksample"; + protected override bool ForceOpenAI => true; + /// /// Metadata to indicate the assistant as created for a sample. /// @@ -36,16 +41,31 @@ public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output { AssistantSampleMetadataKey, bool.TrueString } }); + /// + /// Provide a according to the configuration settings. + /// + protected AzureAIClientProvider GetAzureProvider() + { + //if (this.UseOpenAIConfig) // %%% + //{ + // throw new InvalidOperationException("Azure provider is not available when using OpenAI configuration."); + //} + + return AzureAIClientProvider.ForAzureOpenAI("eastus.api.azureml.ms;5b742c40-bc2b-4a4f-902f-ee9f644d8844;rg-crickman-ai;sc-xx8889760-8651", new AzureCliCredential()); // %%% CONFIG + } + /// /// Provide a according to the configuration settings. /// protected OpenAIClientProvider GetClientProvider() - => + { + return this.UseOpenAIConfig ? OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey ?? throw new ConfigurationNotFoundException("OpenAI:ApiKey"))) : !string.IsNullOrWhiteSpace(this.ApiKey) ? OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)) : OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(this.Endpoint!)); + } /// /// Common method to write formatted agent chat content to the console. diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f751ea6fc448..f0e71963fc80 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -3,7 +3,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. 
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs index 8dbcc00eb25d..525472d90047 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; namespace Microsoft.SemanticKernel; diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs index 609f94a87180..5c5aa5780303 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs @@ -4,7 +4,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Agents.OpenAI; +namespace Microsoft.SemanticKernel.Agents; /// /// Content type to support message annotations. diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs index 524caed4ff29..dac3d2c5abc6 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs @@ -1,5 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Xunit; namespace SemanticKernel.UnitTests.Contents; diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs index cd753a15e201..aa5f2784f5cd 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs @@ -6,7 +6,7 @@ using System.Text; using System.Text.Json; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; using Xunit; diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs index eb954752ce4b..46da513e4a7c 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Text; -using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents; using Xunit; namespace SemanticKernel.UnitTests.Contents; From e14e2421d3e4fe866cb99253e161e8267591a2fe Mon Sep 17 00:00:00 2001 From: Chris Rickman Date: Wed, 8 Jan 2025 14:58:28 -0800 Subject: [PATCH 2/5] Namespace --- .../InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs index c21707683a06..25cfe1bf8bf9 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -3,7 +3,6 @@ using System.ClientModel; using System.Collections.ObjectModel; using System.Diagnostics; -using Azure.Core.Pipeline; using Azure.Identity; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; From e39ea79b4dd3a6e8ac890f26db28770bb17146a0 Mon Sep 17 00:00:00 2001 From: Chris Rickman Date: Wed, 8 Jan 2025 15:40:33 -0800 Subject: [PATCH 3/5] Namespace update --- .../CompatibilitySuppressions.xml | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml diff --git a/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..710187694735 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml @@ -0,0 +1,32 @@ + + + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.AnnotationContent + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.StreamingAnnotationContent + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.AnnotationContent + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0001 + T:Microsoft.SemanticKernel.Agents.OpenAI.StreamingAnnotationContent + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + \ No newline at end of file From d22be4fa1f6c362b47c76f0a42db3d3478d15208 Mon Sep 17 00:00:00 2001 From: Chris Rickman Date: Wed, 8 Jan 2025 15:47:23 -0800 Subject: [PATCH 4/5] Namespace --- dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs index 44091a32f693..40eb914d04f7 100644 --- a/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs +++ b/dotnet/src/Agents/AzureAI/Internal/AgentMessageFactory.cs @@ -3,7 +3,6 @@ using System.Collections.Generic; using System.Linq; using Azure.AI.Projects; -using Microsoft.SemanticKernel.Connectors.FunctionCalling; namespace Microsoft.SemanticKernel.Agents.AzureAI.Internal; From 1d6504b27ad611750f4699bca37cc1f21b7c5a0a Mon Sep 17 00:00:00 2001 From: Chris Rickman Date: Wed, 8 Jan 2025 16:03:03 -0800 Subject: [PATCH 5/5] Order --- dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs index d3d5ee6e3aa9..4cca81393215 100644 --- a/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs +++ b/dotnet/src/Agents/AzureAI/Extensions/AgentRunExtensions.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Linq; using System.Runtime.CompilerServices; using System.Threading; -using Microsoft.SemanticKernel.Agents.AzureAI.Internal; using System.Threading.Tasks; +using Microsoft.SemanticKernel.Agents.AzureAI.Internal; using AzureAIP = Azure.AI.Projects; -using System.Linq; namespace Microsoft.SemanticKernel.Agents.AzureAI.Extensions;
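A minimal usage sketch of two of the new types introduced in this patch series: RunPollingOptions and the AzureAIClientProvider factory used in BaseAgentsTest. The connection string and interval values below are placeholders, and the broader AzureAIAgent surface is not shown, so this only illustrates how the polling knobs and the provider are expected to be configured:

    using System;
    using Azure.Identity;
    using Microsoft.SemanticKernel.Agents.AzureAI;

    // Relax run polling relative to the defaults (500 ms interval, 1 s back-off after 2 iterations, 3 retries).
    RunPollingOptions polling = new()
    {
        RunPollingInterval = TimeSpan.FromSeconds(1),
        RunPollingBackoff = TimeSpan.FromSeconds(5),
        RunPollingBackoffThreshold = 3,
        MessageSynchronizationDelay = TimeSpan.FromSeconds(1),
    };

    // GetPollingInterval(n) returns RunPollingInterval until n exceeds RunPollingBackoffThreshold, then RunPollingBackoff.
    TimeSpan early = polling.GetPollingInterval(1);  // 1 second
    TimeSpan later = polling.GetPollingInterval(4);  // 5 seconds

    // Resolve a client provider for an Azure AI project with the signed-in Azure CLI identity;
    // "<project-connection-string>" stands in for the project's connection string.
    AzureAIClientProvider provider =
        AzureAIClientProvider.ForAzureOpenAI("<project-connection-string>", new AzureCliCredential());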