From 0560bdd645dfbc579a71f2f0fea98ea83dd3bb3f Mon Sep 17 00:00:00 2001 From: Xiaoyun Zhang Date: Tue, 15 Oct 2024 07:23:33 -0700 Subject: [PATCH] [.Net] Update OpenAI, semantic kernel to latest version (#3792) * update OpenAI, semantic kernel to latest version * fix build error * update * update release note --- dotnet/Directory.Build.props | 1 - dotnet/Directory.Packages.props | 18 +++--- dotnet/eng/MetaInfo.props | 2 +- dotnet/eng/Version.props | 17 ------ .../Example08_LMStudio.cs | 3 +- .../GettingStart/Agent_Middleware.cs | 2 +- .../Connect_To_Azure_OpenAI.cs | 7 +-- .../Connect_To_Ollama.cs | 3 +- .../Tool_Call_With_Ollama_And_LiteLLM.cs | 3 +- .../AutoGen.OpenAI.Sample/Use_Json_Mode.cs | 2 +- .../AutoGen.OpenAI/Agent/OpenAIChatAgent.cs | 13 ++-- .../src/AutoGen.OpenAI/AutoGen.OpenAI.csproj | 3 +- .../OpenAIChatRequestMessageConnector.cs | 46 +++++++------- dotnet/src/AutoGen/AzureOpenAIConfig.cs | 2 +- dotnet/src/AutoGen/LMStudioConfig.cs | 3 +- ...MessageTests.BasicMessageTest.approved.txt | 60 ++++++++----------- .../AutoGen.OpenAI.Tests/MathClassTest.cs | 3 +- .../OpenAIChatAgentTest.cs | 9 +-- .../OpenAIMessageTests.cs | 10 ++-- .../KernelFunctionMiddlewareTests.cs | 9 ++- .../OpenAIChatCompletionMiddlewareTests.cs | 3 +- dotnet/website/release_note/0.2.2.md | 4 ++ dotnet/website/release_note/toc.yml | 5 +- 23 files changed, 107 insertions(+), 121 deletions(-) delete mode 100644 dotnet/eng/Version.props create mode 100644 dotnet/website/release_note/0.2.2.md diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props index 67ae6d7d30..6dac23748f 100644 --- a/dotnet/Directory.Build.props +++ b/dotnet/Directory.Build.props @@ -1,6 +1,5 @@ - diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index 59f99bd558..eb519d4abd 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -1,6 +1,8 @@ true + 1.22.0 + 1.22.0-alpha @@ -12,7 +14,7 @@ - + @@ -61,12 +63,12 @@ - - - - - - + + + + + + @@ -86,7 +88,7 @@ - + diff --git a/dotnet/eng/MetaInfo.props b/dotnet/eng/MetaInfo.props index c6eeaf8434..4f3d216aa0 100644 --- a/dotnet/eng/MetaInfo.props +++ b/dotnet/eng/MetaInfo.props @@ -1,7 +1,7 @@ - 0.2.1 + 0.2.2 AutoGen https://microsoft.github.io/autogen-for-net/ https://github.com/microsoft/autogen diff --git a/dotnet/eng/Version.props b/dotnet/eng/Version.props deleted file mode 100644 index f31f8ee8c7..0000000000 --- a/dotnet/eng/Version.props +++ /dev/null @@ -1,17 +0,0 @@ - - - - 2.0.0-beta.3 - 1.18.1-rc - 1.18.1-alpha - 5.0.0 - 4.3.0 - 6.0.0 - 6.8.0 - 2.4.2 - 17.7.0 - 1.0.0-beta.24229.4 - 8.0.0 - 7.4.4 - - \ No newline at end of file diff --git a/dotnet/samples/AutoGen.BasicSamples/Example08_LMStudio.cs b/dotnet/samples/AutoGen.BasicSamples/Example08_LMStudio.cs index c3b80da5d5..499588b46c 100644 --- a/dotnet/samples/AutoGen.BasicSamples/Example08_LMStudio.cs +++ b/dotnet/samples/AutoGen.BasicSamples/Example08_LMStudio.cs @@ -2,6 +2,7 @@ // Example08_LMStudio.cs #region lmstudio_using_statements +using System.ClientModel; using AutoGen.Core; using AutoGen.OpenAI; using AutoGen.OpenAI.Extension; @@ -16,7 +17,7 @@ public class Example08_LMStudio { #region lmstudio_example_1 var endpoint = "http://localhost:1234"; - var openaiClient = new OpenAIClient("api-key", new OpenAIClientOptions + var openaiClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions { Endpoint = new Uri(endpoint), }); diff --git a/dotnet/samples/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs 
b/dotnet/samples/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs index a625adebc9..5942e83d0f 100644 --- a/dotnet/samples/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs +++ b/dotnet/samples/AutoGen.BasicSamples/GettingStart/Agent_Middleware.cs @@ -28,7 +28,7 @@ public class Agent_Middleware var reply = await innerAgent.GenerateReplyAsync(messages, option, ct); if (reply is MessageEnvelope chatCompletions) { - var tokenCount = chatCompletions.Content.Usage.TotalTokens; + var tokenCount = chatCompletions.Content.Usage.TotalTokenCount; totalTokenCount += tokenCount; } return reply; diff --git a/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs b/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs index eec4726e4f..0cc9fe988d 100644 --- a/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs +++ b/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Azure_OpenAI.cs @@ -1,10 +1,9 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Connect_To_Azure_OpenAI.cs +// Copyright (c) Microsoft. All rights reserved. #region using_statement +using System.ClientModel; using AutoGen.Core; using AutoGen.OpenAI.Extension; -using Azure; using Azure.AI.OpenAI; #endregion using_statement @@ -21,7 +20,7 @@ public class Connect_To_Azure_OpenAI // Use AzureOpenAIClient to connect to openai model deployed on azure. // The AzureOpenAIClient comes from Azure.AI.OpenAI package - var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey)); + var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(apiKey)); var agent = new OpenAIChatAgent( chatClient: openAIClient.GetChatClient(model), diff --git a/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs b/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs index 964e1f3fd3..816560fb32 100644 --- a/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs +++ b/dotnet/samples/AutoGen.OpenAI.Sample/Connect_To_Ollama.cs @@ -2,6 +2,7 @@ // Connect_To_Ollama.cs #region using_statement +using System.ClientModel; using AutoGen.Core; using AutoGen.OpenAI.Extension; using OpenAI; @@ -16,7 +17,7 @@ public class Connect_To_Ollama #region create_agent // api-key is not required for local server // so you can use any string here - var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions + var openAIClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions { Endpoint = new Uri("http://localhost:11434/v1/"), // remember to add /v1/ at the end to connect to Ollama openai server }); diff --git a/dotnet/samples/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs b/dotnet/samples/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs index 2c1870cebe..05c09f49df 100644 --- a/dotnet/samples/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs +++ b/dotnet/samples/AutoGen.OpenAI.Sample/Tool_Call_With_Ollama_And_LiteLLM.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. 
// Tool_Call_With_Ollama_And_LiteLLM.cs +using System.ClientModel; using AutoGen.Core; using AutoGen.OpenAI.Extension; using OpenAI; @@ -45,7 +46,7 @@ public class Tool_Call_With_Ollama_And_LiteLLM // api-key is not required for local server // so you can use any string here - var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions + var openAIClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions { Endpoint = new Uri("http://localhost:4000"), }); diff --git a/dotnet/samples/AutoGen.OpenAI.Sample/Use_Json_Mode.cs b/dotnet/samples/AutoGen.OpenAI.Sample/Use_Json_Mode.cs index 5019dd0e33..787987e04f 100644 --- a/dotnet/samples/AutoGen.OpenAI.Sample/Use_Json_Mode.cs +++ b/dotnet/samples/AutoGen.OpenAI.Sample/Use_Json_Mode.cs @@ -25,7 +25,7 @@ public class Use_Json_Mode name: "assistant", systemMessage: "You are a helpful assistant designed to output JSON.", seed: 0, // explicitly set a seed to enable deterministic output - responseFormat: ChatResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode + responseFormat: ChatResponseFormat.CreateJsonObjectFormat()) // set response format to JSON object to enable JSON mode .RegisterMessageConnector() .RegisterPrintMessage(); #endregion create_agent diff --git a/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs b/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs index c821e5907e..3bfeb38e93 100644 --- a/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs +++ b/dotnet/src/AutoGen.OpenAI/Agent/OpenAIChatAgent.cs @@ -44,7 +44,7 @@ public class OpenAIChatAgent : IStreamingAgent /// system message /// temperature /// max tokens to generated - /// response format, set it to to enable json mode. + /// response format, set it to to enable json mode. /// seed to use, set it to enable deterministic output /// functions public OpenAIChatAgent( @@ -138,12 +138,11 @@ public class OpenAIChatAgent : IStreamingAgent { Seed = this.options.Seed, Temperature = options?.Temperature ?? this.options.Temperature, - MaxTokens = options?.MaxToken ?? this.options.MaxTokens, + MaxOutputTokenCount = options?.MaxToken ?? this.options.MaxOutputTokenCount, ResponseFormat = this.options.ResponseFormat, FrequencyPenalty = this.options.FrequencyPenalty, - FunctionChoice = this.options.FunctionChoice, IncludeLogProbabilities = this.options.IncludeLogProbabilities, - ParallelToolCallsEnabled = this.options.ParallelToolCallsEnabled, + AllowParallelToolCalls = this.options.AllowParallelToolCalls, PresencePenalty = this.options.PresencePenalty, ToolChoice = this.options.ToolChoice, TopLogProbabilityCount = this.options.TopLogProbabilityCount, @@ -183,9 +182,9 @@ public class OpenAIChatAgent : IStreamingAgent if (options?.OutputSchema is not null) { option.ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat( - name: options.OutputSchema.GetTitle() ?? throw new ArgumentException("Output schema must have a title"), + jsonSchemaFormatName: options.OutputSchema.GetTitle() ?? 
throw new ArgumentException("Output schema must have a title"), jsonSchema: BinaryData.FromObjectAsJson(options.OutputSchema), - description: options.OutputSchema.GetDescription()); + jsonSchemaFormatDescription: options.OutputSchema.GetDescription()); } return option; @@ -201,7 +200,7 @@ public class OpenAIChatAgent : IStreamingAgent var options = new ChatCompletionOptions { Temperature = temperature, - MaxTokens = maxTokens, + MaxOutputTokenCount = maxTokens, Seed = seed, ResponseFormat = responseFormat, }; diff --git a/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj b/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj index f93fdd4bc5..7f00b63be8 100644 --- a/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj +++ b/dotnet/src/AutoGen.OpenAI/AutoGen.OpenAI.csproj @@ -1,7 +1,8 @@ - + $(PackageTargetFrameworks) AutoGen.OpenAI + $(NoWarn);OPENAI001 diff --git a/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs b/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs index 2e4ed3e8d6..b5f5e3e0eb 100644 --- a/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs +++ b/dotnet/src/AutoGen.OpenAI/Middleware/OpenAIChatRequestMessageConnector.cs @@ -110,7 +110,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa { currentToolName += toolCall.FunctionName; currentToolArguments += toolCall.FunctionArgumentsUpdate; - currentToolId += toolCall.Id; + currentToolId += toolCall.ToolCallId; yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name); } @@ -118,8 +118,8 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa { toolCalls.Add(new ToolCall(currentToolName, currentToolArguments) { ToolCallId = currentToolId }); currentToolName = toolCall.FunctionName; - currentToolArguments = toolCall.FunctionArgumentsUpdate; - currentToolId = toolCall.Id; + currentToolArguments = toolCall.FunctionArgumentsUpdate.ToString(); + currentToolId = toolCall.ToolCallId; currentIndex = toolCall.Index; yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name); @@ -185,7 +185,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa // if tool calls is not empty, return ToolCallMessage if (chatCompletion.ToolCalls is { Count: > 0 }) { - var toolCalls = chatCompletion.ToolCalls.Select(tc => new ToolCall(tc.FunctionName, tc.FunctionArguments) { ToolCallId = tc.Id }); + var toolCalls = chatCompletion.ToolCalls.Select(tc => new ToolCall(tc.FunctionName, tc.FunctionArguments.ToString()) { ToolCallId = tc.Id }); return new ToolCallMessage(toolCalls, from) { Content = textContent?.Kind switch @@ -196,20 +196,6 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa }; } - // else, process function call. - // This is deprecated and will be removed in the future. 
- if (chatCompletion.FunctionCall is ChatFunctionCall fc) - { - return new ToolCallMessage(fc.FunctionName, fc.FunctionArguments, from) - { - Content = textContent?.Kind switch - { - _ when textContent?.Kind == ChatMessageContentPartKind.Text => textContent.Text, - _ => null, - }, - }; - } - // if the content is text, return TextMessage if (textContent?.Kind == ChatMessageContentPartKind.Text) { @@ -298,7 +284,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa IEnumerable items = message.Content.Select(ci => ci switch { - TextMessage text => ChatMessageContentPart.CreateTextMessageContentPart(text.Content), + TextMessage text => ChatMessageContentPart.CreateTextPart(text.Content), ImageMessage image => this.CreateChatMessageImageContentItemFromImageMessage(image), _ => throw new NotImplementedException(), }); @@ -309,8 +295,8 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa private ChatMessageContentPart CreateChatMessageImageContentItemFromImageMessage(ImageMessage message) { return message.Data is null && message.Url is not null - ? ChatMessageContentPart.CreateImageMessageContentPart(new Uri(message.Url)) - : ChatMessageContentPart.CreateImageMessageContentPart(message.Data, message.Data?.MediaType); + ? ChatMessageContentPart.CreateImagePart(new Uri(message.Url)) + : ChatMessageContentPart.CreateImagePart(message.Data, message.Data?.MediaType); } private IEnumerable ProcessToolCallMessage(IAgent agent, ToolCallMessage message) @@ -320,12 +306,26 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa throw new ArgumentException("ToolCallMessage is not supported when message.From is not the same with agent"); } - var toolCallParts = message.ToolCalls.Select((tc, i) => ChatToolCall.CreateFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments)); + var toolCallParts = message.ToolCalls.Select((tc, i) => ChatToolCall.CreateFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, BinaryData.FromString(tc.FunctionArguments))); var textContent = message.GetContent() ?? 
null; // Don't set participant name for assistant when it is tool call // fix https://github.com/microsoft/autogen/issues/3437 - var chatRequestMessage = new AssistantChatMessage(toolCallParts, textContent); + AssistantChatMessage chatRequestMessage; + + if (string.IsNullOrEmpty(textContent) is true) + { + chatRequestMessage = new AssistantChatMessage(toolCallParts); + } + else + { + chatRequestMessage = new AssistantChatMessage(textContent); + + foreach (var toolCallPart in toolCallParts) + { + chatRequestMessage.ToolCalls.Add(toolCallPart); + } + } return [chatRequestMessage]; } diff --git a/dotnet/src/AutoGen/AzureOpenAIConfig.cs b/dotnet/src/AutoGen/AzureOpenAIConfig.cs index b93a6821a8..769d56f54b 100644 --- a/dotnet/src/AutoGen/AzureOpenAIConfig.cs +++ b/dotnet/src/AutoGen/AzureOpenAIConfig.cs @@ -23,7 +23,7 @@ public class AzureOpenAIConfig : ILLMConfig internal ChatClient CreateChatClient() { - var client = new AzureOpenAIClient(new System.Uri(this.Endpoint), this.ApiKey); + var client = new AzureOpenAIClient(new System.Uri(this.Endpoint), new System.ClientModel.ApiKeyCredential(this.ApiKey)); return client.GetChatClient(DeploymentName); } diff --git a/dotnet/src/AutoGen/LMStudioConfig.cs b/dotnet/src/AutoGen/LMStudioConfig.cs index e301c0eb17..a2c74c6d2b 100644 --- a/dotnet/src/AutoGen/LMStudioConfig.cs +++ b/dotnet/src/AutoGen/LMStudioConfig.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // LMStudioConfig.cs using System; +using System.ClientModel; using OpenAI; using OpenAI.Chat; @@ -33,7 +34,7 @@ public class LMStudioConfig : ILLMConfig internal ChatClient CreateChatClient() { - var client = new OpenAIClient("api-key", new OpenAIClientOptions + var client = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions { Endpoint = this.Uri, }); diff --git a/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt b/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt index 55bd6502bf..5b113c3f65 100644 --- a/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt +++ b/dotnet/test/AutoGen.OpenAI.Tests/ApprovalTests/OpenAIMessageTests.BasicMessageTest.approved.txt @@ -7,13 +7,13 @@ "Role": "system", "Content": [ { - "Kind": {}, + "Kind": 0, "Text": "You are a helpful AI assistant", - "Refusal": null, "ImageUri": null, "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null } ] } @@ -26,13 +26,13 @@ "Role": "user", "Content": [ { - "Kind": {}, + "Kind": 0, "Text": "Hello", - "Refusal": null, "ImageUri": null, "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null } ], "Name": "user", @@ -52,19 +52,17 @@ "Role": "assistant", "Content": [ { - "Kind": {}, + "Kind": 0, "Text": "How can I help you?", - "Refusal": null, "ImageUri": null, "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null } ], "Name": "assistant", - "TooCall": [], - "FunctionCallName": null, - "FunctionCallArguments": null + "TooCall": [] } ] }, @@ -75,13 +73,13 @@ "Role": "user", "Content": [ { - "Kind": {}, + "Kind": 2, "Text": null, - "Refusal": null, "ImageUri": "https://example.com/image.png", "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null } ], "Name": "user", @@ -101,22 +99,22 @@ "Role": "user", 
"Content": [ { - "Kind": {}, + "Kind": 0, "Text": "Hello", - "Refusal": null, "ImageUri": null, "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null }, { - "Kind": {}, + "Kind": 2, "Text": null, - "Refusal": null, "ImageUri": "https://example.com/image.png", "ImageBytes": null, "ImageBytesMediaType": null, - "ImageDetail": null + "ImageDetailLevel": null, + "Refusal": null } ], "Name": "user", @@ -144,12 +142,10 @@ { "Type": "Function", "Name": "test", - "Arguments": "test", + "Arguments": "dGVzdA==", "Id": "test" } - ], - "FunctionCallName": null, - "FunctionCallArguments": null + ] } ] }, @@ -189,18 +185,16 @@ { "Type": "Function", "Name": "test", - "Arguments": "test", + "Arguments": "dGVzdA==", "Id": "test_0" }, { "Type": "Function", "Name": "test", - "Arguments": "test", + "Arguments": "dGVzdA==", "Id": "test_1" } - ], - "FunctionCallName": null, - "FunctionCallArguments": null + ] } ] }, @@ -215,12 +209,10 @@ { "Type": "Function", "Name": "test", - "Arguments": "test", + "Arguments": "dGVzdA==", "Id": "test" } - ], - "FunctionCallName": null, - "FunctionCallArguments": null + ] }, { "Role": "tool", diff --git a/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs b/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs index ce3c94b8b8..5af306a2ad 100644 --- a/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs +++ b/dotnet/test/AutoGen.OpenAI.Tests/MathClassTest.cs @@ -2,6 +2,7 @@ // MathClassTest.cs using System; +using System.ClientModel; using System.Collections.Generic; using System.Linq; using System.Threading; @@ -102,7 +103,7 @@ teacher, please create the next math question"; var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set"); var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set"); var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set"); - var openaiClient = new AzureOpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(key)); + var openaiClient = new AzureOpenAIClient(new Uri(endPoint), new ApiKeyCredential(key)); var teacher = await CreateTeacherAgentAsync(openaiClient, deployName); var student = await CreateStudentAssistantAgentAsync(openaiClient, deployName); diff --git a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs index 4f947beb63..f3d51f2ff3 100644 --- a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs +++ b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIChatAgentTest.cs @@ -2,6 +2,7 @@ // OpenAIChatAgentTest.cs using System; +using System.ClientModel; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; @@ -50,7 +51,7 @@ public partial class OpenAIChatAgentTest reply.Should().BeOfType>(); reply.As>().From.Should().Be("assistant"); reply.As>().Content.Role.Should().Be(ChatMessageRole.Assistant); - reply.As>().Content.Usage.TotalTokens.Should().BeGreaterThan(0); + reply.As>().Content.Usage.TotalTokenCount.Should().BeGreaterThan(0); // test streaming var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent }); @@ -239,7 +240,7 @@ public partial class OpenAIChatAgentTest var options = new ChatCompletionOptions() { Temperature = 0.7f, - MaxTokens = 1, + MaxOutputTokenCount = 1, }; var openAIChatAgent = new OpenAIChatAgent( @@ -261,7 +262,7 @@ public partial class OpenAIChatAgentTest var options = new ChatCompletionOptions() { Temperature = 0.7f, - MaxTokens = 1, + MaxOutputTokenCount = 1, }; var agentName = "assistant"; @@ -314,6 +315,6 @@ public partial class OpenAIChatAgentTest { var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable."); var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? 
throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable."); - return new AzureOpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key)); + return new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(key)); } } diff --git a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs index fa5f394c76..9fa6b5787e 100644 --- a/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs +++ b/dotnet/test/AutoGen.OpenAI.Tests/OpenAIMessageTests.cs @@ -286,7 +286,7 @@ public class OpenAIMessageTests var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.First(); functionToolCall.FunctionName.Should().Be("test"); functionToolCall.Id.Should().Be("test"); - functionToolCall.FunctionArguments.Should().Be("test"); + functionToolCall.FunctionArguments.ToString().Should().Be("test"); return await innerAgent.GenerateReplyAsync(msgs); }) .RegisterMiddleware(middleware); @@ -321,7 +321,7 @@ public class OpenAIMessageTests var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.ElementAt(i); functionToolCall.FunctionName.Should().Be("test"); functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.FunctionArguments.Should().Be("test"); + functionToolCall.FunctionArguments.ToString().Should().Be("test"); } return await innerAgent.GenerateReplyAsync(msgs); }) @@ -449,7 +449,7 @@ public class OpenAIMessageTests var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.First(); functionToolCall.FunctionName.Should().Be("test"); functionToolCall.Id.Should().Be("test"); - functionToolCall.FunctionArguments.Should().Be("test"); + functionToolCall.FunctionArguments.ToString().Should().Be("test"); return await innerAgent.GenerateReplyAsync(msgs); }) .RegisterMiddleware(middleware); @@ -481,7 +481,7 @@ public class OpenAIMessageTests var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.ElementAt(i); functionToolCall.FunctionName.Should().Be("test"); functionToolCall.Id.Should().Be($"test_{i}"); - functionToolCall.FunctionArguments.Should().Be("test"); + functionToolCall.FunctionArguments.ToString().Should().Be("test"); } for (int i = 1; i < msgs.Count(); i++) @@ -630,8 +630,6 @@ public class OpenAIMessageTests _ => throw new System.NotImplementedException(), }; }), - FunctionCallName = assistantMessage.FunctionCall?.FunctionName, - FunctionCallArguments = assistantMessage.FunctionCall?.FunctionArguments, }; } diff --git a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs index c9774adc59..80a60421d1 100644 --- a/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs +++ b/dotnet/test/AutoGen.SemanticKernel.Tests/KernelFunctionMiddlewareTests.cs @@ -1,11 +1,10 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// KernelFunctionMiddlewareTests.cs +// Copyright (c) Microsoft. All rights reserved. +using System.ClientModel; using AutoGen.Core; using AutoGen.OpenAI; using AutoGen.OpenAI.Extension; using AutoGen.Tests; -using Azure; using Azure.AI.OpenAI; using FluentAssertions; using Microsoft.SemanticKernel; @@ -22,7 +21,7 @@ public class KernelFunctionMiddlewareTests var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? 
throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); var openaiClient = new AzureOpenAIClient( endpoint: new Uri(endpoint), - credential: new AzureKeyCredential(key)); + credential: new ApiKeyCredential(key)); var kernel = new Kernel(); var plugin = kernel.ImportPluginFromType(); @@ -68,7 +67,7 @@ public class KernelFunctionMiddlewareTests var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable."); var openaiClient = new AzureOpenAIClient( endpoint: new Uri(endpoint), - credential: new AzureKeyCredential(key)); + credential: new ApiKeyCredential(key)); var kernel = new Kernel(); var getWeatherMethod = kernel.CreateFunctionFromMethod((string location) => $"The weather in {location} is sunny.", functionName: "GetWeather", description: "Get the weather for a location."); diff --git a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs b/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs index 0676ae2c44..4b7d93334e 100644 --- a/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs +++ b/dotnet/test/AutoGen.WebAPI.Tests/OpenAIChatCompletionMiddlewareTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // OpenAIChatCompletionMiddlewareTests.cs +using System.ClientModel; using System.ClientModel.Primitives; using AutoGen.Core; using AutoGen.OpenAI; @@ -73,7 +74,7 @@ public class OpenAIChatCompletionMiddlewareTests private OpenAIClient CreateOpenAIClient(HttpClient client) { - return new OpenAIClient("api-key", new OpenAIClientOptions + return new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions { Transport = new HttpClientPipelineTransport(client), }); diff --git a/dotnet/website/release_note/0.2.2.md b/dotnet/website/release_note/0.2.2.md new file mode 100644 index 0000000000..9881908a8d --- /dev/null +++ b/dotnet/website/release_note/0.2.2.md @@ -0,0 +1,4 @@ +# Release Notes for AutoGen.Net v0.2.2 🚀 + +## Improvements 🌟 +- **Update OpenAI and Semantick Kernel to the latest version** : Updated OpenAI and Semantick Kernel to the latest version ([#3792](https://github.com/microsoft/autogen/pull/3792) \ No newline at end of file diff --git a/dotnet/website/release_note/toc.yml b/dotnet/website/release_note/toc.yml index 133656687d..6f070c70b8 100644 --- a/dotnet/website/release_note/toc.yml +++ b/dotnet/website/release_note/toc.yml @@ -1,5 +1,8 @@ +- name: 0.2.2 + href: 0.2.2.md + - name: 0.2.1 - href: 0.2.1.md +href: 0.2.1.md - name: 0.2.0 href: 0.2.0.md