mirror of https://github.com/microsoft/autogen.git
[.Net] Update OpenAI, semantic kernel to latest version (#3792)
* update OpenAI, semantic kernel to latest version
* fix build error
* update
* update release note
parent a6f022958f
commit 0560bdd645
@@ -1,6 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-<Import Project="./eng/Version.props" />
 <Import Project="./eng/MetaInfo.props" />
 <Import Project="./eng/Sign.props" />
 <PropertyGroup>
@@ -1,6 +1,8 @@
 <Project>
 <PropertyGroup>
 <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
+<MicrosoftSemanticKernelVersion>1.22.0</MicrosoftSemanticKernelVersion>
+<MicrosoftSemanticKernelExperimentalVersion>1.22.0-alpha</MicrosoftSemanticKernelExperimentalVersion>
 </PropertyGroup>
 <ItemGroup>
 <PackageVersion Include="AspNetCore.Authentication.ApiKey" Version="8.0.1" />
@@ -12,7 +14,7 @@
 <PackageVersion Include="Aspire.Hosting.Orleans" Version="8.2.0" />
 <PackageVersion Include="Aspire.Hosting.Qdrant" Version="8.2.0" />
 <PackageVersion Include="Aspire.Hosting.Redis" Version="8.2.0" />
-<PackageVersion Include="Azure.AI.OpenAI" Version=" 2.0.0-beta.3" />
+<PackageVersion Include="Azure.AI.OpenAI" Version=" 2.1.0-beta.1" />
 <PackageVersion Include="Azure.AI.Inference" Version="1.0.0-beta.1" />
 <PackageVersion Include="Azure.Data.Tables" Version="12.8.3" />
 <PackageVersion Include="Azure.Identity" Version="1.12.0" />
@@ -61,12 +63,12 @@
 <PackageVersion Include="Microsoft.Orleans.Server" Version="8.2.0" />
 <PackageVersion Include="Microsoft.Orleans.Streaming" Version="8.2.0" />
 <PackageVersion Include="Microsoft.Orleans.Streaming.EventHubs" Version="8.2.0" />
-<PackageVersion Include="Microsoft.SemanticKernel" Version="1.18.1-rc" />
-<PackageVersion Include="Microsoft.SemanticKernel.Agents.Core" Version="1.18.1-alpha" />
-<PackageVersion Include="Microsoft.SemanticKernel.Connectors.AzureOpenAI" Version="1.18.1-rc" />
-<PackageVersion Include="Microsoft.SemanticKernel.Connectors.Qdrant" Version="1.18.1-alpha" />
-<PackageVersion Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.18.1-alpha" />
-<PackageVersion Include="Microsoft.SemanticKernel.Plugins.Web" Version="1.18.1-alpha" />
+<PackageVersion Include="Microsoft.SemanticKernel" Version="$(MicrosoftSemanticKernelVersion)" />
+<PackageVersion Include="Microsoft.SemanticKernel.Agents.Core" Version="$(MicrosoftSemanticKernelExperimentalVersion)" />
+<PackageVersion Include="Microsoft.SemanticKernel.Connectors.AzureOpenAI" Version="$(MicrosoftSemanticKernelVersion)" />
+<PackageVersion Include="Microsoft.SemanticKernel.Connectors.Qdrant" Version="$(MicrosoftSemanticKernelExperimentalVersion)" />
+<PackageVersion Include="Microsoft.SemanticKernel.Plugins.Memory" Version="$(MicrosoftSemanticKernelExperimentalVersion)" />
+<PackageVersion Include="Microsoft.SemanticKernel.Plugins.Web" Version="$(MicrosoftSemanticKernelExperimentalVersion)" />
 <PackageVersion Include="Newtonsoft.Json" Version="13.0.3" />
 <PackageVersion Include="Octokit" Version="13.0.1" />
 <PackageVersion Include="Octokit.Webhooks.AspNetCore" Version="2.2.2" />
@@ -86,7 +88,7 @@
 <PackageVersion Include="Microsoft.DotNet.Interactive.Jupyter" Version="1.0.0-beta.24229.4" />
 <PackageVersion Include="Microsoft.DotNet.Interactive.PackageManagement" Version="1.0.0-beta.24229.4" />
 <PackageVersion Include="Google.Cloud.AIPlatform.V1" Version="3.0.0" />
-<PackageVersion Include="OpenAI" Version="2.0.0-beta.10" />
+<PackageVersion Include="OpenAI" Version="2.1.0-beta.1" />
 <PackageVersion Include="System.CodeDom" Version="5.0.0" />
 <PackageVersion Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="4.3.0" />
 <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="utf-8"?>
 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
 <PropertyGroup>
-<VersionPrefix>0.2.1</VersionPrefix>
+<VersionPrefix>0.2.2</VersionPrefix>
 <Authors>AutoGen</Authors>
 <PackageProjectUrl>https://microsoft.github.io/autogen-for-net/</PackageProjectUrl>
 <RepositoryUrl>https://github.com/microsoft/autogen</RepositoryUrl>
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-<PropertyGroup>
-<AzureOpenAIV2Version>2.0.0-beta.3</AzureOpenAIV2Version>
-<SemanticKernelVersion>1.18.1-rc</SemanticKernelVersion>
-<SemanticKernelExperimentalVersion>1.18.1-alpha</SemanticKernelExperimentalVersion>
-<SystemCodeDomVersion>5.0.0</SystemCodeDomVersion>
-<MicrosoftCodeAnalysisVersion>4.3.0</MicrosoftCodeAnalysisVersion>
-<ApprovalTestVersion>6.0.0</ApprovalTestVersion>
-<FluentAssertionVersion>6.8.0</FluentAssertionVersion>
-<XUnitVersion>2.4.2</XUnitVersion>
-<MicrosoftNETTestSdkVersion>17.7.0</MicrosoftNETTestSdkVersion>
-<MicrosoftDotnetInteractive>1.0.0-beta.24229.4</MicrosoftDotnetInteractive>
-<MicrosoftSourceLinkGitHubVersion>8.0.0</MicrosoftSourceLinkGitHubVersion>
-<PowershellSDKVersion>7.4.4</PowershellSDKVersion>
-</PropertyGroup>
-</Project>
@@ -2,6 +2,7 @@
 // Example08_LMStudio.cs

 #region lmstudio_using_statements
+using System.ClientModel;
 using AutoGen.Core;
 using AutoGen.OpenAI;
 using AutoGen.OpenAI.Extension;
@@ -16,7 +17,7 @@ public class Example08_LMStudio
 {
 #region lmstudio_example_1
 var endpoint = "http://localhost:1234";
-var openaiClient = new OpenAIClient("api-key", new OpenAIClientOptions
+var openaiClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
 {
 Endpoint = new Uri(endpoint),
 });
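The hunk above shows the recurring client-construction change in this commit: OpenAI 2.x drops the raw string API-key constructor in favor of System.ClientModel.ApiKeyCredential. A minimal sketch of the new pattern, assuming a local OpenAI-compatible endpoint on port 1234 and a placeholder model name:

```csharp
using System;
using System.ClientModel;
using OpenAI;
using OpenAI.Chat;

// OpenAI 2.x: wrap the key in an ApiKeyCredential instead of passing a raw string.
// Any non-empty string works for a local server such as LM Studio.
var openaiClient = new OpenAIClient(
    new ApiKeyCredential("api-key"),
    new OpenAIClientOptions { Endpoint = new Uri("http://localhost:1234") });

// A ChatClient is still obtained per model name (placeholder shown here).
ChatClient chatClient = openaiClient.GetChatClient("llama-3.1-8b");
```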
@@ -28,7 +28,7 @@ public class Agent_Middleware
 var reply = await innerAgent.GenerateReplyAsync(messages, option, ct);
 if (reply is MessageEnvelope<ChatCompletion> chatCompletions)
 {
-var tokenCount = chatCompletions.Content.Usage.TotalTokens;
+var tokenCount = chatCompletions.Content.Usage.TotalTokenCount;
 totalTokenCount += tokenCount;
 }
 return reply;
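For context on the rename above: in OpenAI 2.1.0-beta.1 the usage type exposes *TokenCount properties, so token-counting middleware reads TotalTokenCount. A small sketch (the helper name is illustrative):

```csharp
using OpenAI.Chat;

// ChatTokenUsage in OpenAI 2.1.0-beta.1 exposes TotalTokenCount
// (the former TotalTokens); input/output follow the same *TokenCount naming.
static int GetTotalTokens(ChatCompletion completion)
{
    return completion.Usage.TotalTokenCount;
}
```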
@@ -1,10 +1,9 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Connect_To_Azure_OpenAI.cs
+// Copyright (c) Microsoft. All rights reserved.

 #region using_statement
+using System.ClientModel;
 using AutoGen.Core;
 using AutoGen.OpenAI.Extension;
-using Azure;
 using Azure.AI.OpenAI;
 #endregion using_statement

@@ -21,7 +20,7 @@ public class Connect_To_Azure_OpenAI

 // Use AzureOpenAIClient to connect to openai model deployed on azure.
 // The AzureOpenAIClient comes from Azure.AI.OpenAI package
-var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey));
+var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(apiKey));

 var agent = new OpenAIChatAgent(
 chatClient: openAIClient.GetChatClient(model),
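A note for readers migrating similar code: with Azure.AI.OpenAI 2.1.0-beta.1 the overload used above takes a System.ClientModel.ApiKeyCredential, which is why the using Azure; import (for AzureKeyCredential) could be dropped. A minimal sketch with placeholder endpoint, key, and deployment name:

```csharp
using System;
using System.ClientModel;
using Azure.AI.OpenAI;
using OpenAI.Chat;

// Placeholders: in real code these come from configuration or environment variables.
var endpoint = "https://your-resource.openai.azure.com/";
var apiKey = "your-azure-openai-key";

// ApiKeyCredential replaces AzureKeyCredential in this constructor overload.
var azureClient = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(apiKey));
ChatClient chatClient = azureClient.GetChatClient("your-deployment-name");
```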
@@ -2,6 +2,7 @@
 // Connect_To_Ollama.cs

 #region using_statement
+using System.ClientModel;
 using AutoGen.Core;
 using AutoGen.OpenAI.Extension;
 using OpenAI;
@@ -16,7 +17,7 @@ public class Connect_To_Ollama
 #region create_agent
 // api-key is not required for local server
 // so you can use any string here
-var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions
+var openAIClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
 {
 Endpoint = new Uri("http://localhost:11434/v1/"), // remember to add /v1/ at the end to connect to Ollama openai server
 });
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Tool_Call_With_Ollama_And_LiteLLM.cs

+using System.ClientModel;
 using AutoGen.Core;
 using AutoGen.OpenAI.Extension;
 using OpenAI;
@@ -45,7 +46,7 @@ public class Tool_Call_With_Ollama_And_LiteLLM

 // api-key is not required for local server
 // so you can use any string here
-var openAIClient = new OpenAIClient("api-key", new OpenAIClientOptions
+var openAIClient = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
 {
 Endpoint = new Uri("http://localhost:4000"),
 });
@@ -25,7 +25,7 @@ public class Use_Json_Mode
 name: "assistant",
 systemMessage: "You are a helpful assistant designed to output JSON.",
 seed: 0, // explicitly set a seed to enable deterministic output
-responseFormat: ChatResponseFormat.JsonObject) // set response format to JSON object to enable JSON mode
+responseFormat: ChatResponseFormat.CreateJsonObjectFormat()) // set response format to JSON object to enable JSON mode
 .RegisterMessageConnector()
 .RegisterPrintMessage();
 #endregion create_agent
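For reference, the JSON-mode value is now produced by a factory method rather than a static property; a short sketch of setting it directly on ChatCompletionOptions (in the sample above the same value is passed through OpenAIChatAgent's responseFormat parameter instead):

```csharp
using OpenAI.Chat;

// OpenAI 2.1.0-beta.1: ChatResponseFormat.CreateJsonObjectFormat() replaces the
// former ChatResponseFormat.JsonObject static property.
var options = new ChatCompletionOptions
{
    ResponseFormat = ChatResponseFormat.CreateJsonObjectFormat(),
};
```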
@@ -44,7 +44,7 @@ public class OpenAIChatAgent : IStreamingAgent
 /// <param name="systemMessage">system message</param>
 /// <param name="temperature">temperature</param>
 /// <param name="maxTokens">max tokens to generated</param>
-/// <param name="responseFormat">response format, set it to <see cref="ChatResponseFormat.JsonObject"/> to enable json mode.</param>
+/// <param name="responseFormat">response format, set it to <see cref="ChatResponseFormat"/> to enable json mode.</param>
 /// <param name="seed">seed to use, set it to enable deterministic output</param>
 /// <param name="functions">functions</param>
 public OpenAIChatAgent(
@@ -138,12 +138,11 @@ public class OpenAIChatAgent : IStreamingAgent
 {
 Seed = this.options.Seed,
 Temperature = options?.Temperature ?? this.options.Temperature,
-MaxTokens = options?.MaxToken ?? this.options.MaxTokens,
+MaxOutputTokenCount = options?.MaxToken ?? this.options.MaxOutputTokenCount,
 ResponseFormat = this.options.ResponseFormat,
 FrequencyPenalty = this.options.FrequencyPenalty,
-FunctionChoice = this.options.FunctionChoice,
 IncludeLogProbabilities = this.options.IncludeLogProbabilities,
-ParallelToolCallsEnabled = this.options.ParallelToolCallsEnabled,
+AllowParallelToolCalls = this.options.AllowParallelToolCalls,
 PresencePenalty = this.options.PresencePenalty,
 ToolChoice = this.options.ToolChoice,
 TopLogProbabilityCount = this.options.TopLogProbabilityCount,
@@ -183,9 +182,9 @@ public class OpenAIChatAgent : IStreamingAgent
 if (options?.OutputSchema is not null)
 {
 option.ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
-name: options.OutputSchema.GetTitle() ?? throw new ArgumentException("Output schema must have a title"),
+jsonSchemaFormatName: options.OutputSchema.GetTitle() ?? throw new ArgumentException("Output schema must have a title"),
 jsonSchema: BinaryData.FromObjectAsJson(options.OutputSchema),
-description: options.OutputSchema.GetDescription());
+jsonSchemaFormatDescription: options.OutputSchema.GetDescription());
 }

 return option;
@@ -201,7 +200,7 @@ public class OpenAIChatAgent : IStreamingAgent
 var options = new ChatCompletionOptions
 {
 Temperature = temperature,
-MaxTokens = maxTokens,
+MaxOutputTokenCount = maxTokens,
 Seed = seed,
 ResponseFormat = responseFormat,
 };
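To summarize the ChatCompletionOptions renames these hunks adapt to (all values below are placeholders), a hedged sketch against OpenAI 2.1.0-beta.1:

```csharp
using System;
using OpenAI.Chat;

// Renames in OpenAI 2.1.0-beta.1:
//   MaxTokens                -> MaxOutputTokenCount
//   ParallelToolCallsEnabled -> AllowParallelToolCalls
var options = new ChatCompletionOptions
{
    Temperature = 0.7f,
    MaxOutputTokenCount = 1024,
    AllowParallelToolCalls = true,
};

// Structured output now uses jsonSchemaFormatName / jsonSchemaFormatDescription:
options.ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat(
    jsonSchemaFormatName: "my_schema",                          // placeholder name
    jsonSchema: BinaryData.FromString("{\"type\":\"object\"}"), // placeholder schema
    jsonSchemaFormatDescription: "A placeholder schema.");
```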
@@ -1,7 +1,8 @@
-<Project Sdk="Microsoft.NET.Sdk">
+<Project Sdk="Microsoft.NET.Sdk">
 <PropertyGroup>
 <TargetFrameworks>$(PackageTargetFrameworks)</TargetFrameworks>
 <RootNamespace>AutoGen.OpenAI</RootNamespace>
+<NoWarn>$(NoWarn);OPENAI001</NoWarn>
 </PropertyGroup>

 <Import Project="$(RepoRoot)/nuget/nuget-package.props" />
@@ -110,7 +110,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 {
 currentToolName += toolCall.FunctionName;
 currentToolArguments += toolCall.FunctionArgumentsUpdate;
-currentToolId += toolCall.Id;
+currentToolId += toolCall.ToolCallId;

 yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name);
 }
@@ -118,8 +118,8 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 {
 toolCalls.Add(new ToolCall(currentToolName, currentToolArguments) { ToolCallId = currentToolId });
 currentToolName = toolCall.FunctionName;
-currentToolArguments = toolCall.FunctionArgumentsUpdate;
-currentToolId = toolCall.Id;
+currentToolArguments = toolCall.FunctionArgumentsUpdate.ToString();
+currentToolId = toolCall.ToolCallId;
 currentIndex = toolCall.Index;

 yield return new ToolCallMessageUpdate(currentToolName, currentToolArguments, from: agent.Name);
@@ -185,7 +185,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 // if tool calls is not empty, return ToolCallMessage
 if (chatCompletion.ToolCalls is { Count: > 0 })
 {
-var toolCalls = chatCompletion.ToolCalls.Select(tc => new ToolCall(tc.FunctionName, tc.FunctionArguments) { ToolCallId = tc.Id });
+var toolCalls = chatCompletion.ToolCalls.Select(tc => new ToolCall(tc.FunctionName, tc.FunctionArguments.ToString()) { ToolCallId = tc.Id });
 return new ToolCallMessage(toolCalls, from)
 {
 Content = textContent?.Kind switch
@@ -196,20 +196,6 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 };
 }

-// else, process function call.
-// This is deprecated and will be removed in the future.
-if (chatCompletion.FunctionCall is ChatFunctionCall fc)
-{
-return new ToolCallMessage(fc.FunctionName, fc.FunctionArguments, from)
-{
-Content = textContent?.Kind switch
-{
-_ when textContent?.Kind == ChatMessageContentPartKind.Text => textContent.Text,
-_ => null,
-},
-};
-}
-
 // if the content is text, return TextMessage
 if (textContent?.Kind == ChatMessageContentPartKind.Text)
 {
@@ -298,7 +284,7 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa

 IEnumerable<ChatMessageContentPart> items = message.Content.Select<IMessage, ChatMessageContentPart>(ci => ci switch
 {
-TextMessage text => ChatMessageContentPart.CreateTextMessageContentPart(text.Content),
+TextMessage text => ChatMessageContentPart.CreateTextPart(text.Content),
 ImageMessage image => this.CreateChatMessageImageContentItemFromImageMessage(image),
 _ => throw new NotImplementedException(),
 });
@@ -309,8 +295,8 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 private ChatMessageContentPart CreateChatMessageImageContentItemFromImageMessage(ImageMessage message)
 {
 return message.Data is null && message.Url is not null
-? ChatMessageContentPart.CreateImageMessageContentPart(new Uri(message.Url))
-: ChatMessageContentPart.CreateImageMessageContentPart(message.Data, message.Data?.MediaType);
+? ChatMessageContentPart.CreateImagePart(new Uri(message.Url))
+: ChatMessageContentPart.CreateImagePart(message.Data, message.Data?.MediaType);
 }

 private IEnumerable<ChatMessage> ProcessToolCallMessage(IAgent agent, ToolCallMessage message)
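The factory renames above (CreateTextMessageContentPart → CreateTextPart, CreateImageMessageContentPart → CreateImagePart) also show up when building multimodal messages by hand; a minimal sketch with a placeholder prompt and image URL, assuming the UserChatMessage constructor that accepts content parts:

```csharp
using System;
using OpenAI.Chat;

// OpenAI 2.1.0-beta.1 factory names.
var parts = new ChatMessageContentPart[]
{
    ChatMessageContentPart.CreateTextPart("Describe this image."),
    ChatMessageContentPart.CreateImagePart(new Uri("https://example.com/image.png")),
};

var userMessage = new UserChatMessage(parts);
```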
@@ -320,12 +306,26 @@ public class OpenAIChatRequestMessageConnector : IMiddleware, IStreamingMiddlewa
 throw new ArgumentException("ToolCallMessage is not supported when message.From is not the same with agent");
 }

-var toolCallParts = message.ToolCalls.Select((tc, i) => ChatToolCall.CreateFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, tc.FunctionArguments));
+var toolCallParts = message.ToolCalls.Select((tc, i) => ChatToolCall.CreateFunctionToolCall(tc.ToolCallId ?? $"{tc.FunctionName}_{i}", tc.FunctionName, BinaryData.FromString(tc.FunctionArguments)));
 var textContent = message.GetContent() ?? null;

 // Don't set participant name for assistant when it is tool call
 // fix https://github.com/microsoft/autogen/issues/3437
-var chatRequestMessage = new AssistantChatMessage(toolCallParts, textContent);
+AssistantChatMessage chatRequestMessage;
+
+if (string.IsNullOrEmpty(textContent) is true)
+{
+chatRequestMessage = new AssistantChatMessage(toolCallParts);
+}
+else
+{
+chatRequestMessage = new AssistantChatMessage(textContent);
+
+foreach (var toolCallPart in toolCallParts)
+{
+chatRequestMessage.ToolCalls.Add(toolCallPart);
+}
+}

 return [chatRequestMessage];
 }
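The connector change above follows from ChatToolCall.FunctionArguments (and the streaming FunctionArgumentsUpdate) becoming BinaryData in OpenAI 2.1.0-beta.1, while AutoGen keeps tool-call arguments as strings. A hedged sketch of the round trip, with a placeholder id, function name, and JSON payload:

```csharp
using System;
using OpenAI.Chat;

string argumentsJson = "{\"location\":\"Seattle\"}"; // placeholder arguments

// string -> BinaryData when building the tool call for the OpenAI SDK.
ChatToolCall toolCall = ChatToolCall.CreateFunctionToolCall(
    "call_1",                             // placeholder tool-call id
    "GetWeather",                         // placeholder function name
    BinaryData.FromString(argumentsJson));

// BinaryData -> string when reading the arguments back on the AutoGen side.
string roundTripped = toolCall.FunctionArguments.ToString();
```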
@@ -23,7 +23,7 @@ public class AzureOpenAIConfig : ILLMConfig

 internal ChatClient CreateChatClient()
 {
-var client = new AzureOpenAIClient(new System.Uri(this.Endpoint), this.ApiKey);
+var client = new AzureOpenAIClient(new System.Uri(this.Endpoint), new System.ClientModel.ApiKeyCredential(this.ApiKey));

 return client.GetChatClient(DeploymentName);
 }
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // LMStudioConfig.cs
 using System;
+using System.ClientModel;
 using OpenAI;
 using OpenAI.Chat;

@@ -33,7 +34,7 @@ public class LMStudioConfig : ILLMConfig

 internal ChatClient CreateChatClient()
 {
-var client = new OpenAIClient("api-key", new OpenAIClientOptions
+var client = new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
 {
 Endpoint = this.Uri,
 });
@@ -7,13 +7,13 @@
 "Role": "system",
 "Content": [
 {
-"Kind": {},
+"Kind": 0,
 "Text": "You are a helpful AI assistant",
-"Refusal": null,
 "ImageUri": null,
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 }
 ]
 }
@@ -26,13 +26,13 @@
 "Role": "user",
 "Content": [
 {
-"Kind": {},
+"Kind": 0,
 "Text": "Hello",
-"Refusal": null,
 "ImageUri": null,
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 }
 ],
 "Name": "user",
@@ -52,19 +52,17 @@
 "Role": "assistant",
 "Content": [
 {
-"Kind": {},
+"Kind": 0,
 "Text": "How can I help you?",
-"Refusal": null,
 "ImageUri": null,
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 }
 ],
 "Name": "assistant",
-"TooCall": [],
-"FunctionCallName": null,
-"FunctionCallArguments": null
+"TooCall": []
 }
 ]
 },
@@ -75,13 +73,13 @@
 "Role": "user",
 "Content": [
 {
-"Kind": {},
+"Kind": 2,
 "Text": null,
-"Refusal": null,
 "ImageUri": "https://example.com/image.png",
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 }
 ],
 "Name": "user",
@@ -101,22 +99,22 @@
 "Role": "user",
 "Content": [
 {
-"Kind": {},
+"Kind": 0,
 "Text": "Hello",
-"Refusal": null,
 "ImageUri": null,
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 },
 {
-"Kind": {},
+"Kind": 2,
 "Text": null,
-"Refusal": null,
 "ImageUri": "https://example.com/image.png",
 "ImageBytes": null,
 "ImageBytesMediaType": null,
-"ImageDetail": null
+"ImageDetailLevel": null,
+"Refusal": null
 }
 ],
 "Name": "user",
@@ -144,12 +142,10 @@
 {
 "Type": "Function",
 "Name": "test",
-"Arguments": "test",
+"Arguments": "dGVzdA==",
 "Id": "test"
 }
-],
-"FunctionCallName": null,
-"FunctionCallArguments": null
+]
 }
 ]
 },
@@ -189,18 +185,16 @@
 {
 "Type": "Function",
 "Name": "test",
-"Arguments": "test",
+"Arguments": "dGVzdA==",
 "Id": "test_0"
 },
 {
 "Type": "Function",
 "Name": "test",
-"Arguments": "test",
+"Arguments": "dGVzdA==",
 "Id": "test_1"
 }
-],
-"FunctionCallName": null,
-"FunctionCallArguments": null
+]
 }
 ]
 },
@@ -215,12 +209,10 @@
 {
 "Type": "Function",
 "Name": "test",
-"Arguments": "test",
+"Arguments": "dGVzdA==",
 "Id": "test"
 }
-],
-"FunctionCallName": null,
-"FunctionCallArguments": null
+]
 },
 {
 "Role": "tool",
@@ -2,6 +2,7 @@
 // MathClassTest.cs

 using System;
+using System.ClientModel;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading;
@@ -102,7 +103,7 @@ teacher, please create the next math question
 var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new ArgumentException("AZURE_OPENAI_API_KEY is not set");
 var endPoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new ArgumentException("AZURE_OPENAI_ENDPOINT is not set");
 var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new ArgumentException("AZURE_OPENAI_DEPLOY_NAME is not set");
-var openaiClient = new AzureOpenAIClient(new Uri(endPoint), new Azure.AzureKeyCredential(key));
+var openaiClient = new AzureOpenAIClient(new Uri(endPoint), new ApiKeyCredential(key));
 var teacher = await CreateTeacherAgentAsync(openaiClient, deployName);
 var student = await CreateStudentAssistantAgentAsync(openaiClient, deployName);

@@ -2,6 +2,7 @@
 // OpenAIChatAgentTest.cs

 using System;
+using System.ClientModel;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
@@ -50,7 +51,7 @@ public partial class OpenAIChatAgentTest
 reply.Should().BeOfType<MessageEnvelope<ChatCompletion>>();
 reply.As<MessageEnvelope<ChatCompletion>>().From.Should().Be("assistant");
 reply.As<MessageEnvelope<ChatCompletion>>().Content.Role.Should().Be(ChatMessageRole.Assistant);
-reply.As<MessageEnvelope<ChatCompletion>>().Content.Usage.TotalTokens.Should().BeGreaterThan(0);
+reply.As<MessageEnvelope<ChatCompletion>>().Content.Usage.TotalTokenCount.Should().BeGreaterThan(0);

 // test streaming
 var streamingReply = openAIChatAgent.GenerateStreamingReplyAsync(new[] { chatMessageContent });
@@ -239,7 +240,7 @@ public partial class OpenAIChatAgentTest
 var options = new ChatCompletionOptions()
 {
 Temperature = 0.7f,
-MaxTokens = 1,
+MaxOutputTokenCount = 1,
 };

 var openAIChatAgent = new OpenAIChatAgent(
@@ -261,7 +262,7 @@ public partial class OpenAIChatAgentTest
 var options = new ChatCompletionOptions()
 {
 Temperature = 0.7f,
-MaxTokens = 1,
+MaxOutputTokenCount = 1,
 };

 var agentName = "assistant";
@@ -314,6 +315,6 @@ public partial class OpenAIChatAgentTest
 {
 var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new Exception("Please set AZURE_OPENAI_ENDPOINT environment variable.");
 var key = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY") ?? throw new Exception("Please set AZURE_OPENAI_API_KEY environment variable.");
-return new AzureOpenAIClient(new Uri(endpoint), new Azure.AzureKeyCredential(key));
+return new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(key));
 }
 }
@@ -286,7 +286,7 @@ public class OpenAIMessageTests
 var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.First();
 functionToolCall.FunctionName.Should().Be("test");
 functionToolCall.Id.Should().Be("test");
-functionToolCall.FunctionArguments.Should().Be("test");
+functionToolCall.FunctionArguments.ToString().Should().Be("test");
 return await innerAgent.GenerateReplyAsync(msgs);
 })
 .RegisterMiddleware(middleware);
@@ -321,7 +321,7 @@ public class OpenAIMessageTests
 var functionToolCall = (ChatToolCall)chatRequestMessage.ToolCalls.ElementAt(i);
 functionToolCall.FunctionName.Should().Be("test");
 functionToolCall.Id.Should().Be($"test_{i}");
-functionToolCall.FunctionArguments.Should().Be("test");
+functionToolCall.FunctionArguments.ToString().Should().Be("test");
 }
 return await innerAgent.GenerateReplyAsync(msgs);
 })
@@ -449,7 +449,7 @@ public class OpenAIMessageTests
 var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.First();
 functionToolCall.FunctionName.Should().Be("test");
 functionToolCall.Id.Should().Be("test");
-functionToolCall.FunctionArguments.Should().Be("test");
+functionToolCall.FunctionArguments.ToString().Should().Be("test");
 return await innerAgent.GenerateReplyAsync(msgs);
 })
 .RegisterMiddleware(middleware);
@@ -481,7 +481,7 @@ public class OpenAIMessageTests
 var functionToolCall = (ChatToolCall)toolCallRequestMessage.ToolCalls.ElementAt(i);
 functionToolCall.FunctionName.Should().Be("test");
 functionToolCall.Id.Should().Be($"test_{i}");
-functionToolCall.FunctionArguments.Should().Be("test");
+functionToolCall.FunctionArguments.ToString().Should().Be("test");
 }

 for (int i = 1; i < msgs.Count(); i++)
@@ -630,8 +630,6 @@ public class OpenAIMessageTests
 _ => throw new System.NotImplementedException(),
 };
 }),
-FunctionCallName = assistantMessage.FunctionCall?.FunctionName,
-FunctionCallArguments = assistantMessage.FunctionCall?.FunctionArguments,
 };
 }

@@ -1,11 +1,10 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// KernelFunctionMiddlewareTests.cs
+// Copyright (c) Microsoft. All rights reserved.

+using System.ClientModel;
 using AutoGen.Core;
 using AutoGen.OpenAI;
 using AutoGen.OpenAI.Extension;
 using AutoGen.Tests;
-using Azure;
 using Azure.AI.OpenAI;
 using FluentAssertions;
 using Microsoft.SemanticKernel;
@@ -22,7 +21,7 @@ public class KernelFunctionMiddlewareTests
 var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable.");
 var openaiClient = new AzureOpenAIClient(
 endpoint: new Uri(endpoint),
-credential: new AzureKeyCredential(key));
+credential: new ApiKeyCredential(key));

 var kernel = new Kernel();
 var plugin = kernel.ImportPluginFromType<TestPlugin>();
@@ -68,7 +67,7 @@ public class KernelFunctionMiddlewareTests
 var deployName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOY_NAME") ?? throw new Exception("Please set AZURE_OPENAI_DEPLOY_NAME environment variable.");
 var openaiClient = new AzureOpenAIClient(
 endpoint: new Uri(endpoint),
-credential: new AzureKeyCredential(key));
+credential: new ApiKeyCredential(key));

 var kernel = new Kernel();
 var getWeatherMethod = kernel.CreateFunctionFromMethod((string location) => $"The weather in {location} is sunny.", functionName: "GetWeather", description: "Get the weather for a location.");
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // OpenAIChatCompletionMiddlewareTests.cs

+using System.ClientModel;
 using System.ClientModel.Primitives;
 using AutoGen.Core;
 using AutoGen.OpenAI;
@@ -73,7 +74,7 @@ public class OpenAIChatCompletionMiddlewareTests

 private OpenAIClient CreateOpenAIClient(HttpClient client)
 {
-return new OpenAIClient("api-key", new OpenAIClientOptions
+return new OpenAIClient(new ApiKeyCredential("api-key"), new OpenAIClientOptions
 {
 Transport = new HttpClientPipelineTransport(client),
 });
@@ -0,0 +1,4 @@
+# Release Notes for AutoGen.Net v0.2.2 🚀
+
+## Improvements 🌟
+- **Update OpenAI and Semantic Kernel to the latest version**: Updated OpenAI and Semantic Kernel to the latest version ([#3792](https://github.com/microsoft/autogen/pull/3792))
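For downstream AutoGen.Net users picking up 0.2.2, the most visible effect of the dependency bump is in how the underlying OpenAI client is constructed before it is handed to OpenAIChatAgent. A hedged sketch, with the API-key source and model name as placeholders:

```csharp
using System;
using System.ClientModel;
using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using OpenAI;

// OpenAI 2.1.0-beta.1 construction: ApiKeyCredential instead of a raw key string.
var client = new OpenAIClient(
    new ApiKeyCredential(Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? "api-key"),
    new OpenAIClientOptions());

// The AutoGen.OpenAI surface itself is unchanged.
var agent = new OpenAIChatAgent(
        chatClient: client.GetChatClient("gpt-4o-mini"), // placeholder model name
        name: "assistant",
        systemMessage: "You are a helpful AI assistant")
    .RegisterMessageConnector()
    .RegisterPrintMessage();
```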
@@ -1,5 +1,8 @@
+- name: 0.2.2
+  href: 0.2.2.md
+
 - name: 0.2.1
-  href: 0.2.1.md
+  href: 0.2.1.md

 - name: 0.2.0
   href: 0.2.0.md