mirror of https://github.com/microsoft/autogen.git
add lmstudio agent to assistant agent

* fix #2609
* update updatelog
* Update Directory.Build.props
parent: ecc4113a7e
commit: f75103f254
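For context, a minimal sketch of what this change enables: building an AssistantAgent on top of a local LM Studio server. It mirrors the new test added below; the host, port, prompt, and the GenerateReplyAsync/GetContent helpers are assumptions about the surrounding AutoGen.Net API rather than values taken from this commit.

    using System;
    using System.Threading.Tasks;
    using AutoGen;
    using AutoGen.Core;
    using AutoGen.LMStudio;

    public static class LMStudioAssistantExample
    {
        public static async Task RunAsync()
        {
            // Placeholder endpoint: assumes an LM Studio server is listening locally.
            var lmStudioConfig = new LMStudioConfig("localhost", 1234);

            var assistant = new AssistantAgent(
                name: "assistant",
                llmConfig: new ConversableAgentConfig
                {
                    ConfigList = [lmStudioConfig],
                });

            // A single user message is enough for a smoke test; the reply comes back
            // from whatever model LM Studio currently has loaded.
            var reply = await assistant.GenerateReplyAsync(
                new[] { new TextMessage(Role.User, "Hello from LM Studio") });
            Console.WriteLine(reply.GetContent());
        }
    }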
Directory.Build.props

@@ -9,6 +9,7 @@
     <Nullable>enable</Nullable>
     <SignAssembly>True</SignAssembly>
     <AssemblyOriginatorKeyFile>$(MSBuildThisFileDirectory)eng/opensource.snk</AssemblyOriginatorKeyFile>
+    <PublicKey>0024000004800000940000000602000000240000525341310004000001000100f1d038d0b85ae392ad72011df91e9343b0b5df1bb8080aa21b9424362d696919e0e9ac3a8bca24e283e10f7a569c6f443e1d4e3ebc84377c87ca5caa562e80f9932bf5ea91b7862b538e13b8ba91c7565cf0e8dfeccfea9c805ae3bda044170ecc7fc6f147aeeac422dd96aeb9eb1f5a5882aa650efe2958f2f8107d2038f2ab</PublicKey>
     <CSNoWarn>CS1998;CS1591</CSNoWarn>
     <NoWarn>$(NoWarn);$(CSNoWarn);NU5104</NoWarn>
     <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
@@ -20,4 +21,4 @@
   <PropertyGroup>
     <RepoRoot>$(MSBuildThisFileDirectory)</RepoRoot>
   </PropertyGroup>
-</Project>
+</Project>

LMStudioConfig.cs

@@ -13,6 +13,15 @@ public class LMStudioConfig : ILLMConfig
         this.Host = host;
         this.Port = port;
         this.Version = version;
+        this.Uri = new Uri($"http://{host}:{port}/v{version}");
     }
+
+    public LMStudioConfig(Uri uri)
+    {
+        this.Uri = uri;
+        this.Host = uri.Host;
+        this.Port = uri.Port;
+        this.Version = int.Parse(uri.Segments[1].TrimStart('v'));
+    }

     public string Host { get; }
@@ -21,5 +30,5 @@ public class LMStudioConfig : ILLMConfig

     public int Version { get; }

-    public Uri Uri => new Uri($"http://{Host}:{Port}/v{Version}");
+    public Uri Uri { get; }
 }

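The two constructors above are two routes to the same endpoint: one builds the Uri from host/port/version, the other parses those parts back out of a full Uri. A small sketch of the equivalence (the port is illustrative, and the version parameter is assumed to default to 1):

    using System;
    using AutoGen.LMStudio;

    class LMStudioConfigDemo
    {
        static void Main()
        {
            // Built from parts; the config composes http://localhost:1234/v1 itself.
            var fromParts = new LMStudioConfig("localhost", 1234);

            // Built from a full endpoint Uri; Host, Port and Version are parsed back out.
            var fromUri = new LMStudioConfig(new Uri("http://localhost:1234/v1"));

            Console.WriteLine(fromParts.Uri);    // http://localhost:1234/v1
            Console.WriteLine(fromUri.Host);     // localhost
            Console.WriteLine(fromUri.Port);     // 1234
            Console.WriteLine(fromUri.Version);  // 1
        }
    }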
ConversableAgent.cs

@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Threading;
 using System.Threading.Tasks;
+using AutoGen.LMStudio;
 using AutoGen.OpenAI;

 namespace AutoGen;
@@ -74,15 +75,25 @@ public class ConversableAgent : IAgent
         this.functions = llmConfig?.FunctionContracts;
     }

+    /// <summary>
+    /// For test purpose only.
+    /// </summary>
+    internal IAgent? InnerAgent => this.innerAgent;
+
     private IAgent? CreateInnerAgentFromConfigList(ConversableAgentConfig config)
     {
         IAgent? agent = null;
         foreach (var llmConfig in config.ConfigList ?? Enumerable.Empty<ILLMConfig>())
         {
-            var nextAgent = llmConfig switch
+            IAgent nextAgent = llmConfig switch
             {
                 AzureOpenAIConfig azureConfig => new GPTAgent(this.Name!, this.systemMessage, azureConfig, temperature: config.Temperature ?? 0),
                 OpenAIConfig openAIConfig => new GPTAgent(this.Name!, this.systemMessage, openAIConfig, temperature: config.Temperature ?? 0),
+                LMStudioConfig lmStudioConfig => new LMStudioAgent(
+                    name: this.Name,
+                    config: lmStudioConfig,
+                    systemMessage: this.systemMessage,
+                    temperature: config.Temperature ?? 0),
                 _ => throw new ArgumentException($"Unsupported config type {llmConfig.GetType()}"),
             };

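A note on the `var nextAgent` → `IAgent nextAgent` change above (the reasoning is inferred, not stated in the commit): once the arms of the switch expression return different concrete agent types, the expression no longer has a natural type and has to be target-typed to the shared interface. A minimal standalone illustration of the same rule:

    interface IShape { }
    class Circle : IShape { }
    class Square : IShape { }

    static class SwitchTargetTypingDemo
    {
        // 'var shape = round switch ...' would not compile: Circle and Square have no
        // best common type, and switch expressions do not infer a shared interface on
        // their own. Declaring the target type (here via the return type) fixes it.
        public static IShape Pick(bool round) =>
            round switch
            {
                true => new Circle(),
                false => new Square(),
            };
    }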
AutoGen.csproj

@@ -26,5 +26,9 @@
     <ProjectReference Include="..\AutoGen.Core\AutoGen.Core.csproj" />
     <ProjectReference Include="..\AutoGen.OpenAI\AutoGen.OpenAI.csproj" />
   </ItemGroup>
+
+  <ItemGroup>
+    <InternalsVisibleTo Include="AutoGen.Tests" />
+  </ItemGroup>

 </Project>

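The new `InternalsVisibleTo` item is what lets the test below assert on the internal `InnerAgent` property. Because these assemblies are strong-named (see `SignAssembly` and `opensource.snk` in Directory.Build.props), the friend assembly has to be identified by its full public key, which is presumably why the `<PublicKey>` property shows up in the same commit. The MSBuild item produces an assembly-level attribute roughly equivalent to hand-writing (key abbreviated here):

    // Hand-written equivalent of the generated friend-assembly declaration.
    [assembly: System.Runtime.CompilerServices.InternalsVisibleTo(
        "AutoGen.Tests, PublicKey=0024000004800000940000000602...")]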
MistralClientAgentTests.cs

@@ -37,7 +37,7 @@ public partial class MistralClientAgentTests
             model: "open-mistral-7b")
             .RegisterMessageConnector();
         var singleAgentTest = new SingleAgentTest(_output);
-        await singleAgentTest.UpperCaseTest(agent);
+        await singleAgentTest.UpperCaseTestAsync(agent);
         await singleAgentTest.UpperCaseStreamingTestAsync(agent);
     }

SingleAgentTest.cs

@@ -6,6 +6,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
+using AutoGen.LMStudio;
 using AutoGen.OpenAI;
 using Azure.AI.OpenAI;
 using FluentAssertions;
@@ -42,7 +43,7 @@ namespace AutoGen.Tests

            var agent = new GPTAgent("gpt", "You are a helpful AI assistant", config);

-           await UpperCaseTest(agent);
+           await UpperCaseTestAsync(agent);
            await UpperCaseStreamingTestAsync(agent);
        }

@@ -117,7 +118,7 @@ namespace AutoGen.Tests
            var agentWithFunction = new GPTAgent("gpt", "You are a helpful AI assistant", config, 0, functions: new[] { this.EchoAsyncFunction });

            await EchoFunctionCallTestAsync(agentWithFunction);
-           await UpperCaseTest(agentWithFunction);
+           await UpperCaseTestAsync(agentWithFunction);
        }

        [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
@@ -143,7 +144,43 @@ namespace AutoGen.Tests
                llmConfig: llmConfig);

            await EchoFunctionCallTestAsync(assistantAgent);
-           await UpperCaseTest(assistantAgent);
+           await UpperCaseTestAsync(assistantAgent);
        }

+       [Fact]
+       public async Task ItCreateAssistantAgentFromLMStudioConfigAsync()
+       {
+           var host = "http://localhost";
+           var port = 8080;
+           var lmStudioConfig = new LMStudioConfig(host, port);
+
+           var assistantAgent = new AssistantAgent(
+               name: "assistant",
+               llmConfig: new ConversableAgentConfig()
+               {
+                   ConfigList = [lmStudioConfig],
+               });
+
+           assistantAgent.Name.Should().Be("assistant");
+           assistantAgent.InnerAgent.Should().BeOfType<LMStudioAgent>();
+       }
+
+       [ApiKeyFact("LMStudio_ENDPOINT")]
+       public async Task ItTestAssistantAgentFromLMStudioConfigAsync()
+       {
+           var Uri = Environment.GetEnvironmentVariable("LMStudio_ENDPOINT") ?? throw new ArgumentException("LMStudio_ENDPOINT is not set");
+           var lmStudioConfig = new LMStudioConfig(new Uri(Uri));
+
+           var assistantAgent = new AssistantAgent(
+               name: "assistant",
+               llmConfig: new ConversableAgentConfig()
+               {
+                   ConfigList = [lmStudioConfig],
+               });
+
+           assistantAgent.Name.Should().Be("assistant");
+           assistantAgent.InnerAgent.Should().BeOfType<LMStudioAgent>();
+           await this.UpperCaseTestAsync(assistantAgent);
+       }

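The second new test is gated on a `LMStudio_ENDPOINT` environment variable through `ApiKeyFact`, which presumably skips the test when the variable is missing. The test only reads it with `Environment.GetEnvironmentVariable`, so any mechanism that sets the variable for the test process works; the value below is a placeholder for wherever the local LM Studio server is actually listening:

    // Placeholder endpoint; point it at the locally running LM Studio server.
    System.Environment.SetEnvironmentVariable("LMStudio_ENDPOINT", "http://localhost:1234/v1");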
@@ -186,7 +223,6 @@ namespace AutoGen.Tests
                });

            await EchoFunctionCallExecutionTestAsync(assistantAgent);
-           await UpperCaseTest(assistantAgent);
        }

        [ApiKeyFact("AZURE_OPENAI_API_KEY", "AZURE_OPENAI_ENDPOINT")]
@@ -206,7 +242,7 @@ namespace AutoGen.Tests

            await EchoFunctionCallExecutionStreamingTestAsync(agent);
            await EchoFunctionCallExecutionTestAsync(agent);
-           await UpperCaseTest(agent);
+           await UpperCaseTestAsync(agent);
        }

        /// <summary>
@@ -283,7 +319,7 @@ namespace AutoGen.Tests
            }
        }

-       public async Task UpperCaseTest(IAgent agent)
+       public async Task UpperCaseTestAsync(IAgent agent)
        {
            var message = new TextMessage(Role.System, "You are a helpful AI assistant that convert user message to upper case");
            var uppCaseMessage = new TextMessage(Role.User, "abcdefg");

Update log

@@ -2,6 +2,7 @@
 - [API Breaking Change] Update the return type of `IStreamingAgent.GenerateStreamingReplyAsync` from `Task<IAsyncEnumerable<IStreamingMessage>>` to `IAsyncEnumerable<IStreamingMessage>`
 - [API Breaking Change] Update the return type of `IStreamingMiddleware.InvokeAsync` from `Task<IAsyncEnumerable<IStreamingMessage>>` to `IAsyncEnumerable<IStreamingMessage>`
 - [API Breaking Change] Mark `RegisterReply`, `RegisterPreProcess` and `RegisterPostProcess` as obsolete. You can replace them with `RegisterMiddleware`
+- Fix [Issue 2609](https://github.com/microsoft/autogen/issues/2609)
 ##### Update on 0.0.12 (2024-04-22)
 - Add AutoGen.Mistral package to support Mistral.AI models
 ##### Update on 0.0.11 (2024-04-10)
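On the first breaking-change entry above: with `GenerateStreamingReplyAsync` returning `IAsyncEnumerable<IStreamingMessage>` directly, callers iterate the stream with `await foreach` instead of first awaiting a task that wraps it. A hedged sketch of the calling pattern (the parameter list is assumed, since it is not shown in this diff):

    using System.Collections.Generic;
    using System.Threading.Tasks;
    using AutoGen.Core;

    static class StreamingConsumptionSketch
    {
        public static async Task ConsumeAsync(IStreamingAgent agent, IEnumerable<IMessage> messages)
        {
            // Before this change the stream had to be awaited first:
            //   var stream = await agent.GenerateStreamingReplyAsync(messages);
            //   await foreach (var chunk in stream) { /* ... */ }

            // After: the method itself is the async stream, so it is iterated directly.
            await foreach (var chunk in agent.GenerateStreamingReplyAsync(messages))
            {
                // handle each IStreamingMessage chunk as it arrives
            }
        }
    }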