[.Net] rename Autogen.Ollama to AutoGen.Ollama and add more test cases to AutoGen.Ollama (#2772)

* update test

* add llava test

* add more tests

* rm Autogen.Ollama

* add AutoGen.ollama

* update

* rename to temp

* remove ollama

* rename

* update

* rename

* rename

* update
Xiaoyun Zhang 2024-05-23 12:15:25 -07:00 committed by GitHub
parent 4ebfb82186
commit edb2f92b0d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 738 additions and 298 deletions
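
For downstream code, the user-visible effect of the rename is the namespace (and project) casing: Autogen.Ollama becomes AutoGen.Ollama. A minimal sketch of the new using directives, with the old casing shown as a comment for contrast:

// before this commit:
// using Autogen.Ollama;

// after this commit:
using AutoGen.Ollama;
using AutoGen.Ollama.Extension;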

View File

@ -141,7 +141,7 @@ csharp_preserve_single_line_statements = true
csharp_preserve_single_line_blocks = true
# Code block
csharp_prefer_braces = false:none
csharp_prefer_braces = true:warning
# Using statements
csharp_using_directive_placement = outside_namespace:error
@ -173,6 +173,11 @@ dotnet_diagnostic.CS1573.severity = none
# disable CS1570: XML comment has badly formed XML
dotnet_diagnostic.CS1570.severity = none
dotnet_diagnostic.IDE0035.severity = warning # Remove unreachable code
dotnet_diagnostic.IDE0161.severity = warning # Use file-scoped namespace
csharp_style_var_elsewhere = true:suggestion # Prefer 'var' everywhere
# disable check for generated code
[*.generated.cs]
generated_code = true
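
The csharp_prefer_braces switch from false:none to true:warning means un-braced single-statement bodies are now reported at warning level, and IDE0161 nudges files toward file-scoped namespaces. An illustrative before/after fragment for the braces rule (matching the pattern this commit fixes in OllamaAgent below):

// before: allowed under false:none
if (string.IsNullOrWhiteSpace(line)) continue;

// after: true:warning expects braces
if (string.IsNullOrWhiteSpace(line))
{
    continue;
}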

View File

@ -35,12 +35,13 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Mistral.Tests", "te
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.SemanticKernel.Tests", "test\AutoGen.SemanticKernel.Tests\AutoGen.SemanticKernel.Tests.csproj", "{1DFABC4A-8458-4875-8DCB-59F3802DAC65}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.OpenAI.Tests", "test\AutoGen.OpenAI.Tests\AutoGen.OpenAI.Tests.csproj", "{D36A85F9-C172-487D-8192-6BFE5D05B4A7}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AutoGen.DotnetInteractive.Tests", "test\AutoGen.DotnetInteractive.Tests\AutoGen.DotnetInteractive.Tests.csproj", "{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.OpenAI.Tests", "test\AutoGen.OpenAI.Tests\AutoGen.OpenAI.Tests.csproj", "{D36A85F9-C172-487D-8192-6BFE5D05B4A7}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Autogen.Ollama", "src\Autogen.Ollama\Autogen.Ollama.csproj", "{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.DotnetInteractive.Tests", "test\AutoGen.DotnetInteractive.Tests\AutoGen.DotnetInteractive.Tests.csproj", "{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Autogen.Ollama.Tests", "test\Autogen.Ollama.Tests\Autogen.Ollama.Tests.csproj", "{C24FDE63-952D-4F8E-A807-AF31D43AD675}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama", "src\AutoGen.Ollama\AutoGen.Ollama.csproj", "{9F9E6DED-3D92-4970-909A-70FC11F1A665}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AutoGen.Ollama.Tests", "test\AutoGen.Ollama.Tests\AutoGen.Ollama.Tests.csproj", "{03E31CAA-3728-48D3-B936-9F11CF6C18FE}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@ -96,14 +97,6 @@ Global
{15441693-3659-4868-B6C1-B106F52FF3BA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{15441693-3659-4868-B6C1-B106F52FF3BA}.Release|Any CPU.Build.0 = Release|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1}.Release|Any CPU.Build.0 = Release|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C24FDE63-952D-4F8E-A807-AF31D43AD675}.Release|Any CPU.Build.0 = Release|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1DFABC4A-8458-4875-8DCB-59F3802DAC65}.Release|Any CPU.ActiveCfg = Release|Any CPU
@ -116,6 +109,14 @@ Global
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E}.Release|Any CPU.Build.0 = Release|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9F9E6DED-3D92-4970-909A-70FC11F1A665}.Release|Any CPU.Build.0 = Release|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{03E31CAA-3728-48D3-B936-9F11CF6C18FE}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@ -133,11 +134,11 @@ Global
{63445BB7-DBB9-4AEF-9D6F-98BBE75EE1EC} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{6585D1A4-3D97-4D76-A688-1933B61AEB19} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{15441693-3659-4868-B6C1-B106F52FF3BA} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{A4EFA175-44CC-44A9-B93E-1C7B6FAC38F1} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{C24FDE63-952D-4F8E-A807-AF31D43AD675} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{1DFABC4A-8458-4875-8DCB-59F3802DAC65} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{D36A85F9-C172-487D-8192-6BFE5D05B4A7} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{B61388CA-DC73-4B7F-A7B2-7B9A86C9229E} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
{9F9E6DED-3D92-4970-909A-70FC11F1A665} = {18BF8DD7-0585-48BF-8F97-AD333080CE06}
{03E31CAA-3728-48D3-B936-9F11CF6C18FE} = {F823671B-3ECA-4AE6-86DA-25E920D3FE64}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {93384647-528D-46C8-922C-8DB36A382F0B}

View File

@ -49,7 +49,9 @@ public class ImageMessage : IMessage
public string BuildDataUri()
{
if (this.Data is null)
{
throw new NullReferenceException($"{nameof(Data)}");
}
return $"data:{this.Data.MediaType};base64,{Convert.ToBase64String(this.Data.ToArray())}";
}

View File

@ -13,7 +13,7 @@ public class ChatMessage
/// </summary>
/// <param name="role">role.</param>
/// <param name="content">content.</param>
public ChatMessage(RoleEnum? role = default(RoleEnum?), string? content = null)
public ChatMessage(RoleEnum? role = default, string? content = null)
{
this.Role = role;
this.Content = content;

View File

@ -13,7 +13,7 @@ using System.Threading;
using System.Threading.Tasks;
using AutoGen.Core;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
/// <summary>
/// An agent that can interact with ollama models.
@ -21,7 +21,6 @@ namespace Autogen.Ollama;
public class OllamaAgent : IStreamingAgent
{
private readonly HttpClient _httpClient;
public string Name { get; }
private readonly string _modelName;
private readonly string _systemMessage;
private readonly OllamaReplyOptions? _replyOptions;
@ -36,13 +35,14 @@ public class OllamaAgent : IStreamingAgent
_systemMessage = systemMessage;
_replyOptions = replyOptions;
}
public async Task<IMessage> GenerateReplyAsync(
IEnumerable<IMessage> messages, GenerateReplyOptions? options = null, CancellationToken cancellation = default)
{
ChatRequest request = await BuildChatRequest(messages, options);
request.Stream = false;
using (HttpResponseMessage? response = await _httpClient
.SendAsync(BuildRequestMessage(request), HttpCompletionOption.ResponseContentRead, cancellation))
var httpRequest = BuildRequest(request);
using (HttpResponseMessage? response = await _httpClient.SendAsync(httpRequest, HttpCompletionOption.ResponseContentRead, cancellation))
{
response.EnsureSuccessStatusCode();
Stream? streamResponse = await response.Content.ReadAsStreamAsync();
@ -52,6 +52,7 @@ public class OllamaAgent : IStreamingAgent
return output;
}
}
public async IAsyncEnumerable<IStreamingMessage> GenerateStreamingReplyAsync(
IEnumerable<IMessage> messages,
GenerateReplyOptions? options = null,
@ -59,7 +60,7 @@ public class OllamaAgent : IStreamingAgent
{
ChatRequest request = await BuildChatRequest(messages, options);
request.Stream = true;
HttpRequestMessage message = BuildRequestMessage(request);
HttpRequestMessage message = BuildRequest(request);
using (HttpResponseMessage? response = await _httpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, cancellationToken))
{
response.EnsureSuccessStatusCode();
@ -69,22 +70,28 @@ public class OllamaAgent : IStreamingAgent
while (!reader.EndOfStream && !cancellationToken.IsCancellationRequested)
{
string? line = await reader.ReadLineAsync();
if (string.IsNullOrWhiteSpace(line)) continue;
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
ChatResponseUpdate? update = JsonSerializer.Deserialize<ChatResponseUpdate>(line);
if (update != null)
if (update is { Done: false })
{
yield return new MessageEnvelope<ChatResponseUpdate>(update, from: Name);
}
else
{
var finalUpdate = JsonSerializer.Deserialize<ChatResponse>(line) ?? throw new Exception("Failed to deserialize response");
if (update is { Done: false }) continue;
ChatResponse? chatMessage = JsonSerializer.Deserialize<ChatResponse>(line);
if (chatMessage == null) continue;
yield return new MessageEnvelope<ChatResponse>(chatMessage, from: Name);
yield return new MessageEnvelope<ChatResponse>(finalUpdate, from: Name);
}
}
}
}
public string Name { get; }
private async Task<ChatRequest> BuildChatRequest(IEnumerable<IMessage> messages, GenerateReplyOptions? options)
{
var request = new ChatRequest
@ -152,49 +159,22 @@ public class OllamaAgent : IStreamingAgent
}
private async Task<List<Message>> BuildChatHistory(IEnumerable<IMessage> messages)
{
if (!messages.Any(m => m.IsSystemMessage()))
var history = messages.Select(m => m switch
{
var systemMessage = new TextMessage(Role.System, _systemMessage, from: Name);
messages = new[] { systemMessage }.Concat(messages);
IMessage<Message> chatMessage => chatMessage.Content,
_ => throw new ArgumentException("Invalid message type")
});
// if there's no system message in the history, add one to the beginning
if (!history.Any(m => m.Role == "system"))
{
history = new[] { new Message() { Role = "system", Value = _systemMessage } }.Concat(history);
}
var collection = new List<Message>();
foreach (IMessage? message in messages)
{
Message item;
switch (message)
{
case TextMessage tm:
item = new Message { Role = tm.Role.ToString(), Value = tm.Content };
break;
case ImageMessage im:
string base64Image = await ImageUrlToBase64(im.Url!);
item = new Message { Role = im.Role.ToString(), Images = [base64Image] };
break;
case MultiModalMessage mm:
var textsGroupedByRole = mm.Content.OfType<TextMessage>().GroupBy(tm => tm.Role)
.ToDictionary(g => g.Key, g => string.Join(Environment.NewLine, g.Select(tm => tm.Content)));
string content = string.Join($"{Environment.NewLine}", textsGroupedByRole
.Select(g => $"{g.Key}{Environment.NewLine}:{g.Value}"));
IEnumerable<Task<string>> imagesConversionTasks = mm.Content
.OfType<ImageMessage>()
.Select(async im => await ImageUrlToBase64(im.Url!));
string[]? imagesBase64 = await Task.WhenAll(imagesConversionTasks);
item = new Message { Role = mm.Role.ToString(), Value = content, Images = imagesBase64 };
break;
default:
throw new NotSupportedException();
}
collection.Add(item);
}
return collection;
return history.ToList();
}
private static HttpRequestMessage BuildRequestMessage(ChatRequest request)
private static HttpRequestMessage BuildRequest(ChatRequest request)
{
string serialized = JsonSerializer.Serialize(request);
return new HttpRequestMessage(HttpMethod.Post, OllamaConsts.ChatCompletionEndpoint)
@ -202,15 +182,4 @@ public class OllamaAgent : IStreamingAgent
Content = new StringContent(serialized, Encoding.UTF8, OllamaConsts.JsonMediaType)
};
}
private async Task<string> ImageUrlToBase64(string imageUrl)
{
if (string.IsNullOrWhiteSpace(imageUrl))
{
throw new ArgumentException("required parameter", nameof(imageUrl));
}
byte[] imageBytes = await _httpClient.GetByteArrayAsync(imageUrl);
return imageBytes != null
? Convert.ToBase64String(imageBytes)
: throw new InvalidOperationException("no image byte array");
}
}
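
Putting the reworked agent together: it serializes a ChatRequest to OllamaConsts.ChatCompletionEndpoint and returns the reply wrapped in a MessageEnvelope<ChatResponse>. A minimal non-streaming sketch, assuming a local Ollama host and a placeholder model name (the constructor shape matches the tests in this commit):

using System;
using System.Net.Http;
using AutoGen.Core;
using AutoGen.Ollama;

var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:11434") }; // assumed host
var agent = new OllamaAgent(httpClient, "assistant", "llama3");                      // assumed model

// wrap a raw Ollama Message in an envelope, as the new tests do
var question = MessageEnvelope.Create(new Message("user", "hey how are you"), from: "user");
IMessage reply = await agent.GenerateReplyAsync(new IMessage[] { question });
// reply is a MessageEnvelope<ChatResponse>; the text lives on Content.Message.Value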

View File

@ -2,6 +2,7 @@
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
<RootNamespace>AutoGen.Ollama</RootNamespace>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
</PropertyGroup>

View File

@ -1,11 +1,10 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// ChatRequest.cs
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class ChatRequest
{
@ -19,7 +18,7 @@ public class ChatRequest
/// the messages of the chat, this can be used to keep a chat memory
/// </summary>
[JsonPropertyName("messages")]
public IList<Message> Messages { get; set; } = Array.Empty<Message>();
public IList<Message> Messages { get; set; } = [];
/// <summary>
/// the format to return a response in. Currently, the only accepted value is json

View File

@ -3,7 +3,7 @@
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class ChatResponse : ChatResponseUpdate
{

View File

@ -0,0 +1,21 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// ChatResponseUpdate.cs
using System.Text.Json.Serialization;
namespace AutoGen.Ollama;
public class ChatResponseUpdate
{
[JsonPropertyName("model")]
public string Model { get; set; } = string.Empty;
[JsonPropertyName("created_at")]
public string CreatedAt { get; set; } = string.Empty;
[JsonPropertyName("message")]
public Message? Message { get; set; }
[JsonPropertyName("done")]
public bool Done { get; set; }
}
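
ChatResponseUpdate models a single line of Ollama's streamed chat output; OllamaAgent deserializes each line as an update and re-reads the final done line as the richer ChatResponse. A rough sketch of that per-line handling; the sample JSON follows Ollama's documented wire format and is an assumption here, not part of this commit:

using System;
using System.Text.Json;
using AutoGen.Ollama;

// hypothetical intermediate line from the streaming endpoint (field names are assumed)
var line = "{\"model\":\"llama3\",\"created_at\":\"2024-05-23T19:15:25Z\",\"message\":{\"role\":\"assistant\",\"content\":\"Hel\"},\"done\":false}";

ChatResponseUpdate? update = JsonSerializer.Deserialize<ChatResponseUpdate>(line);
if (update is { Done: false })
{
    // intermediate chunk: Message carries the partial assistant text
    Console.WriteLine(update.Message?.Value);
}
else
{
    // the final line (done == true) is re-read as ChatResponse for the extra fields
    ChatResponse? final = JsonSerializer.Deserialize<ChatResponse>(line);
    Console.WriteLine(final?.Done);
}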

View File

@ -4,25 +4,20 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
public class ChatResponseUpdate
{
[JsonPropertyName("model")]
public string Model { get; set; } = string.Empty;
[JsonPropertyName("created_at")]
public string CreatedAt { get; set; } = string.Empty;
[JsonPropertyName("message")]
public Message? Message { get; set; }
[JsonPropertyName("done")]
public bool Done { get; set; }
}
namespace AutoGen.Ollama;
public class Message
{
public Message()
{
}
public Message(string role, string value)
{
Role = role;
Value = value;
}
/// <summary>
/// the role of the message, either system, user or assistant
/// </summary>

View File

@ -3,7 +3,7 @@
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
//https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
public class ModelReplyOptions

View File

@ -3,12 +3,12 @@
using AutoGen.Core;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public enum FormatType
{
None,
Json
Json,
}
public class OllamaReplyOptions : GenerateReplyOptions
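
OllamaReplyOptions carries the per-request knobs, and FormatType.Json asks Ollama for JSON-formatted output. A short sketch, reusing the agent and messages from the OllamaAgent sketch earlier and mirroring the JSON-format test in this commit:

// `agent` and `messages` are assumed from the earlier OllamaAgent sketch
IMessage reply = await agent.GenerateReplyAsync(messages, new OllamaReplyOptions
{
    Format = FormatType.Json, // request JSON output from the model
});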

View File

@ -4,7 +4,7 @@
using System.Threading;
using System.Threading.Tasks;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public interface ITextEmbeddingService
{

View File

@ -9,7 +9,7 @@ using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class OllamaTextEmbeddingService : ITextEmbeddingService
{

View File

@ -3,7 +3,7 @@
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class TextEmbeddingsRequest
{

View File

@ -3,7 +3,7 @@
using System.Text.Json.Serialization;
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class TextEmbeddingsResponse
{

View File

@ -0,0 +1,39 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaAgentExtension.cs
using AutoGen.Core;
namespace AutoGen.Ollama.Extension;
public static class OllamaAgentExtension
{
/// <summary>
/// Register an <see cref="OllamaMessageConnector"/> to the <see cref="OllamaAgent"/>
/// </summary>
/// <param name="connector">the connector to use. If null, a new instance of <see cref="OllamaMessageConnector"/> will be created.</param>
public static MiddlewareStreamingAgent<OllamaAgent> RegisterMessageConnector(
this OllamaAgent agent, OllamaMessageConnector? connector = null)
{
if (connector == null)
{
connector = new OllamaMessageConnector();
}
return agent.RegisterStreamingMiddleware(connector);
}
/// <summary>
/// Register an <see cref="OllamaMessageConnector"/> to the <see cref="MiddlewareAgent{T}"/> where T is <see cref="OllamaAgent"/>
/// </summary>
/// <param name="connector">the connector to use. If null, a new instance of <see cref="OllamaMessageConnector"/> will be created.</param>
public static MiddlewareStreamingAgent<OllamaAgent> RegisterMessageConnector(
this MiddlewareStreamingAgent<OllamaAgent> agent, OllamaMessageConnector? connector = null)
{
if (connector == null)
{
connector = new OllamaMessageConnector();
}
return agent.RegisterStreamingMiddleware(connector);
}
}
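
Once the connector is registered, the agent accepts AutoGen.Core message types (TextMessage, ImageMessage, MultiModalMessage) directly instead of raw Ollama Message envelopes. A sketch of typical wiring, assuming a local host and the llava model used by the new image tests:

using System;
using System.Net.Http;
using AutoGen.Core;
using AutoGen.Ollama;
using AutoGen.Ollama.Extension;

var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:11434") }; // assumed host
var agent = new OllamaAgent(httpClient, "assistant", "llava:latest")
    .RegisterMessageConnector(); // wires OllamaMessageConnector in as streaming middleware

// AutoGen message types are now translated to Ollama messages by the connector
var reply = await agent.SendAsync(new TextMessage(Role.User, "hey how are you"));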

View File

@ -0,0 +1,183 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaMessageConnector.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using AutoGen.Core;
namespace AutoGen.Ollama;
public class OllamaMessageConnector : IStreamingMiddleware
{
public string Name => nameof(OllamaMessageConnector);
public async Task<IMessage> InvokeAsync(MiddlewareContext context, IAgent agent,
CancellationToken cancellationToken = default)
{
var messages = ProcessMessage(context.Messages, agent);
IMessage reply = await agent.GenerateReplyAsync(messages, context.Options, cancellationToken);
return reply switch
{
IMessage<ChatResponse> messageEnvelope when messageEnvelope.Content.Message?.Value is string content => new TextMessage(Role.Assistant, content, messageEnvelope.From),
IMessage<ChatResponse> messageEnvelope when messageEnvelope.Content.Message?.Value is null => throw new InvalidOperationException("Message content is null"),
_ => reply
};
}
public async IAsyncEnumerable<IStreamingMessage> InvokeAsync(MiddlewareContext context, IStreamingAgent agent,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var messages = ProcessMessage(context.Messages, agent);
var chunks = new List<ChatResponseUpdate>();
await foreach (var update in agent.GenerateStreamingReplyAsync(messages, context.Options, cancellationToken))
{
if (update is IStreamingMessage<ChatResponseUpdate> chatResponseUpdate)
{
var response = chatResponseUpdate.Content switch
{
_ when chatResponseUpdate.Content.Message?.Value is string content => new TextMessageUpdate(Role.Assistant, content, chatResponseUpdate.From),
_ => null,
};
if (response != null)
{
chunks.Add(chatResponseUpdate.Content);
yield return response;
}
}
else
{
yield return update;
}
}
if (chunks.Count == 0)
{
yield break;
}
// if the chunks are not empty, aggregate them into a single message
var messageContent = string.Join(string.Empty, chunks.Select(c => c.Message?.Value));
var message = new Message
{
Role = "assistant",
Value = messageContent,
};
yield return MessageEnvelope.Create(message, agent.Name);
}
private IEnumerable<IMessage> ProcessMessage(IEnumerable<IMessage> messages, IAgent agent)
{
return messages.SelectMany(m =>
{
if (m is IMessage<Message> messageEnvelope)
{
return [m];
}
else
{
return m switch
{
TextMessage textMessage => ProcessTextMessage(textMessage, agent),
ImageMessage imageMessage => ProcessImageMessage(imageMessage, agent),
MultiModalMessage multiModalMessage => ProcessMultiModalMessage(multiModalMessage, agent),
_ => [m],
};
}
});
}
private IEnumerable<IMessage> ProcessMultiModalMessage(MultiModalMessage multiModalMessage, IAgent agent)
{
var messages = new List<IMessage>();
foreach (var message in multiModalMessage.Content)
{
messages.AddRange(message switch
{
TextMessage textMessage => ProcessTextMessage(textMessage, agent),
ImageMessage imageMessage => ProcessImageMessage(imageMessage, agent),
_ => throw new InvalidOperationException("Invalid message type"),
});
}
return messages;
}
private IEnumerable<IMessage> ProcessImageMessage(ImageMessage imageMessage, IAgent agent)
{
byte[]? data = imageMessage.Data?.ToArray();
if (data is null)
{
if (imageMessage.Url is null)
{
throw new InvalidOperationException("Invalid ImageMessage, the data or url must be provided");
}
var uri = new Uri(imageMessage.Url);
// download the image from the URL
using var client = new HttpClient();
var response = client.GetAsync(uri).Result;
if (!response.IsSuccessStatusCode)
{
throw new HttpRequestException($"Failed to download the image from {uri}");
}
data = response.Content.ReadAsByteArrayAsync().Result;
}
var base64Image = Convert.ToBase64String(data);
var message = imageMessage.From switch
{
null when imageMessage.Role == Role.User => new Message { Role = "user", Images = [base64Image] },
null => throw new InvalidOperationException("Invalid Role, the role must be user"),
_ when imageMessage.From != agent.Name => new Message { Role = "user", Images = [base64Image] },
_ => throw new InvalidOperationException("The from field must be null or the agent name"),
};
return [MessageEnvelope.Create(message, agent.Name)];
}
private IEnumerable<IMessage> ProcessTextMessage(TextMessage textMessage, IAgent agent)
{
if (textMessage.Role == Role.System)
{
var message = new Message
{
Role = "system",
Value = textMessage.Content
};
return [MessageEnvelope.Create(message, agent.Name)];
}
else if (textMessage.From == agent.Name)
{
var message = new Message
{
Role = "assistant",
Value = textMessage.Content
};
return [MessageEnvelope.Create(message, agent.Name)];
}
else
{
var message = textMessage.From switch
{
null when textMessage.Role == Role.User => new Message { Role = "user", Value = textMessage.Content },
null when textMessage.Role == Role.Assistant => new Message { Role = "assistant", Value = textMessage.Content },
null => throw new InvalidOperationException("Invalid Role"),
_ when textMessage.From != agent.Name => new Message { Role = "user", Value = textMessage.Content },
_ => throw new InvalidOperationException("The from field must be null or the agent name"),
};
return [MessageEnvelope.Create(message, agent.Name)];
}
}
}

View File

@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaConsts.cs
namespace Autogen.Ollama;
namespace AutoGen.Ollama;
public class OllamaConsts
{

View File

@ -1,56 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaMessageConnector.cs
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using AutoGen.Core;
namespace Autogen.Ollama;
public class OllamaMessageConnector : IMiddleware, IStreamingMiddleware
{
public string Name => nameof(OllamaMessageConnector);
public async Task<IMessage> InvokeAsync(MiddlewareContext context, IAgent agent,
CancellationToken cancellationToken = default)
{
IEnumerable<IMessage> messages = context.Messages;
IMessage reply = await agent.GenerateReplyAsync(messages, context.Options, cancellationToken);
switch (reply)
{
case IMessage<ChatResponse> messageEnvelope:
Message? message = messageEnvelope.Content.Message;
return new TextMessage(Role.Assistant, message != null ? message.Value : "EMPTY_CONTENT", messageEnvelope.From);
default:
throw new NotSupportedException();
}
}
public async IAsyncEnumerable<IStreamingMessage> InvokeAsync(MiddlewareContext context, IStreamingAgent agent,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
await foreach (IStreamingMessage? update in agent.GenerateStreamingReplyAsync(context.Messages, context.Options, cancellationToken))
{
switch (update)
{
case IMessage<ChatResponse> complete:
{
string? textContent = complete.Content.Message?.Value;
yield return new TextMessage(Role.Assistant, textContent!, complete.From);
break;
}
case IMessage<ChatResponseUpdate> updatedMessage:
{
string? textContent = updatedMessage.Content.Message?.Value;
yield return new TextMessageUpdate(Role.Assistant, textContent, updatedMessage.From);
break;
}
default:
throw new InvalidOperationException("Message type not supported.");
}
}
}
}

View File

@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>$(TestTargetFramework)</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="ApprovalTests" Version="$(ApprovalTestVersion)" />
<PackageReference Include="FluentAssertions" Version="$(FluentAssertionVersion)" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="$(MicrosoftNETTestSdkVersion)" />
<PackageReference Include="xunit" Version="$(XUnitVersion)" />
<PackageReference Include="xunit.runner.console" Version="$(XUnitVersion)" />
<PackageReference Include="xunit.runner.visualstudio" Version="$(XUnitVersion)" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\AutoGen.Ollama\AutoGen.Ollama.csproj" />
<ProjectReference Include="..\AutoGen.Tests\AutoGen.Tests.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="images\image.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="images\square.png">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@ -0,0 +1,224 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaAgentTests.cs
using System.Text.Json;
using AutoGen.Core;
using AutoGen.Ollama.Extension;
using AutoGen.Tests;
using FluentAssertions;
namespace AutoGen.Ollama.Tests;
public class OllamaAgentTests
{
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateReplyAsync_ReturnsValidMessage_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var message = new Message("user", "hey how are you");
var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) };
IMessage result = await ollamaAgent.GenerateReplyAsync(messages);
result.Should().NotBeNull();
result.Should().BeOfType<MessageEnvelope<ChatResponse>>();
result.From.Should().Be(ollamaAgent.Name);
}
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateReplyAsync_ReturnsValidJsonMessageContent_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var message = new Message("user", "What color is the sky at different times of the day? Respond using JSON");
var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) };
IMessage result = await ollamaAgent.GenerateReplyAsync(messages, new OllamaReplyOptions
{
Format = FormatType.Json
});
result.Should().NotBeNull();
result.Should().BeOfType<MessageEnvelope<ChatResponse>>();
result.From.Should().Be(ollamaAgent.Name);
string jsonContent = ((MessageEnvelope<ChatResponse>)result).Content.Message!.Value;
bool isValidJson = IsValidJsonMessage(jsonContent);
isValidJson.Should().BeTrue();
}
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateStreamingReplyAsync_ReturnsValidMessages_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var msg = new Message("user", "hey how are you");
var messages = new IMessage[] { MessageEnvelope.Create(msg, from: modelName) };
IStreamingMessage? finalReply = default;
await foreach (IStreamingMessage message in ollamaAgent.GenerateStreamingReplyAsync(messages))
{
message.Should().NotBeNull();
message.From.Should().Be(ollamaAgent.Name);
var streamingMessage = (IMessage<ChatResponseUpdate>)message;
if (streamingMessage.Content.Done)
{
finalReply = message;
break;
}
else
{
streamingMessage.Content.Message.Should().NotBeNull();
streamingMessage.Content.Done.Should().BeFalse();
}
}
finalReply.Should().BeOfType<MessageEnvelope<ChatResponse>>();
var update = ((MessageEnvelope<ChatResponse>)finalReply!).Content;
update.Done.Should().BeTrue();
update.TotalDuration.Should().BeGreaterThan(0);
}
[ApiKeyFact("OLLAMA_HOST")]
public async Task ItReturnValidMessageUsingLLavaAsync()
{
var host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
var modelName = "llava:latest";
var ollamaAgent = BuildOllamaAgent(host, modelName);
var imagePath = Path.Combine("images", "image.png");
var base64Image = Convert.ToBase64String(File.ReadAllBytes(imagePath));
var message = new Message()
{
Role = "user",
Value = "What's the color of the background in this image",
Images = [base64Image],
};
var messages = new IMessage[] { MessageEnvelope.Create(message, from: modelName) };
var reply = await ollamaAgent.GenerateReplyAsync(messages);
reply.Should().BeOfType<MessageEnvelope<ChatResponse>>();
var chatResponse = ((MessageEnvelope<ChatResponse>)reply).Content;
chatResponse.Message.Should().NotBeNull();
}
[ApiKeyFact("OLLAMA_HOST")]
public async Task ItCanProcessMultiModalMessageUsingLLavaAsync()
{
var host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
var modelName = "llava:latest";
var ollamaAgent = BuildOllamaAgent(host, modelName)
.RegisterMessageConnector();
var image = Path.Combine("images", "image.png");
var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png");
var imageMessage = new ImageMessage(Role.User, binaryData);
var textMessage = new TextMessage(Role.User, "What's in this image?");
var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]);
var reply = await ollamaAgent.SendAsync(multiModalMessage);
reply.Should().BeOfType<TextMessage>();
reply.GetRole().Should().Be(Role.Assistant);
reply.GetContent().Should().NotBeNullOrEmpty();
reply.From.Should().Be(ollamaAgent.Name);
}
[ApiKeyFact("OLLAMA_HOST")]
public async Task ItCanProcessImageMessageUsingLLavaAsync()
{
var host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
var modelName = "llava:latest";
var ollamaAgent = BuildOllamaAgent(host, modelName)
.RegisterMessageConnector();
var image = Path.Combine("images", "image.png");
var binaryData = BinaryData.FromBytes(File.ReadAllBytes(image), "image/png");
var imageMessage = new ImageMessage(Role.User, binaryData);
var reply = await ollamaAgent.SendAsync(imageMessage);
reply.Should().BeOfType<TextMessage>();
reply.GetRole().Should().Be(Role.Assistant);
reply.GetContent().Should().NotBeNullOrEmpty();
reply.From.Should().Be(ollamaAgent.Name);
}
[ApiKeyFact("OLLAMA_HOST")]
public async Task ItReturnValidStreamingMessageUsingLLavaAsync()
{
var host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
var modelName = "llava:latest";
var ollamaAgent = BuildOllamaAgent(host, modelName);
var squareImagePath = Path.Combine("images", "square.png");
var base64Image = Convert.ToBase64String(File.ReadAllBytes(squareImagePath));
var imageMessage = new Message()
{
Role = "user",
Value = "What's in this image?",
Images = [base64Image],
};
var messages = new IMessage[] { MessageEnvelope.Create(imageMessage, from: modelName) };
IStreamingMessage? finalReply = default;
await foreach (IStreamingMessage message in ollamaAgent.GenerateStreamingReplyAsync(messages))
{
message.Should().NotBeNull();
message.From.Should().Be(ollamaAgent.Name);
var streamingMessage = (IMessage<ChatResponseUpdate>)message;
if (streamingMessage.Content.Done)
{
finalReply = message;
break;
}
else
{
streamingMessage.Content.Message.Should().NotBeNull();
streamingMessage.Content.Done.Should().BeFalse();
}
}
finalReply.Should().BeOfType<MessageEnvelope<ChatResponse>>();
var update = ((MessageEnvelope<ChatResponse>)finalReply!).Content;
update.Done.Should().BeTrue();
update.TotalDuration.Should().BeGreaterThan(0);
}
private static bool IsValidJsonMessage(string input)
{
try
{
JsonDocument.Parse(input);
return true;
}
catch (JsonException)
{
return false;
}
catch (Exception ex)
{
Console.WriteLine("An unexpected exception occurred: " + ex.Message);
return false;
}
}
private static OllamaAgent BuildOllamaAgent(string host, string modelName)
{
var httpClient = new HttpClient
{
BaseAddress = new Uri(host)
};
return new OllamaAgent(httpClient, "TestAgent", modelName);
}
}

View File

@ -0,0 +1,153 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaMessageTests.cs
using AutoGen.Core;
using AutoGen.Ollama;
using AutoGen.Tests;
using FluentAssertions;
using Xunit;
using Message = AutoGen.Ollama.Message;
namespace Autogen.Ollama.Tests;
public class OllamaMessageTests
{
[Fact]
public async Task ItProcessUserTextMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(1);
var innerMessage = msgs.First();
innerMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)innerMessage;
message.Content.Value.Should().Be("Hello");
message.Content.Images.Should().BeNullOrEmpty();
message.Content.Role.Should().Be("user");
return await innerAgent.GenerateReplyAsync(msgs);
})
.RegisterMiddleware(messageConnector);
// when from is null and role is user
await agent.SendAsync("Hello");
// when from is user and role is user
var userMessage = new TextMessage(Role.User, "Hello", from: "user");
await agent.SendAsync(userMessage);
// when from is user but role is assistant
userMessage = new TextMessage(Role.Assistant, "Hello", from: "user");
await agent.SendAsync(userMessage);
}
[Fact]
public async Task ItProcessAssistantTextMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(1);
var innerMessage = msgs.First();
innerMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)innerMessage;
message.Content.Value.Should().Be("Hello");
message.Content.Images.Should().BeNullOrEmpty();
message.Content.Role.Should().Be("assistant");
return await innerAgent.GenerateReplyAsync(msgs);
})
.RegisterMiddleware(messageConnector);
// when from is null and role is assistant
var assistantMessage = new TextMessage(Role.Assistant, "Hello");
await agent.SendAsync(assistantMessage);
// when from is assistant and role is assistant
assistantMessage = new TextMessage(Role.Assistant, "Hello", from: "assistant");
await agent.SendAsync(assistantMessage);
// when from is assistant but role is user
assistantMessage = new TextMessage(Role.User, "Hello", from: "assistant");
await agent.SendAsync(assistantMessage);
}
[Fact]
public async Task ItProcessSystemTextMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(1);
var innerMessage = msgs.First();
innerMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)innerMessage;
message.Content.Value.Should().Be("Hello");
message.Content.Images.Should().BeNullOrEmpty();
message.Content.Role.Should().Be("system");
return await innerAgent.GenerateReplyAsync(msgs);
})
.RegisterMiddleware(messageConnector);
// when role is system
var systemMessage = new TextMessage(Role.System, "Hello");
await agent.SendAsync(systemMessage);
}
[Fact]
public async Task ItProcessImageMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(1);
var innerMessage = msgs.First();
innerMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)innerMessage;
message.Content.Images!.Count.Should().Be(1);
message.Content.Role.Should().Be("user");
return await innerAgent.GenerateReplyAsync(msgs);
})
.RegisterMiddleware(messageConnector);
var square = Path.Combine("images", "square.png");
BinaryData imageBinaryData = BinaryData.FromBytes(File.ReadAllBytes(square), "image/png");
var imageMessage = new ImageMessage(Role.User, imageBinaryData);
await agent.SendAsync(imageMessage);
}
[Fact]
public async Task ItProcessMultiModalMessageAsync()
{
var messageConnector = new OllamaMessageConnector();
var agent = new EchoAgent("assistant")
.RegisterMiddleware(async (msgs, _, innerAgent, ct) =>
{
msgs.Count().Should().Be(2);
var textMessage = msgs.First();
textMessage.Should().BeOfType<MessageEnvelope<Message>>();
var message = (IMessage<Message>)textMessage;
message.Content.Role.Should().Be("user");
var imageMessage = msgs.Last();
imageMessage.Should().BeOfType<MessageEnvelope<Message>>();
message = (IMessage<Message>)imageMessage;
message.Content.Role.Should().Be("user");
message.Content.Images!.Count.Should().Be(1);
return await innerAgent.GenerateReplyAsync(msgs);
})
.RegisterMiddleware(messageConnector);
var square = Path.Combine("images", "square.png");
BinaryData imageBinaryData = BinaryData.FromBytes(File.ReadAllBytes(square), "image/png");
var imageMessage = new ImageMessage(Role.User, imageBinaryData);
var textMessage = new TextMessage(Role.User, "Hello");
var multiModalMessage = new MultiModalMessage(Role.User, [textMessage, imageMessage]);
await agent.SendAsync(multiModalMessage);
}
}

View File

@ -4,7 +4,7 @@
using AutoGen.Tests;
using FluentAssertions;
namespace Autogen.Ollama.Tests;
namespace AutoGen.Ollama.Tests;
public class OllamaTextEmbeddingServiceTests
{

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:300b7c9d6ba0c23a3e52fbd2e268141ddcca0434a9fb9dcf7e58e7e903d36dcf
size 2126185

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8323d0b8eceb752e14c29543b2e28bb2fc648ed9719095c31b7708867a4dc918
size 491

View File

@ -1,33 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.8.0" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.7.0"/>
<PackageReference Include="xunit" Version="2.4.2"/>
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.5">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.0">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\src\Autogen.Ollama\Autogen.Ollama.csproj" />
<ProjectReference Include="..\AutoGen.Tests\AutoGen.Tests.csproj" />
</ItemGroup>
</Project>

View File

@ -1,102 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// OllamaAgentTests.cs
using System.Text.Json;
using AutoGen.Core;
using AutoGen.Tests;
using FluentAssertions;
namespace Autogen.Ollama.Tests;
public class OllamaAgentTests
{
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateReplyAsync_ReturnsValidMessage_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var messages = new IMessage[] { new TextMessage(Role.User, "Hello, how are you") };
IMessage result = await ollamaAgent.GenerateReplyAsync(messages);
result.Should().NotBeNull();
result.Should().BeOfType<MessageEnvelope<ChatResponse>>();
result.From.Should().Be(ollamaAgent.Name);
}
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateReplyAsync_ReturnsValidJsonMessageContent_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var messages = new IMessage[] { new TextMessage(Role.User, "Hello, how are you") };
IMessage result = await ollamaAgent.GenerateReplyAsync(messages, new OllamaReplyOptions
{
Format = FormatType.Json
});
result.Should().NotBeNull();
result.Should().BeOfType<MessageEnvelope<ChatResponse>>();
result.From.Should().Be(ollamaAgent.Name);
string jsonContent = ((MessageEnvelope<ChatResponse>)result).Content.Message!.Value;
bool isValidJson = IsValidJsonMessage(jsonContent);
isValidJson.Should().BeTrue();
}
[ApiKeyFact("OLLAMA_HOST", "OLLAMA_MODEL_NAME")]
public async Task GenerateStreamingReplyAsync_ReturnsValidMessages_WhenCalled()
{
string host = Environment.GetEnvironmentVariable("OLLAMA_HOST")
?? throw new InvalidOperationException("OLLAMA_HOST is not set.");
string modelName = Environment.GetEnvironmentVariable("OLLAMA_MODEL_NAME")
?? throw new InvalidOperationException("OLLAMA_MODEL_NAME is not set.");
OllamaAgent ollamaAgent = BuildOllamaAgent(host, modelName);
var messages = new IMessage[] { new TextMessage(Role.User, "Hello how are you") };
IStreamingMessage? finalReply = default;
await foreach (IStreamingMessage message in ollamaAgent.GenerateStreamingReplyAsync(messages))
{
message.Should().NotBeNull();
message.From.Should().Be(ollamaAgent.Name);
finalReply = message;
}
finalReply.Should().BeOfType<MessageEnvelope<ChatResponse>>();
}
private static bool IsValidJsonMessage(string input)
{
try
{
JsonDocument.Parse(input);
return true;
}
catch (JsonException)
{
return false;
}
catch (Exception ex)
{
Console.WriteLine("An unexpected exception occurred: " + ex.Message);
return false;
}
}
private static OllamaAgent BuildOllamaAgent(string host, string modelName)
{
var httpClient = new HttpClient
{
BaseAddress = new Uri(host)
};
return new OllamaAgent(httpClient, "TestAgent", modelName);
}
}