You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
I can't accomplish streaming of the content through the Chat class. I have made these extensions to allow that.
Every token in the response fires an event, so you can present it to the user one token at a time.
using CSharpToJsonSchema;
using System;
using System.Collections.Generic;
using System.Text;
namespace Ollama
{
public partial class Chat
{
    /// <summary>
    /// Sends a message in a given role (User by default) to the currently selected model,
    /// raising <paramref name="onContent"/> for every streamed response chunk.
    /// </summary>
    /// <param name="message">The message to send, or null to continue from the current history (used for the automatic tool-call follow-up).</param>
    /// <param name="role">The role in which the message should be sent.</param>
    /// <param name="imagesAsBase64">Base64-encoded images to send to the model.</param>
    /// <param name="cancellationToken">The token to cancel the operation with.</param>
    /// <param name="stream">Whether to stream the response.</param>
    /// <param name="onContent">
    /// Called once per received chunk; the bool is true only for the first chunk of a response,
    /// the string is that chunk's content.
    /// </param>
    /// <returns>The complete response message (after any automatic tool calls).</returns>
    /// <exception cref="InvalidOperationException">Thrown when the server response contains no message.</exception>
    public async Task<Ollama.Message> SendAsyncWithEvent(
        string? message = null,
        MessageRole role = MessageRole.User,
        IEnumerable<string>? imagesAsBase64 = null,
        CancellationToken cancellationToken = default,
        bool stream = false,
        Action<bool, string>? onContent = null)
    {
        if (message != null)
        {
            History.Add(new Ollama.Message
            {
                Content = message,
                Role = role,
                Images = imagesAsBase64?.ToList() ?? [],
            });
        }

        var answer = await Client.Chat.GenerateChatCompletionAsyncWithEvent(
            model: Model,
            messages: History,
            format: ResponseFormat,
            options: RequestOptions,
            stream: stream,
            keepAlive: KeepAlive,
            tools: Tools.Count == 0 ? null : Tools,
            cancellationToken: cancellationToken,
            onContent: onContent).WaitAsync().ConfigureAwait(false);

        if (answer.Message == null)
        {
            throw new InvalidOperationException("Response message was null.");
        }
        History.Add(answer.Message);

        if (AutoCallTools && answer.Message.ToolCalls?.Count > 0)
        {
            foreach (var call in answer.Message.ToolCalls)
            {
                string funcName = call.Function?.Name ?? string.Empty;
                if (string.IsNullOrEmpty(funcName)) continue;
                if (!Calls.TryGetValue(funcName, out var func)) continue;

                var json = await func(call.Function?.Arguments.AsJson() ?? string.Empty, cancellationToken).ConfigureAwait(false);
                History.Add(json.AsToolMessage());
            }

            // Re-send with the tool results now in History. Forward stream/onContent so the
            // follow-up answer is also streamed to the subscriber (the original dropped both,
            // silencing the event for the final answer after a tool call).
            return await SendAsyncWithEvent(
                cancellationToken: cancellationToken,
                stream: stream,
                onContent: onContent).ConfigureAwait(false);
        }

        return answer.Message;
    }
}
public partial class ChatClient
{
    /// <summary>
    /// Generates a chat completion and raises <paramref name="onContent"/> for every
    /// response chunk as it arrives, in addition to yielding each chunk.
    /// </summary>
    /// <param name="model">The model name to use.</param>
    /// <param name="messages">The conversation history to send.</param>
    /// <param name="format">Optional response format.</param>
    /// <param name="options">Optional request options.</param>
    /// <param name="stream">Whether the server should stream the response.</param>
    /// <param name="keepAlive">How long to keep the model loaded, in seconds.</param>
    /// <param name="tools">Optional tool definitions.</param>
    /// <param name="cancellationToken">The token to cancel the operation with.</param>
    /// <param name="onContent">
    /// Called once per chunk; the bool is true only for the first chunk, the string is the
    /// chunk's content (empty when the chunk carries no content, e.g. the final frame).
    /// </param>
    public async global::System.Collections.Generic.IAsyncEnumerable<global::Ollama.GenerateChatCompletionResponse> GenerateChatCompletionAsyncWithEvent(
        string model,
        global::System.Collections.Generic.IList<global::Ollama.Message> messages,
        global::Ollama.ResponseFormat? format = default,
        global::Ollama.RequestOptions? options = default,
        bool? stream = default,
        int? keepAlive = default,
        global::System.Collections.Generic.IList<global::Ollama.Tool>? tools = default,
        [global::System.Runtime.CompilerServices.EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default,
        global::System.Action<bool, string>? onContent = null)
    {
        var __request = new global::Ollama.GenerateChatCompletionRequest
        {
            Model = model,
            Messages = messages,
            Format = format,
            Options = options,
            Stream = stream,
            KeepAlive = keepAlive,
            Tools = tools,
        };

        var __enumerable = GenerateChatCompletionAsync(
            request: __request,
            cancellationToken: cancellationToken);

        // True only for the first chunk so subscribers can detect the start of a response.
        bool isFirstChunk = true;
        await foreach (var __response in __enumerable.ConfigureAwait(false))
        {
            // Content may be absent on some chunks; never pass null into the callback,
            // whose string parameter is non-nullable.
            onContent?.Invoke(isFirstChunk, __response.Message?.Content ?? string.Empty);
            isFirstChunk = false;
            yield return __response;
        }
    }
}
}
The text was updated successfully, but these errors were encountered:
I can't accomplish streaming of the content through the Chat class. I have made these extensions to allow that.
Every token in the response fires an event, so you can present it to the user one token at a time.
using CSharpToJsonSchema;
using System;
using System.Collections.Generic;
using System.Text;
namespace Ollama
{
public partial class Chat
{
///
/// Sends a message in a given role(User by default) to the currently selected model
///
/// The role in which the message should be sent
/// The message to send
/// Base64 encoded images to send to the model
/// The token to cancel the operation with
/// Whether to stream the response
/// An action to be called when a response is received
public async Task<Ollama.Message> SendAsyncWithEvent(
string? message = null,
MessageRole role = MessageRole.User,
IEnumerable? imagesAsBase64 = null,
CancellationToken cancellationToken = default,
bool stream = false,
Action<bool, string> onContent = default)
{
if (message != null)
{
History.Add(new Ollama.Message
{
Content = message,
Role = role,
Images = imagesAsBase64?.ToList() ?? [],
});
}
}
The text was updated successfully, but these errors were encountered: