Add string constructor to OllamaEmbeddingGenerator #5562

Merged 1 commit on Oct 24, 2024
@@ -21,6 +21,18 @@ public sealed class OllamaEmbeddingGenerator : IEmbeddingGenerator<string, Embed
/// <summary>The <see cref="HttpClient"/> to use for sending requests.</summary>
private readonly HttpClient _httpClient;

+/// <summary>Initializes a new instance of the <see cref="OllamaEmbeddingGenerator"/> class.</summary>
+/// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
+/// <param name="modelId">
+/// The id of the model to use. This may also be overridden per request via <see cref="ChatOptions.ModelId"/>.
+/// Either this parameter or <see cref="ChatOptions.ModelId"/> must provide a valid model id.
+/// </param>
+/// <param name="httpClient">An <see cref="HttpClient"/> instance to use for HTTP operations.</param>
+public OllamaEmbeddingGenerator(string endpoint, string? modelId = null, HttpClient? httpClient = null)
+    : this(new Uri(Throw.IfNull(endpoint)), modelId, httpClient)
+{
+}
+
/// <summary>Initializes a new instance of the <see cref="OllamaEmbeddingGenerator"/> class.</summary>
/// <param name="endpoint">The endpoint URI where Ollama is hosted.</param>
/// <param name="modelId">
@@ -59,7 +71,8 @@ public void Dispose()
}

/// <inheritdoc />
-public async Task<GeneratedEmbeddings<Embedding<float>>> GenerateAsync(IEnumerable<string> values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default)
+public async Task<GeneratedEmbeddings<Embedding<float>>> GenerateAsync(
+    IEnumerable<string> values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(values);

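For context, a minimal usage sketch of the new string-based constructor (a hypothetical console snippet; it assumes the Microsoft.Extensions.AI.Ollama package is referenced and an Ollama instance is running locally with the all-minilm model, matching the endpoint and model used in the tests below):

```csharp
using System;
using Microsoft.Extensions.AI;

// The new overload takes the endpoint as a string and forwards to the existing
// Uri-based constructor after validating that the string is not null.
using IEmbeddingGenerator<string, Embedding<float>> generator =
    new OllamaEmbeddingGenerator("http://localhost:11434", "all-minilm");

// Generate embeddings for a batch of inputs via the IEmbeddingGenerator interface.
GeneratedEmbeddings<Embedding<float>> embeddings =
    await generator.GenerateAsync(["hello, world!"]);

Console.WriteLine($"Generated {embeddings.Count} embedding(s).");
```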
@@ -17,14 +17,14 @@ public class OllamaEmbeddingGeneratorTests
[Fact]
public void Ctor_InvalidArgs_Throws()
{
-Assert.Throws<ArgumentNullException>("endpoint", () => new OllamaEmbeddingGenerator(null!));
-Assert.Throws<ArgumentException>("modelId", () => new OllamaEmbeddingGenerator(new("http://localhost"), " "));
+Assert.Throws<ArgumentNullException>("endpoint", () => new OllamaEmbeddingGenerator((string)null!));
+Assert.Throws<ArgumentException>("modelId", () => new OllamaEmbeddingGenerator(new Uri("http://localhost"), " "));
}

[Fact]
public void GetService_SuccessfullyReturnsUnderlyingClient()
{
-using OllamaEmbeddingGenerator generator = new(new("http://localhost"));
+using OllamaEmbeddingGenerator generator = new("http://localhost");

Assert.Same(generator, generator.GetService<OllamaEmbeddingGenerator>());
Assert.Same(generator, generator.GetService<IEmbeddingGenerator<string, Embedding<float>>>());
@@ -76,7 +76,7 @@ public async Task GetEmbeddingsAsync_ExpectedRequestResponse()

using VerbatimHttpHandler handler = new(Input, Output);
using HttpClient httpClient = new(handler);
-using IEmbeddingGenerator<string, Embedding<float>> generator = new OllamaEmbeddingGenerator(new("http://localhost:11434"), "all-minilm", httpClient);
+using IEmbeddingGenerator<string, Embedding<float>> generator = new OllamaEmbeddingGenerator("http://localhost:11434", "all-minilm", httpClient);

var response = await generator.GenerateAsync([
"hello, world!",
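A note on the test changes above: once both a Uri-based and a string-based constructor exist, a bare `null!` argument and a target-typed `new("http://localhost")` are ambiguous between the two overloads, which is presumably why the updated Ctor_InvalidArgs_Throws test casts the null to `(string)` and spells out `new Uri("http://localhost")` explicitly.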