diff --git a/demo/Demos/ModelManagerConsole.cs b/demo/Demos/ModelManagerConsole.cs
index 93c3992..0db9d3c 100644
--- a/demo/Demos/ModelManagerConsole.cs
+++ b/demo/Demos/ModelManagerConsole.cs
@@ -78,8 +78,10 @@ private async Task CopyModel()
private async Task CreateModel()
{
var createName = ReadInput("Enter a name for your new model:");
- var createModelFileContent = ReadInput("Enter the contents for the model file:", $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
- await foreach (var status in Ollama.CreateModelAsync(createName, createModelFileContent))
+ var fromModel = ReadInput("Enter the name of the model to create from:",
+ $"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
+ var systemPrompt = ReadInput("Set a new system prompt for the model:");
+ await foreach (var status in Ollama.CreateModelAsync(new CreateModelRequest { From = fromModel, System = systemPrompt, Model = createName }))
AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
}
diff --git a/src/Constants/Application.cs b/src/Constants/Application.cs
index 6b5a118..1f715e8 100644
--- a/src/Constants/Application.cs
+++ b/src/Constants/Application.cs
@@ -101,4 +101,8 @@ internal static class Application
public const string Completed = "completed";
public const string Embeddings = "embeddings";
public const string ParameterSize = "parameter_size";
+ public const string Messages = "messages";
+ public const string Adapters = "adapters";
+ public const string Files = "files";
+ public const string From = "from";
}
\ No newline at end of file
diff --git a/src/IOllamaApiClient.cs b/src/IOllamaApiClient.cs
index 7bf03a2..c374d45 100644
--- a/src/IOllamaApiClient.cs
+++ b/src/IOllamaApiClient.cs
@@ -132,4 +132,19 @@ public interface IOllamaApiClient
/// The token to cancel the operation with.
/// A task that represents the asynchronous operation. The task result contains the .
Task GetVersionAsync(CancellationToken cancellationToken = default);
+
+ ///
+ /// Push a file to the Ollama server to create a "blob" (Binary Large Object).
+ ///
+ /// The expected SHA256 digest of the file.
+ /// The bytes data of the file.
+ /// The token to cancel the operation with.
+ Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default);
+
+ ///
+ /// Ensures that the file blob (Binary Large Object) used when creating a model exists on the server. This checks your Ollama server and not ollama.com.
+ ///
+ /// The expected SHA256 digest of the file.
+ /// The token to cancel the operation with.
+ Task IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default);
}
\ No newline at end of file
diff --git a/src/Models/CreateModel.cs b/src/Models/CreateModel.cs
index 85c8a5e..9af5a83 100644
--- a/src/Models/CreateModel.cs
+++ b/src/Models/CreateModel.cs
@@ -1,14 +1,17 @@
using System.Text.Json.Serialization;
using OllamaSharp.Constants;
+using OllamaSharp.Models.Chat;
namespace OllamaSharp.Models;
///
-/// Create a model from a Modelfile. It is recommended to set to the
-/// content of the Modelfile rather than just set path. This is a requirement
-/// for remote create. Remote model creation must also create any file blobs,
-/// fields such as FROM and ADAPTER, explicitly with the server using Create a
-/// Blob and the value to the path indicated in the response.
+/// Create a model from:
+/// another model;
+/// a safetensors directory; or
+/// a GGUF file.
+/// If you are creating a model from a safetensors directory or from a GGUF file,
+/// you must create a blob for each of the files and then use the file name and SHA256
+/// digest associated with each blob in the `files` field.
///
/// Ollama API docs
///
@@ -23,17 +26,52 @@ public class CreateModelRequest : OllamaRequest
public string? Model { get; set; }
///
- /// Contents of the Modelfile
- /// See https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
+ /// Name of an existing model to create the new model from (optional)
///
- [JsonPropertyName(Application.ModelFile)]
- public string ModelFileContent { get; set; } = null!;
+ [JsonPropertyName(Application.From)]
+ public string? From { get; set; }
///
- /// Path to the Modelfile (optional)
+ /// A dictionary of file names to SHA256 digests of blobs to create the model from (optional)
///
- [JsonPropertyName(Application.Path)]
- public string? Path { get; set; }
+ [JsonPropertyName(Application.Files)]
+ public Dictionary? Files { get; set; }
+
+ ///
+ /// A dictionary of file names to SHA256 digests of blobs for LORA adapters (optional)
+ ///
+ [JsonPropertyName(Application.Adapters)]
+ public Dictionary? Adapters { get; set; }
+
+ ///
+ /// The prompt template for the model (optional)
+ ///
+ [JsonPropertyName(Application.Template)]
+ public string? Template { get; set; }
+
+ ///
+ /// A string or list of strings containing the license or licenses for the model (optional)
+ ///
+ [JsonPropertyName(Application.License)]
+ public object? License { get; set; }
+
+ ///
+ /// A string containing the system prompt for the model (optional)
+ ///
+ [JsonPropertyName(Application.System)]
+ public string? System { get; set; }
+
+ ///
+ /// A dictionary of parameters for the model (optional)
+ ///
+ [JsonPropertyName(Application.Parameters)]
+ public Dictionary? Parameters { get; set; }
+
+ ///
+ /// A list of message objects used to create a conversation (optional)
+ ///
+ [JsonPropertyName(Application.Messages)]
+ public IEnumerable? Messages { get; set; }
///
/// If false the response will be returned as a single response object, rather than a stream of objects (optional)
diff --git a/src/OllamaApiClient.cs b/src/OllamaApiClient.cs
index ca20966..4bfa882 100644
--- a/src/OllamaApiClient.cs
+++ b/src/OllamaApiClient.cs
@@ -219,6 +219,24 @@ public async Task GetVersionAsync(CancellationToken cancellationToken =
return Version.Parse(versionString);
}
+ ///
+ public async Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default)
+ {
+ using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "api/blobs/" + digest);
+ requestMessage.Content = new ByteArrayContent(bytes);
+ using var response = await SendToOllamaAsync(requestMessage, null, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false);
+ response.EnsureSuccessStatusCode();
+ }
+
+ ///
+ public async Task IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default)
+ {
+ using var requestMessage = new HttpRequestMessage(HttpMethod.Head, "api/blobs/" + digest);
+ requestMessage.ApplyCustomHeaders(DefaultRequestHeaders, null);
+ using var response = await _client.SendAsync(requestMessage, cancellationToken).ConfigureAwait(false);
+ return response.StatusCode == HttpStatusCode.OK;
+ }
+
private async IAsyncEnumerable GenerateCompletionAsync(GenerateRequest generateRequest, [EnumeratorCancellation] CancellationToken cancellationToken)
{
using var requestMessage = CreateRequestMessage(HttpMethod.Post, Endpoints.Generate, generateRequest);
@@ -236,15 +254,13 @@ public async Task GetVersionAsync(CancellationToken cancellationToken =
private async Task GetAsync(string endpoint, CancellationToken cancellationToken)
{
using var requestMessage = CreateRequestMessage(HttpMethod.Get, endpoint);
-
+
using var response = await SendToOllamaAsync(requestMessage, null, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false);
-
+
using var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
return (await JsonSerializer.DeserializeAsync(responseStream, IncomingJsonSerializerOptions, cancellationToken))!;
}
-
-
private async Task PostAsync(string endpoint, TRequest ollamaRequest, CancellationToken cancellationToken) where TRequest : OllamaRequest
{
diff --git a/src/OllamaApiClientExtensions.cs b/src/OllamaApiClientExtensions.cs
index 1a99dcb..3993cf6 100644
--- a/src/OllamaApiClientExtensions.cs
+++ b/src/OllamaApiClientExtensions.cs
@@ -1,3 +1,4 @@
+using System.Security.Cryptography;
using OllamaSharp.Models;
namespace OllamaSharp;
@@ -18,52 +19,6 @@ public static class OllamaApiClientExtensions
public static Task CopyModelAsync(this IOllamaApiClient client, string source, string destination, CancellationToken cancellationToken = default)
=> client.CopyModelAsync(new CopyModelRequest { Source = source, Destination = destination }, cancellationToken);
- ///
- /// Sends a request to the /api/create endpoint to create a model.
- ///
- /// The client used to execute the command.
- /// The name for the new model.
- ///
- /// The file content for the model file the new model should be built with.
- /// See .
- ///
- /// The token to cancel the operation with.
- /// An async enumerable that can be used to iterate over the streamed responses. See .
- public static IAsyncEnumerable CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, CancellationToken cancellationToken = default)
- {
- var request = new CreateModelRequest
- {
- Model = name,
- ModelFileContent = modelFileContent,
- Stream = true
- };
- return client.CreateModelAsync(request, cancellationToken);
- }
-
- ///
- /// Sends a request to the /api/create endpoint to create a model.
- ///
- /// The client used to execute the command.
- /// The name for the new model.
- ///
- /// The file content for the model file the new model should be built with.
- /// See .
- ///
- /// The name path to the model file.
- /// The token to cancel the operation with.
- /// An async enumerable that can be used to iterate over the streamed responses. See .
- public static IAsyncEnumerable CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, string path, CancellationToken cancellationToken = default)
- {
- var request = new CreateModelRequest
- {
- Model = name,
- ModelFileContent = modelFileContent,
- Path = path,
- Stream = true
- };
- return client.CreateModelAsync(request, cancellationToken);
- }
-
///
/// Sends a request to the /api/delete endpoint to delete a model.
///
@@ -144,4 +99,17 @@ public static Task EmbedAsync(this IOllamaApiClient client, strin
/// A task that represents the asynchronous operation. The task result contains the  with the model information.
public static Task ShowModelAsync(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
=> client.ShowModelAsync(new ShowModelRequest { Model = model }, cancellationToken);
+
+ ///
+ /// Push a file to the Ollama server to create a "blob" (Binary Large Object).
+ ///
+ /// The client used to execute the command.
+ /// The bytes data of the file.
+ /// The token to cancel the operation with.
+ public static Task PushBlobAsync(this IOllamaApiClient client, byte[] bytes, CancellationToken cancellationToken = default)
+ {
+  using var sha256 = SHA256.Create();
+  var digest = $"sha256:{BitConverter.ToString(sha256.ComputeHash(bytes)).Replace("-", string.Empty).ToLowerInvariant()}";
+  return client.PushBlobAsync(digest, bytes, cancellationToken);
+ }
}
diff --git a/test/FunctionalTests/OllamaApiClientTests.cs b/test/FunctionalTests/OllamaApiClientTests.cs
index ce0f70e..a8fed64 100644
--- a/test/FunctionalTests/OllamaApiClientTests.cs
+++ b/test/FunctionalTests/OllamaApiClientTests.cs
@@ -70,15 +70,7 @@ public async Task CreateModel()
var model = new CreateModelRequest
{
Model = _localModel,
- ModelFileContent =
- """
- FROM llama3.2
- PARAMETER temperature 0.3
- PARAMETER num_ctx 100
-
- # sets a custom system message to specify the behavior of the chat assistant
- SYSTEM You are a concise model that tries to return yes or no answers.
- """
+ From = _model
};
var response = await _client
diff --git a/test/TestOllamaApiClient.cs b/test/TestOllamaApiClient.cs
index 00c9b60..0627188 100644
--- a/test/TestOllamaApiClient.cs
+++ b/test/TestOllamaApiClient.cs
@@ -69,6 +69,16 @@ public Task GetVersionAsync(CancellationToken cancellationToken = defau
throw new NotImplementedException();
}
+ public Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default)
+ {
+ throw new NotImplementedException();
+ }
+
+ public Task IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default)
+ {
+ throw new NotImplementedException();
+ }
+
public Task IsRunningAsync(CancellationToken cancellationToken = default)
{
throw new NotImplementedException();