Skip to content

Commit

Permalink
Merge pull request #170 from awaescher/merge169
Browse files Browse the repository at this point in the history
Merge #169
  • Loading branch information
awaescher authored Jan 15, 2025
2 parents 83578f9 + 66428eb commit 7549d86
Show file tree
Hide file tree
Showing 8 changed files with 114 additions and 73 deletions.
6 changes: 4 additions & 2 deletions demo/Demos/ModelManagerConsole.cs
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,10 @@ private async Task CopyModel()
private async Task CreateModel()
{
	// Ask for the new model's name, the base model to derive it from, and a custom system prompt.
	var createName = ReadInput("Enter a name for your new model:");
	var fromModel = ReadInput("Enter the name of the model to create from:",
		$"[{HintTextColor}]See [/][{AccentTextColor}][link]https://ollama.ai/library[/][/][{HintTextColor}] for available models[/]");
	var systemPrompt = ReadInput("Set a new system prompt word for the model:");

	// Stream the creation status messages to the console as the server reports them.
	await foreach (var status in Ollama.CreateModelAsync(new CreateModelRequest { From = fromModel, System = systemPrompt, Model = createName }))
		AnsiConsole.MarkupLineInterpolated($"{status?.Status ?? ""}");
}

Expand Down
4 changes: 4 additions & 0 deletions src/Constants/Application.cs
Original file line number Diff line number Diff line change
Expand Up @@ -101,4 +101,8 @@ internal static class Application
public const string Completed = "completed";
public const string Embeddings = "embeddings";
public const string ParameterSize = "parameter_size";
// JSON field name for the conversation messages list; the Ollama create-model
// API expects the plural "messages", not "message".
public const string Messages = "messages";
public const string Adapters = "adapters";
public const string Files = "files";
public const string From = "from";
}
15 changes: 15 additions & 0 deletions src/IOllamaApiClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -132,4 +132,19 @@ public interface IOllamaApiClient
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>A task that represents the asynchronous operation. The task result contains the <see cref="Version"/>.</returns>
Task<Version> GetVersionAsync(CancellationToken cancellationToken = default);

/// <summary>
/// Push a file to the Ollama server to create a "blob" (Binary Large Object).
/// </summary>
/// <param name="digest">The expected SHA256 digest of the file, including the "sha256:" prefix.</param>
/// <param name="bytes">The bytes data of the file.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>A task that represents the asynchronous upload operation.</returns>
Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default);

/// <summary>
/// Checks whether a file blob (Binary Large Object) used when creating a model exists on the server.
/// This checks your Ollama server and not ollama.com.
/// </summary>
/// <param name="digest">The expected SHA256 digest of the file.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>A task whose result is true if the blob exists on the server, otherwise false.</returns>
Task<bool> IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default);
}
62 changes: 50 additions & 12 deletions src/Models/CreateModel.cs
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
using System.Text.Json.Serialization;
using OllamaSharp.Constants;
using OllamaSharp.Models.Chat;

namespace OllamaSharp.Models;

/// <summary>
/// Create a model from a Modelfile. It is recommended to set <see cref="ModelFileContent"/> to the
/// content of the Modelfile rather than just set path. This is a requirement
/// for remote create. Remote model creation must also create any file blobs,
/// fields such as FROM and ADAPTER, explicitly with the server using Create a
/// Blob and the value to the path indicated in the response.
/// Create a model from:
/// another model;
/// a safetensors directory; or
/// a GGUF file.
/// If you are creating a model from a safetensors directory or from a GGUF file,
/// you must [create a blob] for each of the files and then use the file name and SHA256
/// digest associated with each blob in the `files` field.
///
/// <see href="https://github.com/jmorganca/ollama/blob/main/docs/api.md#create-a-model">Ollama API docs</see>
///
Expand All @@ -23,17 +26,52 @@ public class CreateModelRequest : OllamaRequest
public string? Model { get; set; }

/// <summary>
/// Contents of the Modelfile
/// See https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md
/// Name of an existing model to create the new model from (optional)
/// </summary>
[JsonPropertyName(Application.ModelFile)]
public string ModelFileContent { get; set; } = null!;
[JsonPropertyName(Application.From)]
public string? From { get; set; }

/// <summary>
/// Path to the Modelfile (optional)
/// A dictionary of file names to SHA256 digests of blobs to create the model from (optional)
/// </summary>
[JsonPropertyName(Application.Path)]
public string? Path { get; set; }
[JsonPropertyName(Application.Files)]
public Dictionary<string, string>? Files { get; set; }

/// <summary>
/// A dictionary of file names to SHA256 digests of blobs for LORA adapters (optional)
/// </summary>
[JsonPropertyName(Application.Adapters)]
public Dictionary<string, string>? Adapters { get; set; }

/// <summary>
/// The prompt template for the model (optional)
/// </summary>
[JsonPropertyName(Application.Template)]
public string? Template { get; set; }

/// <summary>
/// A string or list of strings containing the license or licenses for the model (optional)
/// </summary>
[JsonPropertyName(Application.License)]
public object? License { get; set; }

/// <summary>
/// A string containing the system prompt for the model (optional)
/// </summary>
[JsonPropertyName(Application.System)]
public string? System { get; set; }

/// <summary>
/// A dictionary of parameters for the model (optional)
/// </summary>
[JsonPropertyName(Application.Parameters)]
public Dictionary<string, string>? Parameters { get; set; }

/// <summary>
/// A list of message objects used to create a conversation (optional)
/// </summary>
[JsonPropertyName(Application.Messages)]
public IEnumerable<Message>? Messages { get; set; }

/// <summary>
/// If false the response will be returned as a single response object, rather than a stream of objects (optional)
Expand Down
24 changes: 20 additions & 4 deletions src/OllamaApiClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,24 @@ public async Task<Version> GetVersionAsync(CancellationToken cancellationToken =
return Version.Parse(versionString);
}

/// <inheritdoc />
public async Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default)
{
	// POST the raw file content to api/blobs/{digest}; the server verifies the digest.
	using var requestMessage = new HttpRequestMessage(HttpMethod.Post, $"api/blobs/{digest}")
	{
		Content = new ByteArrayContent(bytes)
	};

	using var response = await SendToOllamaAsync(requestMessage, null, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false);

	// Surface any non-success status (e.g. digest mismatch) as an HttpRequestException.
	response.EnsureSuccessStatusCode();
}

/// <inheritdoc />
public async Task<bool> IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default)
{
	// A HEAD request to api/blobs/{digest} answers 200 OK when the blob exists.
	using var requestMessage = new HttpRequestMessage(HttpMethod.Head, "api/blobs/" + digest);
	requestMessage.ApplyCustomHeaders(DefaultRequestHeaders, null);

	// Dispose the response to release the underlying connection resources
	// (previously leaked; consistent with PushBlobAsync above).
	using var response = await _client.SendAsync(requestMessage, cancellationToken).ConfigureAwait(false);
	return response.StatusCode == HttpStatusCode.OK;
}

private async IAsyncEnumerable<GenerateResponseStream?> GenerateCompletionAsync(GenerateRequest generateRequest, [EnumeratorCancellation] CancellationToken cancellationToken)
{
using var requestMessage = CreateRequestMessage(HttpMethod.Post, Endpoints.Generate, generateRequest);
Expand All @@ -236,15 +254,13 @@ public async Task<Version> GetVersionAsync(CancellationToken cancellationToken =
/// <summary>
/// Sends a GET request to the given endpoint and deserializes the JSON response body.
/// </summary>
/// <typeparam name="TResponse">The type to deserialize the response into.</typeparam>
/// <param name="endpoint">The relative API endpoint to call.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>The deserialized response object.</returns>
private async Task<TResponse> GetAsync<TResponse>(string endpoint, CancellationToken cancellationToken)
{
	using var requestMessage = CreateRequestMessage(HttpMethod.Get, endpoint);
	using var response = await SendToOllamaAsync(requestMessage, null, HttpCompletionOption.ResponseContentRead, cancellationToken).ConfigureAwait(false);
	using var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);

	// ConfigureAwait(false) added for consistency with every other await in this
	// library class (avoids capturing a synchronization context).
	return (await JsonSerializer.DeserializeAsync<TResponse>(responseStream, IncomingJsonSerializerOptions, cancellationToken).ConfigureAwait(false))!;
}



private async Task PostAsync<TRequest>(string endpoint, TRequest ollamaRequest, CancellationToken cancellationToken) where TRequest : OllamaRequest
{
Expand Down
56 changes: 10 additions & 46 deletions src/OllamaApiClientExtensions.cs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
using System.Security.Cryptography;
using OllamaSharp.Models;

namespace OllamaSharp;
Expand All @@ -18,52 +19,6 @@ public static class OllamaApiClientExtensions
public static Task CopyModelAsync(this IOllamaApiClient client, string source, string destination, CancellationToken cancellationToken = default)
{
	// Build the request object explicitly, then delegate to the core client API.
	var request = new CopyModelRequest { Source = source, Destination = destination };
	return client.CopyModelAsync(request, cancellationToken);
}

/// <summary>
/// Sends a request to the /api/create endpoint to create a model.
/// </summary>
/// <param name="client">The client used to execute the command.</param>
/// <param name="name">The name for the new model.</param>
/// <param name="modelFileContent">
/// The file content for the model file the new model should be built with.
/// See <see href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md"/>.
/// </param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>An async enumerable that can be used to iterate over the streamed responses. See <see cref="CreateModelResponse"/>.</returns>
public static IAsyncEnumerable<CreateModelResponse?> CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, CancellationToken cancellationToken = default)
{
var request = new CreateModelRequest
{
Model = name,
ModelFileContent = modelFileContent,
Stream = true
};
return client.CreateModelAsync(request, cancellationToken);
}

/// <summary>
/// Sends a request to the /api/create endpoint to create a model.
/// </summary>
/// <param name="client">The client used to execute the command.</param>
/// <param name="name">The name for the new model.</param>
/// <param name="modelFileContent">
/// The file content for the model file the new model should be built with.
/// See <see href="https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md"/>.
/// </param>
/// <param name="path">The name path to the model file.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
/// <returns>An async enumerable that can be used to iterate over the streamed responses. See <see cref="CreateModelResponse"/>.</returns>
public static IAsyncEnumerable<CreateModelResponse?> CreateModelAsync(this IOllamaApiClient client, string name, string modelFileContent, string path, CancellationToken cancellationToken = default)
{
var request = new CreateModelRequest
{
Model = name,
ModelFileContent = modelFileContent,
Path = path,
Stream = true
};
return client.CreateModelAsync(request, cancellationToken);
}

/// <summary>
/// Sends a request to the /api/delete endpoint to delete a model.
/// </summary>
Expand Down Expand Up @@ -144,4 +99,13 @@ public static Task<EmbedResponse> EmbedAsync(this IOllamaApiClient client, strin
/// <returns>A task that represents the asynchronous operation. The task result contains the <see cref="ShowModelResponse"/> with the model information.</returns>
public static Task<ShowModelResponse> ShowModelAsync(this IOllamaApiClient client, string model, CancellationToken cancellationToken = default)
{
	// Wrap the model name in a request object and delegate to the core client API.
	var request = new ShowModelRequest { Model = model };
	return client.ShowModelAsync(request, cancellationToken);
}

/// <summary>
/// Push a file to the Ollama server to create a "blob" (Binary Large Object).
/// The digest is computed locally as the lowercase hex SHA256 hash of
/// <paramref name="bytes"/>, prefixed with "sha256:".
/// </summary>
/// <param name="client">The client used to execute the command.</param>
/// <param name="bytes">The bytes data of the file.</param>
/// <param name="cancellationToken">The token to cancel the operation with.</param>
public static Task PushBlobAsync(this IOllamaApiClient client, byte[] bytes, CancellationToken cancellationToken = default)
{
	// SHA256 is IDisposable; the previous implementation leaked the instance.
	using var sha256 = SHA256.Create();

	// ToLowerInvariant: hex digits must not be subject to culture-specific casing.
	var digest = BitConverter.ToString(sha256.ComputeHash(bytes)).Replace("-", string.Empty).ToLowerInvariant();
	return client.PushBlobAsync($"sha256:{digest}", bytes, cancellationToken);
}
}
10 changes: 1 addition & 9 deletions test/FunctionalTests/OllamaApiClientTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -70,15 +70,7 @@ public async Task CreateModel()
var model = new CreateModelRequest
{
Model = _localModel,
ModelFileContent =
"""
FROM llama3.2
PARAMETER temperature 0.3
PARAMETER num_ctx 100
# sets a custom system message to specify the behavior of the chat assistant
SYSTEM You are a concise model that tries to return yes or no answers.
"""
From = _model
};

var response = await _client
Expand Down
10 changes: 10 additions & 0 deletions test/TestOllamaApiClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,16 @@ public Task<Version> GetVersionAsync(CancellationToken cancellationToken = defau
throw new NotImplementedException();
}

// Test stub: blob upload is not exercised by these tests.
public Task PushBlobAsync(string digest, byte[] bytes, CancellationToken cancellationToken = default)
	=> throw new NotImplementedException();

// Test stub: blob existence checks are not exercised by these tests.
public Task<bool> IsBlobExistsAsync(string digest, CancellationToken cancellationToken = default)
	=> throw new NotImplementedException();

public Task<bool> IsRunningAsync(CancellationToken cancellationToken = default)
{
throw new NotImplementedException();
Expand Down

0 comments on commit 7549d86

Please sign in to comment.