Add logprobs and top_logprobs properties
marcominerva committed Mar 27, 2024
1 parent 992591f commit 4b28125
Showing 4 changed files with 53 additions and 12 deletions.
9 changes: 7 additions & 2 deletions src/ChatGptNet/ChatGptClient.cs
@@ -68,6 +68,9 @@ public async Task<ChatGptResponse> AskAsync(Guid conversationId, string message,
var requestUri = options.ServiceConfiguration.GetChatCompletionEndpoint(model ?? options.DefaultModel);
using var httpResponse = await httpClient.PostAsJsonAsync(requestUri, request, jsonSerializerOptions, cancellationToken);

var json = await httpResponse.Content.ReadAsStringAsync(cancellationToken);
Console.WriteLine(json);

var response = await httpResponse.Content.ReadFromJsonAsync<ChatGptResponse>(jsonSerializerOptions, cancellationToken: cancellationToken);
NormalizeResponse(httpResponse, response!, conversationId, model ?? options.DefaultModel);

@@ -356,8 +359,10 @@ private ChatGptRequest CreateChatGptRequest(IEnumerable<ChatGptMessage> messages
MaxTokens = parameters?.MaxTokens ?? options.DefaultParameters.MaxTokens,
PresencePenalty = parameters?.PresencePenalty ?? options.DefaultParameters.PresencePenalty,
FrequencyPenalty = parameters?.FrequencyPenalty ?? options.DefaultParameters.FrequencyPenalty,
User = options.User,
ResponseFormat = parameters?.ResponseFormat ?? options.DefaultParameters.ResponseFormat
ResponseFormat = parameters?.ResponseFormat ?? options.DefaultParameters.ResponseFormat,
LogProbabilities = parameters?.LogProbabilities ?? options.DefaultParameters.LogProbabilities,
TopLogProbabilities = parameters?.TopLogProbabilities ?? options.DefaultParameters.TopLogProbabilities,
User = options.User
};

private EmbeddingRequest CreateEmbeddingRequest(IEnumerable<string> messages, EmbeddingParameters? parameters, string? model)
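For illustration, here is a hedged usage sketch of requesting log probabilities for a single call. It assumes an AskAsync overload that accepts a ChatGptParameters instance; that overload and the IChatGptClient interface are not shown in this diff, so treat the exact signature as an assumption.

```csharp
using System.Threading.Tasks;
using ChatGptNet;
using ChatGptNet.Models;

// Hedged usage sketch (not part of this commit). Assumes an AskAsync overload
// that accepts ChatGptParameters; the exact signature may differ.
public static async Task<ChatGptResponse> AskWithLogProbabilitiesAsync(
    IChatGptClient chatGptClient, string message)
{
    return await chatGptClient.AskAsync(message, new ChatGptParameters
    {
        LogProbabilities = true,    // sent as "logprobs"
        TopLogProbabilities = 3     // sent as "top_logprobs"; requires LogProbabilities = true
    });
}
```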
4 changes: 3 additions & 1 deletion src/ChatGptNet/ChatGptOptionsBuilder.cs
@@ -90,7 +90,9 @@ internal ChatGptOptionsBuilder(ChatGptOptionsBuilder source)
ResponseFormat = source.DefaultParameters?.ResponseFormat,
TopP = source.DefaultParameters?.TopP,
Temperature = source.DefaultParameters?.Temperature,
Seed = source.DefaultParameters?.Seed
Seed = source.DefaultParameters?.Seed,
LogProbabilities = source.DefaultParameters?.LogProbabilities,
TopLogProbabilities = source.DefaultParameters?.TopLogProbabilities
};

DefaultEmbeddingParameters = new()
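The same properties can be set once as defaults, so every request inherits them unless overridden per call. Below is a minimal configuration sketch, assuming the library's usual AddChatGpt/UseOpenAI registration and a host builder named builder; neither is part of this diff.

```csharp
using ChatGptNet;
using ChatGptNet.Models;

// Minimal configuration sketch; AddChatGpt, UseOpenAI, and the "builder" host
// builder are assumed from the library's typical setup, not from this diff.
builder.Services.AddChatGpt(options =>
{
    options.UseOpenAI("<your-api-key>");
    options.DefaultModel = "gpt-3.5-turbo";
    options.DefaultParameters = new ChatGptParameters
    {
        LogProbabilities = true,   // used when a call does not supply its own parameters
        TopLogProbabilities = 2    // 0-20; only meaningful when LogProbabilities is true
    };
});
```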
19 changes: 18 additions & 1 deletion src/ChatGptNet/Models/ChatGptParameters.cs
@@ -11,7 +11,7 @@ namespace ChatGptNet.Models;
public class ChatGptParameters
{
/// <summary>
/// If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.
/// Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.
/// </summary>
/// <remarks>
/// Determinism is not guaranteed, and you should refer to the <see cref="ChatGptResponse.SystemFingerprint"/> response parameter to monitor changes in the backend.
@@ -64,4 +64,21 @@ public class ChatGptParameters
/// <seealso cref="ChatGptResponseFormat"/>
[JsonPropertyName("response_format")]
public ChatGptResponseFormat? ResponseFormat { get; set; }

/// <summary>
/// Gets or sets a value that determines whether to return the log probabilities of the output tokens. If <see langword="true"/>, returns the log probabilities of each output token returned in the content of the message (default: <see langword="false"/>).
/// </summary>
/// <seealso cref="TopLogProbabilities"/>
[JsonPropertyName("logprobs")]
public bool? LogProbabilities { get; set; }

/// <summary>
/// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability.
/// </summary>
/// <remarks>
/// <see cref="LogProbabilities"/> must be set to <see langword="true"/> if this parameter is used.
/// </remarks>
/// <seealso cref="LogProbabilities"/>
[JsonPropertyName("top_logprobs")]
public int? TopLogProbabilities { get; set; }
}
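The remarks above describe two constraints (the 0-20 range and the dependency on LogProbabilities) that this commit documents but does not validate. Here is a hedged sketch of a check a caller could run before sending a request; it is not part of the library.

```csharp
using System;
using ChatGptNet.Models;

// Hedged validation sketch (not part of the library): the OpenAI API only
// accepts top_logprobs between 0 and 20, and only when logprobs is enabled.
static void EnsureLogProbabilitiesAreConsistent(ChatGptParameters parameters)
{
    if (parameters.TopLogProbabilities is not int topLogProbabilities)
    {
        return; // nothing to validate when top_logprobs is not set
    }

    if (parameters.LogProbabilities != true)
    {
        throw new InvalidOperationException(
            "TopLogProbabilities requires LogProbabilities to be set to true.");
    }

    if (topLogProbabilities is < 0 or > 20)
    {
        throw new ArgumentOutOfRangeException(
            nameof(parameters), "TopLogProbabilities must be between 0 and 20.");
    }
}
```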
33 changes: 25 additions & 8 deletions src/ChatGptNet/Models/ChatGptRequest.cs
@@ -90,7 +90,7 @@ internal class ChatGptRequest
public bool Stream { get; set; }

/// <summary>
/// If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.
/// Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.
/// </summary>
/// <remarks>
/// Determinism is not guaranteed, and you should refer to the <see cref="ChatGptResponse.SystemFingerprint"/> response parameter to monitor changes in the backend.
@@ -142,17 +142,34 @@ internal class ChatGptRequest
public double? FrequencyPenalty { get; set; }

/// <summary>
/// Gets or sets the user identification for chat completion, which can help to monitor and detect abuse.
/// An object specifying the format that the model must output. Used to enable JSON mode.
/// </summary>
/// <seealso cref="ChatGptResponseFormat"/>
[JsonPropertyName("response_format")]
public ChatGptResponseFormat? ResponseFormat { get; set; }

/// <summary>
/// Gets or sets a value that determines whether to return the log probabilities of the output tokens. If <see langword="true"/>, returns the log probabilities of each output token returned in the content of the message (default: <see langword="false"/>).
/// </summary>
/// <seealso cref="TopLogProbabilities"/>
[JsonPropertyName("logprobs")]
public bool? LogProbabilities { get; set; }

/// <summary>
/// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability.
/// </summary>
/// <remarks>
/// See <see href="https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids">Safety best practices</see> for more information.
/// <see cref="LogProbabilities"/> must be set to <see langword="true"/> if this parameter is used.
/// </remarks>
public string? User { get; set; }
/// <seealso cref="LogProbabilities"/>
[JsonPropertyName("top_logprobs")]
public int? TopLogProbabilities { get; set; }

/// <summary>
/// An object specifying the format that the model must output. Used to enable JSON mode.
/// Gets or sets the user identification for chat completion, which can help to monitor and detect abuse.
/// </summary>
/// <seealso cref="ChatGptResponseFormat"/>
[JsonPropertyName("response_format")]
public ChatGptResponseFormat? ResponseFormat { get; set; }
/// <remarks>
/// See <see href="https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids">Safety best practices</see> for more information.
/// </remarks>
public string? User { get; set; }
}
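Because ChatGptRequest is internal, the snippet below uses a hypothetical stand-in type with the same attributes, purely to show the wire names these additions produce when serialized with System.Text.Json.

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Prints {"logprobs":true,"top_logprobs":3}, i.e. the request fields the
// OpenAI chat completions API expects for log probabilities.
var json = JsonSerializer.Serialize(new LogProbabilitiesSketch
{
    LogProbabilities = true,
    TopLogProbabilities = 3
});
Console.WriteLine(json);

// Hypothetical stand-in mirroring the ChatGptRequest additions above;
// the real ChatGptRequest type is internal to the library.
internal class LogProbabilitiesSketch
{
    [JsonPropertyName("logprobs")]
    public bool? LogProbabilities { get; set; }

    [JsonPropertyName("top_logprobs")]
    public int? TopLogProbabilities { get; set; }
}
```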
