Browse Source

Adding Response Format - Correcting non-standard way of working with PromptExecutionSettings

can be used downstream to post-process the messages based on the requested format
pull/689/head
Zoli Somogyi 1 year ago
parent
commit
2aa96b206f
1 changed file with 6 additions and 0 deletions
  1. +6
    -0
      LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs

+ 6
- 0
LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs View File

@@ -74,6 +74,12 @@ public class LLamaSharpPromptExecutionSettings : PromptExecutionSettings
/// <summary>
/// Per-token bias map applied during sampling; keyed by token id.
/// </summary>
[JsonPropertyName("token_selection_biases")]
public IDictionary<int, int> TokenSelectionBiases { get; set; } = new();

/// <summary>
/// Requested format of the response, consumed downstream when post-processing
/// messages (e.g. "handlebars_object" for Handlebars, "json_object" for JSON).
/// </summary>
[JsonPropertyName("response_format")]
public string ResponseFormat { get; set; } = "";

/// <summary>
/// Create a new settings object with the values from another settings object.
/// </summary>


Loading…
Cancel
Save