From 2aa96b206f88f1a9a715e6d59e371d72c9d03e31 Mon Sep 17 00:00:00 2001
From: Zoli Somogyi
Date: Sat, 27 Apr 2024 09:39:40 +0200
Subject: [PATCH] Adding Response Format - Correcting non-standard way of
 working with PromptExecutionSettings

The response format can be used downstream to post-process the messages
based on the requested format.
---
 LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs b/LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs
index 87dda39e..5e8a6669 100644
--- a/LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs
+++ b/LLama.SemanticKernel/LLamaSharpPromptExecutionSettings.cs
@@ -74,6 +74,12 @@ public class LLamaSharpPromptExecutionSettings : PromptExecutionSettings
     [JsonPropertyName("token_selection_biases")]
     public IDictionary<int, int> TokenSelectionBiases { get; set; } = new Dictionary<int, int>();
 
+    /// <summary>
+    /// Indicates the format of the response which can be used downstream to post-process the messages. Handlebars: handlebars_object. JSON: json_object, etc.
+    /// </summary>
+    [JsonPropertyName("response_format")]
+    public string ResponseFormat { get; set; } = string.Empty;
+
     /// <summary>
     /// Create a new settings object with the values from another settings object.
     /// </summary>
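
Usage note (illustrative, not part of the patch): a minimal sketch of how a
downstream consumer might branch on the new ResponseFormat value when
post-processing a completion. The ResponseFormatPostProcessor class and its
PostProcess method are hypothetical names, and the LLamaSharp.SemanticKernel
namespace is assumed from the project path; only the System.Text.Json calls
are real API.

    using System.Text.Json;
    using LLamaSharp.SemanticKernel; // assumed namespace for LLamaSharpPromptExecutionSettings

    public static class ResponseFormatPostProcessor
    {
        // Hypothetical downstream step keyed off settings.ResponseFormat.
        public static string PostProcess(string modelOutput, LLamaSharpPromptExecutionSettings settings)
        {
            switch (settings.ResponseFormat)
            {
                case "json_object":
                {
                    // Trim whitespace, then confirm the output is well-formed JSON;
                    // JsonDocument.Parse throws JsonException if it is not.
                    var json = modelOutput.Trim();
                    using (JsonDocument.Parse(json)) { }
                    return json;
                }
                case "handlebars_object":
                    // Hand the text through unchanged for a later Handlebars-aware step.
                    return modelOutput;
                default:
                    // string.Empty (the property default) means no post-processing was requested.
                    return modelOutput;
            }
        }
    }

The patch itself only carries the flag through the settings object; where the
post-processing runs (chat completion service or calling code) is left to the
consumer.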