You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

ChatRequestSettings.cs 4.6 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125
  1. 
  2. /* Unmerged change from project 'LLamaSharp.SemanticKernel (netstandard2.0)'
  3. Before:
  4. using Microsoft.SemanticKernel;
  5. After:
  6. using LLamaSharp;
  7. using LLamaSharp.SemanticKernel;
  8. using LLamaSharp.SemanticKernel;
  9. using LLamaSharp.SemanticKernel.ChatCompletion;
  10. using Microsoft.SemanticKernel;
  11. */
  12. using LLamaSharp.SemanticKernel.ChatCompletion;
  13. using Microsoft.SemanticKernel;
  14. using System.Text.Json;
  15. using System.Text.Json.Serialization;
  16. namespace LLamaSharp.SemanticKernel;
  17. public class LLamaSharpPromptExecutionSettings : PromptExecutionSettings
  18. {
  19. /// <summary>
  20. /// Temperature controls the randomness of the completion.
  21. /// The higher the temperature, the more random the completion.
  22. /// </summary>
  23. [JsonPropertyName("temperature")]
  24. public double Temperature { get; set; } = 0;
  25. /// <summary>
  26. /// TopP controls the diversity of the completion.
  27. /// The higher the TopP, the more diverse the completion.
  28. /// </summary>
  29. [JsonPropertyName("top_p")]
  30. public double TopP { get; set; } = 0;
  31. /// <summary>
  32. /// Number between -2.0 and 2.0. Positive values penalize new tokens
  33. /// based on whether they appear in the text so far, increasing the
  34. /// model's likelihood to talk about new topics.
  35. /// </summary>
  36. [JsonPropertyName("presence_penalty")]
  37. public double PresencePenalty { get; set; } = 0;
  38. /// <summary>
  39. /// Number between -2.0 and 2.0. Positive values penalize new tokens
  40. /// based on their existing frequency in the text so far, decreasing
  41. /// the model's likelihood to repeat the same line verbatim.
  42. /// </summary>
  43. [JsonPropertyName("frequency_penalty")]
  44. public double FrequencyPenalty { get; set; } = 0;
  45. /// <summary>
  46. /// Sequences where the completion will stop generating further tokens.
  47. /// </summary>
  48. [JsonPropertyName("stop_sequences")]
  49. public IList<string> StopSequences { get; set; } = Array.Empty<string>();
  50. /// <summary>
  51. /// How many completions to generate for each prompt. Default is 1.
  52. /// Note: Because this parameter generates many completions, it can quickly consume your token quota.
  53. /// Use carefully and ensure that you have reasonable settings for max_tokens and stop.
  54. /// </summary>
  55. [JsonPropertyName("results_per_prompt")]
  56. public int ResultsPerPrompt { get; set; } = 1;
  57. /// <summary>
  58. /// The maximum number of tokens to generate in the completion.
  59. /// </summary>
  60. [JsonPropertyName("max_tokens")]
  61. public int? MaxTokens { get; set; }
  62. /// <summary>
  63. /// Modify the likelihood of specified tokens appearing in the completion.
  64. /// </summary>
  65. [JsonPropertyName("token_selection_biases")]
  66. public IDictionary<int, int> TokenSelectionBiases { get; set; } = new Dictionary<int, int>();
  67. /// <summary>
  68. /// Create a new settings object with the values from another settings object.
  69. /// </summary>
  70. /// <param name="requestSettings">Template configuration</param>
  71. /// <param name="defaultMaxTokens">Default max tokens</param>
  72. /// <returns>An instance of OpenAIRequestSettings</returns>
  73. public static LLamaSharpPromptExecutionSettings FromRequestSettings(PromptExecutionSettings? requestSettings, int? defaultMaxTokens = null)
  74. {
  75. if (requestSettings is null)
  76. {
  77. return new LLamaSharpPromptExecutionSettings()
  78. {
  79. MaxTokens = defaultMaxTokens
  80. };
  81. }
  82. if (requestSettings is LLamaSharpPromptExecutionSettings requestSettingsChatRequestSettings)
  83. {
  84. return requestSettingsChatRequestSettings;
  85. }
  86. var json = JsonSerializer.Serialize(requestSettings);
  87. var chatRequestSettings = JsonSerializer.Deserialize<LLamaSharpPromptExecutionSettings>(json, s_options);
  88. if (chatRequestSettings is not null)
  89. {
  90. return chatRequestSettings;
  91. }
  92. throw new ArgumentException($"Invalid request settings, cannot convert to {nameof(LLamaSharpPromptExecutionSettings)}", nameof(requestSettings));
  93. }
  94. private static readonly JsonSerializerOptions s_options = CreateOptions();
  95. private static JsonSerializerOptions CreateOptions()
  96. {
  97. JsonSerializerOptions options = new()
  98. {
  99. WriteIndented = true,
  100. MaxDepth = 20,
  101. AllowTrailingCommas = true,
  102. PropertyNameCaseInsensitive = true,
  103. ReadCommentHandling = JsonCommentHandling.Skip,
  104. Converters = { new LLamaSharpPromptExecutionSettingsConverter() }
  105. };
  106. return options;
  107. }
  108. }