You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

ExtensionMethods.cs 1.8 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849
  1. using LLamaSharp.SemanticKernel.ChatCompletion;
  2. using Microsoft.SemanticKernel.AI.ChatCompletion;
  3. namespace LLamaSharp.SemanticKernel;
  4. public static class ExtensionMethods
  5. {
  6. public static global::LLama.Common.ChatHistory ToLLamaSharpChatHistory(this ChatHistory chatHistory)
  7. {
  8. if (chatHistory is null)
  9. {
  10. throw new ArgumentNullException(nameof(chatHistory));
  11. }
  12. var history = new global::LLama.Common.ChatHistory();
  13. foreach (var chat in chatHistory)
  14. {
  15. var role = Enum.TryParse<global::LLama.Common.AuthorRole>(chat.Role.Label, true, out var _role) ? _role : global::LLama.Common.AuthorRole.Unknown;
  16. history.AddMessage(role, chat.Content);
  17. }
  18. return history;
  19. }
  20. /// <summary>
  21. /// Convert ChatRequestSettings to LLamaSharp InferenceParams
  22. /// </summary>
  23. /// <param name="requestSettings"></param>
  24. /// <returns></returns>
  25. internal static global::LLama.Common.InferenceParams ToLLamaSharpInferenceParams(this ChatRequestSettings requestSettings)
  26. {
  27. if (requestSettings is null)
  28. {
  29. throw new ArgumentNullException(nameof(requestSettings));
  30. }
  31. var antiPrompts = new List<string>(requestSettings.StopSequences) { AuthorRole.User.ToString() + ":" };
  32. return new global::LLama.Common.InferenceParams
  33. {
  34. Temperature = (float)requestSettings.Temperature,
  35. TopP = (float)requestSettings.TopP,
  36. PresencePenalty = (float)requestSettings.PresencePenalty,
  37. FrequencyPenalty = (float)requestSettings.FrequencyPenalty,
  38. AntiPrompts = antiPrompts,
  39. MaxTokens = requestSettings.MaxTokens ?? -1
  40. };
  41. }
  42. }