You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

ExtensionMethods.cs 2.7 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172
  1. using Microsoft.SemanticKernel.AI.ChatCompletion;
  2. using Microsoft.SemanticKernel.AI.TextCompletion;
  3. namespace LLamaSharp.SemanticKernel;
  4. internal static class ExtensionMethods
  5. {
  6. internal static global::LLama.Common.ChatHistory ToLLamaSharpChatHistory(this ChatHistory chatHistory)
  7. {
  8. if (chatHistory is null)
  9. {
  10. throw new ArgumentNullException(nameof(chatHistory));
  11. }
  12. var history = new global::LLama.Common.ChatHistory();
  13. foreach (var chat in chatHistory)
  14. {
  15. var role = Enum.TryParse<global::LLama.Common.AuthorRole>(chat.Role.Label, out var _role) ? _role : global::LLama.Common.AuthorRole.Unknown;
  16. history.AddMessage(role, chat.Content);
  17. }
  18. return history;
  19. }
  20. /// <summary>
  21. /// Convert ChatRequestSettings to LLamaSharp InferenceParams
  22. /// </summary>
  23. /// <param name="requestSettings"></param>
  24. /// <returns></returns>
  25. internal static global::LLama.Common.InferenceParams ToLLamaSharpInferenceParams(this ChatRequestSettings requestSettings)
  26. {
  27. if (requestSettings is null)
  28. {
  29. throw new ArgumentNullException(nameof(requestSettings));
  30. }
  31. var antiPrompts = new List<string>(requestSettings.StopSequences) { AuthorRole.User.ToString() + ":" };
  32. return new global::LLama.Common.InferenceParams
  33. {
  34. Temperature = (float)requestSettings.Temperature,
  35. TopP = (float)requestSettings.TopP,
  36. PresencePenalty = (float)requestSettings.PresencePenalty,
  37. FrequencyPenalty = (float)requestSettings.FrequencyPenalty,
  38. AntiPrompts = antiPrompts,
  39. MaxTokens = requestSettings.MaxTokens ?? -1
  40. };
  41. }
  42. /// <summary>
  43. /// Convert CompleteRequestSettings to LLamaSharp InferenceParams
  44. /// </summary>
  45. /// <param name="requestSettings"></param>
  46. /// <returns></returns>
  47. internal static global::LLama.Common.InferenceParams ToLLamaSharpInferenceParams(this CompleteRequestSettings requestSettings)
  48. {
  49. if (requestSettings is null)
  50. {
  51. throw new ArgumentNullException(nameof(requestSettings));
  52. }
  53. return new global::LLama.Common.InferenceParams
  54. {
  55. Temperature = (float)requestSettings.Temperature,
  56. TopP = (float)requestSettings.TopP,
  57. PresencePenalty = (float)requestSettings.PresencePenalty,
  58. FrequencyPenalty = (float)requestSettings.FrequencyPenalty,
  59. AntiPrompts = requestSettings.StopSequences,
  60. MaxTokens = requestSettings.MaxTokens ?? -1
  61. };
  62. }
  63. }