You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

LlamaSharpTextGenerator.cs 5.0 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using LLama;
using LLama.Abstractions;
using LLama.Common;
using Microsoft.KernelMemory.AI;
  10. namespace LLamaSharp.KernelMemory
  11. {
  12. /// <summary>
  13. /// Provides text generation for LLamaSharp.
  14. /// </summary>
  15. public class LlamaSharpTextGenerator : ITextGenerator, IDisposable
  16. {
  17. private readonly LLamaWeights _weights;
  18. private readonly StatelessExecutor _executor;
  19. private readonly LLamaContext _context;
  20. private readonly InferenceParams? _defaultInferenceParams;
  21. private bool _ownsContext = false;
  22. private bool _ownsWeights = false;
  23. public int MaxTokenTotal { get; }
  24. /// <summary>
  25. /// Initializes a new instance of the <see cref="LlamaSharpTextGenerator"/> class.
  26. /// </summary>
  27. /// <param name="config">The configuration for LLamaSharp.</param>
  28. public LlamaSharpTextGenerator(LLamaSharpConfig config)
  29. {
  30. var parameters = new ModelParams(config.ModelPath)
  31. {
  32. ContextSize = config?.ContextSize ?? 2048,
  33. Seed = config?.Seed ?? 0,
  34. GpuLayerCount = config?.GpuLayerCount ?? 20
  35. };
  36. _weights = LLamaWeights.LoadFromFile(parameters);
  37. _context = _weights.CreateContext(parameters);
  38. _executor = new StatelessExecutor(_weights, parameters);
  39. _defaultInferenceParams = config?.DefaultInferenceParams;
  40. _ownsWeights = _ownsContext = true;
  41. MaxTokenTotal = (int)parameters.ContextSize;
  42. }
  43. /// <summary>
  44. /// Initializes a new instance of the <see cref="LlamaSharpTextGenerator"/> class from reused weights, context and executor.
  45. /// If executor is not specified, then a StatelessExecutor will be created with `context.Params`. So far only `StatelessExecutor` is expected.
  46. /// </summary>
  47. /// <param name="weights">A LLamaWeights object.</param>
  48. /// <param name="context">A LLamaContext object.</param>
  49. /// <param name="executor">An executor. Currently only StatelessExecutor is expected.</param>
  50. public LlamaSharpTextGenerator(LLamaWeights weights, LLamaContext context, StatelessExecutor? executor = null, InferenceParams? inferenceParams = null)
  51. {
  52. _weights = weights;
  53. _context = context;
  54. _executor = executor ?? new StatelessExecutor(_weights, _context.Params);
  55. _defaultInferenceParams = inferenceParams;
  56. MaxTokenTotal = (int)_context.Params.ContextSize;
  57. }
  58. /// <inheritdoc/>
  59. public void Dispose()
  60. {
  61. if (_ownsWeights)
  62. {
  63. _weights?.Dispose();
  64. }
  65. if (_ownsContext)
  66. {
  67. _context.Dispose();
  68. }
  69. }
  70. /// <inheritdoc/>
  71. public IAsyncEnumerable<string> GenerateTextAsync(string prompt, TextGenerationOptions options, CancellationToken cancellationToken = default)
  72. {
  73. return _executor.InferAsync(prompt, OptionsToParams(options, this._defaultInferenceParams), cancellationToken: cancellationToken);
  74. }
  75. private static InferenceParams OptionsToParams(TextGenerationOptions options, InferenceParams? defaultParams)
  76. {
  77. if (defaultParams != null)
  78. {
  79. return defaultParams with
  80. {
  81. AntiPrompts = defaultParams.AntiPrompts.Concat(options.StopSequences).ToList().AsReadOnly(),
  82. Temperature = options.Temperature == defaultParams.Temperature ? defaultParams.Temperature : (float)options.Temperature,
  83. MaxTokens = options.MaxTokens ?? defaultParams.MaxTokens,
  84. FrequencyPenalty = options.FrequencyPenalty == defaultParams.FrequencyPenalty ? defaultParams.FrequencyPenalty : (float)options.FrequencyPenalty,
  85. PresencePenalty = options.PresencePenalty == defaultParams.PresencePenalty ? defaultParams.PresencePenalty : (float)options.PresencePenalty,
  86. TopP = options.TopP == defaultParams.TopP ? defaultParams.TopP : (float)options.TopP
  87. };
  88. }
  89. else
  90. {
  91. return new InferenceParams()
  92. {
  93. AntiPrompts = options.StopSequences.ToList().AsReadOnly(),
  94. Temperature = (float)options.Temperature,
  95. MaxTokens = options.MaxTokens ?? 1024,
  96. FrequencyPenalty = (float)options.FrequencyPenalty,
  97. PresencePenalty = (float)options.PresencePenalty,
  98. TopP = (float)options.TopP,
  99. };
  100. }
  101. }
  102. /// <inheritdoc/>
  103. public int CountTokens(string text) => _context.Tokenize(text).Length;
  104. }
  105. }