You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

LlamaSharpTextGeneration.cs 4.7 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using LLama;
using LLama.Abstractions;
using LLama.Common;
using Microsoft.KernelMemory.AI;
  10. namespace LLamaSharp.KernelMemory
  11. {
  12. /// <summary>
  13. /// Provides text generation for LLamaSharp.
  14. /// </summary>
  15. public class LlamaSharpTextGeneration : ITextGeneration, IDisposable
  16. {
  17. private readonly LLamaWeights _weights;
  18. private readonly StatelessExecutor _executor;
  19. private readonly LLamaContext _context;
  20. private readonly InferenceParams? _defaultInferenceParams;
  21. private bool _ownsContext = false;
  22. private bool _ownsWeights = false;
  23. /// <summary>
  24. /// Initializes a new instance of the <see cref="LlamaSharpTextGeneration"/> class.
  25. /// </summary>
  26. /// <param name="config">The configuration for LLamaSharp.</param>
  27. public LlamaSharpTextGeneration(LLamaSharpConfig config)
  28. {
  29. var parameters = new ModelParams(config.ModelPath)
  30. {
  31. ContextSize = config?.ContextSize ?? 2048,
  32. Seed = config?.Seed ?? 0,
  33. GpuLayerCount = config?.GpuLayerCount ?? 20
  34. };
  35. _weights = LLamaWeights.LoadFromFile(parameters);
  36. _context = _weights.CreateContext(parameters);
  37. _executor = new StatelessExecutor(_weights, parameters);
  38. _defaultInferenceParams = config?.DefaultInferenceParams;
  39. _ownsWeights = _ownsContext = true;
  40. }
  41. /// <summary>
  42. /// Initializes a new instance of the <see cref="LlamaSharpTextGeneration"/> class from reused weights, context and executor.
  43. /// If executor is not specified, then a StatelessExecutor will be created with `context.Params`. So far only `StatelessExecutor` is expected.
  44. /// </summary>
  45. /// <param name="weights">A LLamaWeights object.</param>
  46. /// <param name="context">A LLamaContext object.</param>
  47. /// <param name="executor">An executor. Currently only StatelessExecutor is expected.</param>
  48. public LlamaSharpTextGeneration(LLamaWeights weights, LLamaContext context, StatelessExecutor? executor = null, InferenceParams? inferenceParams = null)
  49. {
  50. _weights = weights;
  51. _context = context;
  52. _executor = executor ?? new StatelessExecutor(_weights, _context.Params);
  53. _defaultInferenceParams = inferenceParams;
  54. }
  55. /// <inheritdoc/>
  56. public void Dispose()
  57. {
  58. if (_ownsWeights)
  59. {
  60. _weights?.Dispose();
  61. }
  62. if (_ownsContext)
  63. {
  64. _context.Dispose();
  65. }
  66. }
  67. /// <inheritdoc/>
  68. public IAsyncEnumerable<string> GenerateTextAsync(string prompt, TextGenerationOptions options, CancellationToken cancellationToken = default)
  69. {
  70. return _executor.InferAsync(prompt, OptionsToParams(options, this._defaultInferenceParams), cancellationToken: cancellationToken);
  71. }
  72. private static InferenceParams OptionsToParams(TextGenerationOptions options, InferenceParams? defaultParams)
  73. {
  74. if (defaultParams != null)
  75. {
  76. return defaultParams with
  77. {
  78. AntiPrompts = defaultParams.AntiPrompts.Concat(options.StopSequences).ToList().AsReadOnly(),
  79. Temperature = options.Temperature == defaultParams.Temperature ? defaultParams.Temperature : (float)options.Temperature,
  80. MaxTokens = options.MaxTokens ?? defaultParams.MaxTokens,
  81. FrequencyPenalty = options.FrequencyPenalty == defaultParams.FrequencyPenalty ? defaultParams.FrequencyPenalty : (float)options.FrequencyPenalty,
  82. PresencePenalty = options.PresencePenalty == defaultParams.PresencePenalty ? defaultParams.PresencePenalty : (float)options.PresencePenalty,
  83. TopP = options.TopP == defaultParams.TopP ? defaultParams.TopP : (float)options.TopP
  84. };
  85. }
  86. else
  87. {
  88. return new InferenceParams()
  89. {
  90. AntiPrompts = options.StopSequences.ToList().AsReadOnly(),
  91. Temperature = (float)options.Temperature,
  92. MaxTokens = options.MaxTokens ?? 1024,
  93. FrequencyPenalty = (float)options.FrequencyPenalty,
  94. PresencePenalty = (float)options.PresencePenalty,
  95. TopP = (float)options.TopP,
  96. };
  97. }
  98. }
  99. }
  100. }