
LLamaWeights.cs

using System;
using LLama.Abstractions;
using LLama.Extensions;
using LLama.Native;

namespace LLama
{
    /// <summary>
    /// A set of model weights, loaded into memory.
    /// </summary>
    public sealed class LLamaWeights
        : IDisposable
    {
        /// <summary>
        /// The native handle, which is used in the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLlamaModelHandle NativeHandle { get; }

        /// <summary>
        /// Total number of tokens in the vocabulary of this model
        /// </summary>
        public int VocabCount => NativeHandle.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public int ContextSize => NativeHandle.ContextSize;

        /// <summary>
        /// Get the size of this model in bytes
        /// </summary>
        public ulong SizeInBytes => NativeHandle.SizeInBytes;

        /// <summary>
        /// Get the number of parameters in this model
        /// </summary>
        public ulong ParameterCount => NativeHandle.ParameterCount;

        /// <summary>
        /// Dimension of embedding vectors
        /// </summary>
        public int EmbeddingSize => NativeHandle.EmbeddingSize;

        internal LLamaWeights(SafeLlamaModelHandle weights)
        {
            NativeHandle = weights;
        }

        /// <summary>
        /// Load weights into memory
        /// </summary>
        /// <param name="params">Parameters describing the model to load</param>
        /// <returns></returns>
        public static LLamaWeights LoadFromFile(IModelParams @params)
        {
            using var pin = @params.ToLlamaModelParams(out var lparams);
            var weights = SafeLlamaModelHandle.LoadFromFile(@params.ModelPath, lparams);

            // Apply any configured LoRA adapters, skipping entries with no path or a non-positive scale
            foreach (var adapter in @params.LoraAdapters)
            {
                if (string.IsNullOrEmpty(adapter.Path))
                    continue;
                if (adapter.Scale <= 0)
                    continue;

                weights.ApplyLoraFromFile(adapter.Path, adapter.Scale, @params.LoraBase);
            }

            return new LLamaWeights(weights);
        }

        /// <inheritdoc />
        public void Dispose()
        {
            NativeHandle.Dispose();
        }

        /// <summary>
        /// Create a llama_context using this model
        /// </summary>
        /// <param name="params">Parameters for the new context</param>
        /// <returns></returns>
        public LLamaContext CreateContext(IContextParams @params)
        {
            return new LLamaContext(this, @params);
        }
    }
}
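
Below is a minimal usage sketch (not part of LLamaWeights.cs) showing how the weights are typically loaded from disk and then used to create a context. It assumes LLama.Common.ModelParams, which in recent LLamaSharp releases implements both IModelParams and IContextParams; the model path is a placeholder.

using LLama;
using LLama.Common;

// Placeholder path to a local GGUF model file.
var parameters = new ModelParams("models/model.gguf")
{
    ContextSize = 2048
};

// Load the weights once; the same weights can back multiple contexts.
using var weights = LLamaWeights.LoadFromFile(parameters);

// Create a llama_context over the loaded weights.
using var context = weights.CreateContext(parameters);

Both LLamaWeights and LLamaContext are IDisposable, so disposing them (as with the using declarations above) releases the native memory held by the underlying handles.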