
LlamaSharpConfig.cs

using LLama.Common;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
    /// <summary>
    /// Represents the configuration for LLamaSharp. Available properties are `ModelPath`, `ContextSize`,
    /// `Seed`, `GpuLayerCount`, `MainGpu`, `SplitMode` and `DefaultInferenceParams`.
    /// </summary>
    public class LLamaSharpConfig
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="LLamaSharpConfig"/> class.
        /// </summary>
        /// <param name="modelPath">The path to the model file.</param>
        public LLamaSharpConfig(string modelPath)
        {
            ModelPath = modelPath;
        }

        /// <summary>
        /// Gets or sets the path to the model file.
        /// </summary>
        public string ModelPath { get; set; }

        /// <summary>
        /// Gets or sets the size of the context.
        /// </summary>
        public uint? ContextSize { get; set; }

        /// <summary>
        /// Gets or sets the seed value.
        /// </summary>
        public uint? Seed { get; set; }

        /// <summary>
        /// Gets or sets the number of layers to offload to the GPU.
        /// </summary>
        public int? GpuLayerCount { get; set; }

        /// <summary>
        /// How main_gpu is interpreted depends on split_mode:
        /// <list type="bullet">
        /// <item>
        /// <term>None</term>
        /// <description>The GPU that is used for the entire model.</description>
        /// </item>
        /// <item>
        /// <term>Row</term>
        /// <description>The GPU that is used for small tensors and intermediate results.</description>
        /// </item>
        /// <item>
        /// <term>Layer</term>
        /// <description>Ignored.</description>
        /// </item>
        /// </list>
        /// </summary>
        public int MainGpu { get; set; } = 0;

        /// <summary>
        /// How to split the model across multiple GPUs.
        /// </summary>
        public GPUSplitMode SplitMode { get; set; } = GPUSplitMode.None;

        /// <summary>
        /// Gets or sets the default inference parameters.
        /// </summary>
        public InferenceParams? DefaultInferenceParams { get; set; }
    }
}
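
For reference, below is a minimal usage sketch (not part of the file above) showing how the configuration might be populated before handing it to the Kernel Memory integration. The model path, stop sequence, and parameter values are placeholder assumptions; adjust them for your own model and hardware.

using System.Collections.Generic;
using LLama.Common;
using LLama.Native;
using LLamaSharp.KernelMemory;

// Placeholder model path; point this at a real GGUF file on disk.
var config = new LLamaSharpConfig(@"models/llama-2-7b.Q4_K_M.gguf")
{
    ContextSize = 2048,            // prompt + completion window, in tokens
    Seed = 1337,                   // fixed seed for reproducible sampling
    GpuLayerCount = 20,            // number of layers to offload to the GPU
    MainGpu = 0,                   // used for the whole model when SplitMode is None
    SplitMode = GPUSplitMode.None,
    DefaultInferenceParams = new InferenceParams
    {
        MaxTokens = 256,                                  // cap on generated tokens
        AntiPrompts = new List<string> { "Question:" }    // hypothetical stop sequence
    }
};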