
LLamaContext.cs

using LLama.Exceptions;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.IO.MemoryMappedFiles;
using LLama.Common;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using LLama.Extensions;
using LLama.Abstractions;
using LLama.Sampling;
using Microsoft.Extensions.Logging;
using System.Threading;

namespace LLama
{
    /// <summary>
    /// A llama_context, which holds all the context required to interact with a model
    /// </summary>
    public sealed class LLamaContext
        : IDisposable
    {
        private readonly ILogger? _logger;

        /// <summary>
        /// Total number of tokens in the vocabulary of this model
        /// </summary>
        public int VocabCount => NativeHandle.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public uint ContextSize => NativeHandle.ContextSize;

        /// <summary>
        /// Dimension of embedding vectors
        /// </summary>
        public int EmbeddingSize => NativeHandle.EmbeddingSize;

        /// <summary>
        /// The context params set for this context
        /// </summary>
        public IContextParams Params { get; }

        /// <summary>
        /// The native handle, which is passed to the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLLamaContextHandle NativeHandle { get; }

        /// <summary>
        /// The encoding set for this model to deal with text input.
        /// </summary>
        public Encoding Encoding { get; }

        private uint _generationThreads;
        private uint _batchThreads;

        /// <summary>
        /// Get or set the number of threads to use for generation
        /// </summary>
        public uint GenerationThreads
        {
            get => _generationThreads;
            set
            {
                _generationThreads = value;
                NativeHandle.SetThreads(_generationThreads, _batchThreads);
            }
        }

        /// <summary>
        /// Get or set the number of threads to use for batch processing
        /// </summary>
        public uint BatchThreads
        {
            get => _batchThreads;
            set
            {
                _batchThreads = value;
                NativeHandle.SetThreads(_generationThreads, _batchThreads);
            }
        }
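
        // Usage sketch (illustrative, not part of the original file): both thread counts
        // can be retuned at runtime through these properties, e.g.
        //
        //   context.GenerationThreads = (uint)Environment.ProcessorCount;
        //   context.BatchThreads = (uint)Environment.ProcessorCount;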

        /// <summary>
        /// Get the maximum batch size for this context
        /// </summary>
        public uint BatchSize => NativeHandle.BatchSize;

        /// <summary>
        /// Create a new LLamaContext for the given LLamaWeights
        /// </summary>
        /// <param name="model"></param>
        /// <param name="params"></param>
        /// <param name="logger"></param>
        /// <exception cref="ObjectDisposedException"></exception>
        public LLamaContext(LLamaWeights model, IContextParams @params, ILogger? logger = null)
        {
            if (model.NativeHandle.IsClosed)
                throw new ObjectDisposedException("Cannot create context, model weights have been disposed");

            Params = @params;
            _logger = logger;
            Encoding = @params.Encoding;

            @params.ToLlamaContextParams(out var lparams);
            NativeHandle = SafeLLamaContextHandle.Create(model.NativeHandle, lparams);

            // It's not possible to get these values back from llama.cpp, so store a copy of them here.
            _generationThreads = lparams.n_threads;
            _batchThreads = lparams.n_threads_batch;
        }
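
        // Usage sketch (illustrative, not part of the original file). ModelParams and
        // LLamaWeights.LoadFromFile are the usual entry points elsewhere in LLamaSharp:
        //
        //   var parameters = new ModelParams("path/to/model.gguf");
        //   using var weights = LLamaWeights.LoadFromFile(parameters);
        //   using var context = new LLamaContext(weights, parameters);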

        /// <summary>
        /// Set the seed for the RNG
        /// </summary>
        /// <param name="seed"></param>
        public void SetSeed(uint seed)
        {
            NativeHandle.SetSeed(seed);
        }

        /// <summary>
        /// Tokenize a string.
        /// </summary>
        /// <param name="text"></param>
        /// <param name="addBos">Whether to add a BOS (beginning-of-sequence) token to the text.</param>
        /// <param name="special">Allow tokenizing special and/or control tokens which otherwise are not exposed and are treated as plain text.</param>
        /// <returns></returns>
        public LLamaToken[] Tokenize(string text, bool addBos = true, bool special = false)
        {
            return NativeHandle.Tokenize(text, addBos, special, Encoding);
        }
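
        // Usage sketch (illustrative): tokenize a prompt with the BOS token prepended.
        //
        //   LLamaToken[] tokens = context.Tokenize("Hello, world!", addBos: true);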

        /// <summary>
        /// Detokenize the tokens to text.
        /// </summary>
        /// <param name="tokens"></param>
        /// <returns></returns>
        [Obsolete("Use a `StreamingTokenDecoder` instead")]
        public string DeTokenize(IReadOnlyList<LLamaToken> tokens)
        {
            // Do **not** use this method as an example of how to correctly use the StreamingTokenDecoder!
            // A decoder should be kept around for the entire time you are decoding one stream of tokens.
            var decoder = new StreamingTokenDecoder(this);
            decoder.AddRange(tokens);
            return decoder.Read();
        }
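
        // Correct decoder usage, per the warning above (illustrative; `tokenChunks` is a
        // hypothetical stream of token batches):
        //
        //   var decoder = new StreamingTokenDecoder(context);
        //   foreach (var chunk in tokenChunks)
        //   {
        //       decoder.AddRange(chunk);
        //       Console.Write(decoder.Read());   // Read returns the text decoded so far
        //   }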

        /// <summary>
        /// Save the state to the specified path.
        /// </summary>
        /// <param name="filename"></param>
        public void SaveState(string filename)
        {
            // Delete the file before overwriting it
            if (File.Exists(filename))
                File.Delete(filename);

            // Estimate the size of the state to write to disk; this is always equal to or greater than the actual size
            var estimatedStateSize = (long)NativeApi.llama_get_state_size(NativeHandle);

            // Map the file and write the bytes directly to it. This saves copying the bytes into a C# array
            long writtenBytes;
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Create, null, estimatedStateSize))
            using (var view = file.CreateViewAccessor(0, estimatedStateSize))
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    writtenBytes = (long)NativeApi.llama_copy_state_data(NativeHandle, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }

            // Truncate the file to the actual size of the data that was written
            using (var fileStream = new FileStream(filename, FileMode.Open))
                fileStream.SetLength(writtenBytes);
        }

        /// <summary>
        /// Get the state data as an opaque handle, which can be loaded later using <see cref="LoadState(State)"/>
        /// </summary>
        /// <remarks>Use <see cref="SaveState"/> if you intend to save this state to disk.</remarks>
        /// <returns></returns>
        public State GetState()
        {
            var stateSize = NativeHandle.GetStateSize();

            // Allocate a chunk of memory large enough to hold the entire state
            var memory = Marshal.AllocHGlobal((nint)stateSize);
            try
            {
                // Copy the state data into memory, discovering the actual size required
                var actualSize = NativeHandle.GetState(memory, stateSize);

                // Shrink to size
                memory = Marshal.ReAllocHGlobal(memory, (nint)actualSize);

                // Wrap memory in a "state"
                var state = new State(memory, actualSize);

                // Set memory to zero to prevent it from being freed in the finally block
                memory = IntPtr.Zero;

                return state;
            }
            finally
            {
                if (memory != IntPtr.Zero)
                    Marshal.FreeHGlobal(memory);
            }
        }

        /// <summary>
        /// Load the state from the specified path.
        /// </summary>
        /// <param name="filename"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(string filename)
        {
            // Map the state file into memory and pass that pointer directly to `llama_set_state_data` to load from
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Open, null))
            using (var view = file.CreateViewAccessor())
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    NativeApi.llama_set_state_data(NativeHandle, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }
        }
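
        // Round-trip sketch (illustrative): persist the context state to disk and restore
        // it later into a context created from the same model and parameters.
        //
        //   context.SaveState("state.bin");
        //   // ... later, possibly in a fresh context ...
        //   context.LoadState("state.bin");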

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="state"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(State state)
        {
            unsafe
            {
                NativeHandle.SetState((byte*)state.DangerousGetHandle().ToPointer());
            }
        }
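
        // In-memory alternative (illustrative): capture and restore state without touching disk.
        //
        //   using var state = context.GetState();
        //   context.LoadState(state);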

        /// <summary>
        /// Sample a single token from this context, using the given sampling pipeline
        /// </summary>
        /// <param name="pipeline">The pipeline to use to process the logits and to select a token</param>
        /// <param name="lastTokens">The tokens recently returned from the model</param>
        /// <returns>The selected token</returns>
        public LLamaToken Sample(ISamplingPipeline pipeline, ReadOnlySpan<LLamaToken> lastTokens)
        {
            var token = pipeline.Sample(NativeHandle, NativeHandle.GetLogits(), lastTokens);
            pipeline.Accept(NativeHandle, token);
            return token;
        }
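
        // Usage sketch (illustrative, not part of the original file). DefaultSamplingPipeline
        // is assumed here as one ISamplingPipeline implementation from LLama.Sampling:
        //
        //   var pipeline = new DefaultSamplingPipeline();
        //   LLamaToken next = context.Sample(pipeline, lastTokens);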

        /// <summary>
        /// Perform sampling directly over a candidate array. Don't use this unless you fully understand what it does.
        /// </summary>
        /// <param name="candidates"></param>
        /// <param name="mirostat_mu"></param>
        /// <param name="temperature"></param>
        /// <param name="mirostat"></param>
        /// <param name="mirostatTau"></param>
        /// <param name="mirostatEta"></param>
        /// <param name="topK"></param>
        /// <param name="topP"></param>
        /// <param name="tfsZ"></param>
        /// <param name="typicalP"></param>
        /// <param name="grammar"></param>
        /// <param name="minP"></param>
        /// <returns></returns>
        public LLamaToken Sample(LLamaTokenDataArray candidates, ref float? mirostat_mu, float temperature, MirostatType mirostat,
                                 float mirostatTau, float mirostatEta, int topK, float topP, float tfsZ, float typicalP,
                                 SafeLLamaGrammarHandle? grammar, float minP)
        {
            LLamaToken id;

            if (grammar != null)
            {
                candidates.ApplyGrammar(NativeHandle, grammar);
            }

            if (temperature <= 0)
            {
                // Greedy sampling
                id = candidates.SampleTokenGreedy(NativeHandle);
            }
            else
            {
                var mu = mirostat_mu ?? (2 * mirostatTau);

                if (mirostat == MirostatType.Mirostat)
                {
                    const int mirostat_m = 100;
                    candidates.Temperature(NativeHandle, temperature);
                    id = candidates.SampleTokenMirostat(NativeHandle, mirostatTau, mirostatEta, mirostat_m, ref mu);
                }
                else if (mirostat == MirostatType.Mirostat2)
                {
                    candidates.Temperature(NativeHandle, temperature);
                    id = candidates.SampleTokenMirostat2(NativeHandle, mirostatTau, mirostatEta, ref mu);
                }
                else
                {
                    // Apply the standard filter chain, then sample with temperature
                    candidates.TopK(NativeHandle, topK);
                    candidates.TailFree(NativeHandle, tfsZ);
                    candidates.LocallyTypical(NativeHandle, typicalP);
                    candidates.TopP(NativeHandle, topP);
                    candidates.MinP(NativeHandle, minP);
                    candidates.Temperature(NativeHandle, temperature);
                    id = candidates.SampleToken(NativeHandle);
                }

                mirostat_mu = mu;
            }

            grammar?.AcceptToken(NativeHandle, id);
            return id;
        }

        /// <summary>
        /// Apply penalties to the candidate tokens. Don't use this unless you fully understand what it does.
        /// </summary>
        /// <param name="logits_i"></param>
        /// <param name="lastTokens"></param>
        /// <param name="logitBias"></param>
        /// <param name="repeatLastTokensCount"></param>
        /// <param name="repeatPenalty"></param>
        /// <param name="alphaFrequency"></param>
        /// <param name="alphaPresence"></param>
        /// <param name="penalizeNL"></param>
        /// <returns></returns>
        public LLamaTokenDataArray ApplyPenalty(int logits_i, IEnumerable<LLamaToken> lastTokens, Dictionary<LLamaToken, float>? logitBias = null,
                                                int repeatLastTokensCount = 64, float repeatPenalty = 1.1f, float alphaFrequency = .0f, float alphaPresence = .0f,
                                                bool penalizeNL = true)
        {
            var logits = NativeHandle.GetLogitsIth(logits_i);

            // Apply the logitBias map
            if (logitBias is not null)
            {
                foreach (var (key, value) in logitBias)
                    logits[(int)key] += value;
            }

            // Save the newline logit value
            var nl_token = NativeApi.llama_token_nl(NativeHandle.ModelHandle);
            var nl_logit = logits[(int)nl_token];

            // Convert logits into token candidates
            var candidates_p = LLamaTokenDataArray.Create(logits);

            // Extract the most recently returned tokens
            var last_n_repeat = Math.Min((int)ContextSize, repeatLastTokensCount);
            var last_n_array = lastTokens.TakeLast(last_n_repeat).ToArray();

            // Apply penalties to the candidates
            candidates_p.RepetitionPenalty(NativeHandle, last_n_array, repeatPenalty, alphaFrequency, alphaPresence);

            // Restore the newline token logit value if necessary
            if (!penalizeNL)
            {
                var candidatesSpan = candidates_p.data.Span;
                for (var i = 0; i < candidates_p.data.Length; i++)
                {
                    ref var item = ref candidatesSpan[i];
                    if (item.id == nl_token)
                        item.logit = nl_logit;
                }
                candidates_p.sorted = false;
            }

            return candidates_p;
        }
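
        // Low-level flow sketch (illustrative): ApplyPenalty and the low-level Sample overload
        // were typically used together, roughly as below. MirostatType.Disable is assumed to be
        // the member name for "no mirostat".
        //
        //   var candidates = context.ApplyPenalty(0, lastTokens);
        //   float? mu = null;
        //   var id = context.Sample(candidates, ref mu, temperature: 0.8f, MirostatType.Disable,
        //                           mirostatTau: 5f, mirostatEta: 0.1f, topK: 40, topP: 0.95f,
        //                           tfsZ: 1f, typicalP: 1f, grammar: null, minP: 0.05f);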

        #region eval overloads
        /// <summary>
        /// Decode a batch of tokens
        /// </summary>
        /// <param name="batch"></param>
        public DecodeResult Decode(LLamaBatch batch)
        {
            if (batch.TokenCount == 0)
                return 0;
            if (batch.TokenCount > Params.BatchSize)
                throw new ArgumentException("Input contains more tokens than configured batch size", nameof(batch));

            return (DecodeResult)NativeHandle.Decode(batch);
        }

        /// <summary>
        /// Decode a batch of tokens on a background thread
        /// </summary>
        /// <param name="batch"></param>
        /// <param name="cancellationToken"></param>
        public Task<DecodeResult> DecodeAsync(LLamaBatch batch, CancellationToken cancellationToken = default)
        {
            return Task.Run(() => Decode(batch), cancellationToken);
        }
        #endregion
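
        // Decode sketch (illustrative; assumes `batch` is a populated LLamaBatch no larger
        // than Params.BatchSize, and that DecodeResult.Ok is the success value):
        //
        //   var result = await context.DecodeAsync(batch);
        //   if (result != DecodeResult.Ok)
        //       throw new InvalidOperationException($"Decode failed: {result}");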

        /// <inheritdoc />
        public void Dispose()
        {
            NativeHandle.Dispose();
        }

        /// <summary>
        /// The state of this model, which can be reloaded later
        /// </summary>
        public class State
            : SafeLLamaHandleBase
        {
            private readonly ulong _size;

            internal State(IntPtr memory, ulong size)
                : base(memory, true)
            {
                _size = size;
            }

            /// <inheritdoc />
            protected override bool ReleaseHandle()
            {
                Marshal.FreeHGlobal(handle);
                return true;
            }

            /// <summary>
            /// Convert this state to a byte array
            /// </summary>
            /// <returns></returns>
            public byte[] ToByteArray()
            {
                var bytes = new byte[_size];
                Marshal.Copy(handle, bytes, 0, (int)_size);
                return bytes;
            }

            /// <summary>
            /// Load state from a byte array
            /// </summary>
            /// <param name="bytes"></param>
            /// <returns></returns>
            public static State FromByteArray(byte[] bytes)
            {
                var memory = Marshal.AllocHGlobal(bytes.Length);
                Marshal.Copy(bytes, 0, memory, bytes.Length);
                return new State(memory, (ulong)bytes.Length);
            }
        }
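
        // Serialization sketch (illustrative): a State can round-trip through a byte array,
        // e.g. to store it somewhere other than a file.
        //
        //   byte[] bytes = context.GetState().ToByteArray();
        //   using var restored = State.FromByteArray(bytes);
        //   context.LoadState(restored);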
    }
}