
LLamaContext.cs

using LLama.Exceptions;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.IO.MemoryMappedFiles;
using LLama.Common;
using System.Runtime.InteropServices;
using LLama.Extensions;
using LLama.Abstractions;
using Microsoft.Extensions.Logging;

namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// A llama_context, which holds all the context required to interact with a model
    /// </summary>
    public sealed class LLamaContext
        : IDisposable
    {
        private readonly ILogger? _logger;
        private readonly Encoding _encoding;
        private readonly SafeLLamaContextHandle _ctx;

        /// <summary>
        /// Total number of tokens in the vocabulary of this model
        /// </summary>
        public int VocabCount => _ctx.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public int ContextSize => _ctx.ContextSize;

        /// <summary>
        /// Dimension of embedding vectors
        /// </summary>
        public int EmbeddingSize => _ctx.EmbeddingSize;

        /// <summary>
        /// Get the number of tokens in the KV Cache for this context
        /// </summary>
        public int KVCacheTokenCount => _ctx.KVCacheTokenCount;

        /// <summary>
        /// The model params set for this model.
        /// </summary>
        public IModelParams Params { get; set; }

        /// <summary>
        /// The native handle, which can be passed to the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLLamaContextHandle NativeHandle => _ctx;

        /// <summary>
        /// The encoding set for this model to deal with text input.
        /// </summary>
        public Encoding Encoding => _encoding;

        /// <summary>
        /// Create a new LLamaContext from the given model parameters.
        /// </summary>
        /// <param name="params">Model params.</param>
        /// <param name="logger">The logger.</param>
        [Obsolete("Use LLamaWeights.CreateContext instead")]
        public LLamaContext(IModelParams @params, ILogger? logger = null)
        {
            Params = @params;
            _logger = logger;
            _encoding = @params.Encoding;

            _logger?.LogInformation($"[LLamaContext] Initializing LLama model with params: {Params}");
            _ctx = Utils.InitLLamaContextFromModelParams(Params);
        }

        internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILogger? logger = null)
        {
            Params = @params;
            _logger = logger;
            _encoding = @params.Encoding;
            _ctx = nativeContext;
        }

        /// <summary>
        /// Create a new LLamaContext for the given LLamaWeights
        /// </summary>
        /// <param name="model"></param>
        /// <param name="params"></param>
        /// <param name="logger"></param>
        /// <exception cref="ObjectDisposedException"></exception>
        public LLamaContext(LLamaWeights model, IModelParams @params, ILogger? logger = null)
        {
            if (model.NativeHandle.IsClosed)
                throw new ObjectDisposedException("Cannot create context, model weights have been disposed");

            Params = @params;
            _logger = logger;
            _encoding = @params.Encoding;

            using var pin = @params.ToLlamaContextParams(out var lparams);
            _ctx = SafeLLamaContextHandle.Create(model.NativeHandle, lparams);
        }
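
        // Hypothetical usage sketch (not part of the original file): loading weights and
        // creating a context from them. `ModelParams` and the model path are assumptions.
        //
        //     var parms = new ModelParams("path/to/model.gguf") { ContextSize = 1024 };
        //     using var weights = LLamaWeights.LoadFromFile(parms);
        //     using var ctx = new LLamaContext(weights, parms);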

        /// <summary>
        /// Create a copy of the current state of this context
        /// </summary>
        /// <returns></returns>
        public LLamaContext Clone()
        {
            using var pin = Params.ToLlamaContextParams(out var lparams);
            var clone = _ctx.Clone(lparams);
            return new LLamaContext(clone, Params);
        }
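
        // Hypothetical sketch (an assumption, not from the original file): if cloning copies
        // the native context state, two continuations can be explored from one evaluated
        // prompt without re-evaluating it.
        //
        //     var past = ctx.Eval(ctx.Tokenize(prompt), 0);
        //     using var fork = ctx.Clone();   // continue `ctx` and `fork` independently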

        /// <summary>
        /// Tokenize a string.
        /// </summary>
        /// <param name="text"></param>
        /// <param name="addBos">Whether to prepend the BOS (beginning-of-sequence) token to the text.</param>
        /// <returns></returns>
        public llama_token[] Tokenize(string text, bool addBos = true)
        {
            return _ctx.Tokenize(text, addBos, _encoding);
        }

        /// <summary>
        /// Detokenize the tokens to text.
        /// </summary>
        /// <param name="tokens"></param>
        /// <returns></returns>
        public string DeTokenize(IEnumerable<llama_token> tokens)
        {
            var sb = new StringBuilder();
            foreach (var token in tokens)
                _ctx.TokenToString(token, _encoding, sb);
            return sb.ToString();
        }
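
        // Hypothetical round-trip sketch (an assumption, not from the original file):
        //
        //     var tokens = ctx.Tokenize("Hello, world!");   // BOS prepended by default
        //     var text   = ctx.DeTokenize(tokens);          // ~ "Hello, world!"
        //
        // The round trip is not always byte-exact: BOS handling and the configured
        // Encoding can alter leading whitespace.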

        /// <summary>
        /// Save the state to specified path.
        /// </summary>
        /// <param name="filename"></param>
        public void SaveState(string filename)
        {
            // Delete that file before overwriting it
            if (File.Exists(filename))
                File.Delete(filename);

            // Estimate size of state to write to disk. This is always equal to or greater than the actual size.
            var estimatedStateSize = (long)NativeApi.llama_get_state_size(_ctx);

            // Map the file and write the bytes directly to it. This saves copying the bytes into a C# array.
            long writtenBytes;
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Create, null, estimatedStateSize))
            using (var view = file.CreateViewAccessor(0, estimatedStateSize))
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    writtenBytes = (long)NativeApi.llama_copy_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }

            // Truncate the file to the actual size of data that was written
            using (var fileStream = new FileStream(filename, FileMode.Open))
                fileStream.SetLength(writtenBytes);
        }
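
        // Hypothetical sketch (an assumption, not from the original file): persisting an
        // evaluated prompt so it can be restored later without re-evaluating it.
        //
        //     var past = ctx.Eval(ctx.Tokenize(systemPrompt), 0);
        //     ctx.SaveState("prompt.state");
        //     // ... later, on a context created with the same model and params:
        //     ctx.LoadState("prompt.state");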

        /// <summary>
        /// Get the state data as a byte array.
        /// </summary>
        /// <returns></returns>
        [Obsolete("Use `GetState` instead, which supports larger states (over 2GB)")]
        public byte[] GetStateData()
        {
            var stateSize = NativeApi.llama_get_state_size(_ctx);
            byte[] stateMemory = new byte[stateSize];
            NativeApi.llama_copy_state_data(_ctx, stateMemory);
            return stateMemory;
        }

        /// <summary>
        /// Get the state data as an opaque handle
        /// </summary>
        /// <returns></returns>
        public State GetState()
        {
            var stateSize = _ctx.GetStateSize();
            unsafe
            {
                // Allocate a chunk of memory large enough to hold the entire state
                var memory = Marshal.AllocHGlobal((nint)stateSize);
                try
                {
                    // Copy the state data into memory, discovering the actual size required
                    var actualSize = _ctx.GetState(memory, stateSize);

                    // Shrink the allocation down to the actual size
                    memory = Marshal.ReAllocHGlobal(memory, (nint)actualSize);

                    // Wrap the memory in a State handle and return it. Clear the local
                    // first so the finally block does not free memory the State now owns.
                    var state = new State(memory);
                    memory = IntPtr.Zero;
                    return state;
                }
                finally
                {
                    if (memory != IntPtr.Zero)
                        Marshal.FreeHGlobal(memory);
                }
            }
        }
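
        // Hypothetical sketch (an assumption, not from the original file): an in-memory
        // snapshot and restore, e.g. to rewind past speculative tokens.
        //
        //     using var snapshot = ctx.GetState();
        //     past = ctx.Eval(ctx.Tokenize(" speculative text", false), past);
        //     ctx.LoadState(snapshot);   // rewind to the snapshot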

        /// <summary>
        /// Load the state from specified path.
        /// </summary>
        /// <param name="filename"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(string filename)
        {
            // Map the state file into memory and pass that pointer directly to `llama_set_state_data` to load from
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Open, null))
            using (var view = file.CreateViewAccessor())
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    NativeApi.llama_set_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="stateData"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(byte[] stateData)
        {
            int stateSize = (int)NativeApi.llama_get_state_size(_ctx);
            if (stateData.Length > stateSize)
            {
                throw new RuntimeError("Failed to validate state size.");
            }
            NativeApi.llama_set_state_data(_ctx, stateData);
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="state"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(State state)
        {
            unsafe
            {
                _ctx.SetState((byte*)state.DangerousGetHandle().ToPointer());
            }
        }
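
        // Hypothetical sketch (an assumption, not from the original file): the byte[]
        // overload pairs with the obsolete GetStateData and is limited by array sizing
        // to states under 2GB; prefer the State overload for large contexts.
        //
        //     var bytes = ctx.GetStateData();
        //     ctx.LoadState(bytes);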

        /// <summary>
        /// Perform sampling. Please don't use this unless you fully know what it does.
        /// </summary>
        /// <param name="candidates"></param>
        /// <param name="mirostat_mu"></param>
        /// <param name="temperature"></param>
        /// <param name="mirostat"></param>
        /// <param name="mirostatTau"></param>
        /// <param name="mirostatEta"></param>
        /// <param name="topK"></param>
        /// <param name="topP"></param>
        /// <param name="tfsZ"></param>
        /// <param name="typicalP"></param>
        /// <param name="grammar"></param>
        /// <returns></returns>
        public llama_token Sample(LLamaTokenDataArray candidates, ref float? mirostat_mu, float temperature = 0.8f, MirostatType mirostat = MirostatType.Disable,
                                  float mirostatTau = 5.0f, float mirostatEta = 0.1f, int topK = 40, float topP = 0.95f, float tfsZ = 1.0f, float typicalP = 1.0f,
                                  SafeLLamaGrammarHandle? grammar = null)
        {
            llama_token id;

            if (grammar != null)
            {
                SamplingApi.llama_sample_grammar(_ctx, candidates, grammar);
            }

            if (temperature <= 0)
            {
                // Greedy sampling
                id = SamplingApi.llama_sample_token_greedy(_ctx, candidates);
            }
            else
            {
                var mu = mirostat_mu ?? (2 * mirostatTau);

                if (mirostat == MirostatType.Mirostat)
                {
                    const int mirostat_m = 100;
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token_mirostat(_ctx, candidates, mirostatTau, mirostatEta, mirostat_m, ref mu);
                }
                else if (mirostat == MirostatType.Mirostat2)
                {
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token_mirostat_v2(_ctx, candidates, mirostatTau, mirostatEta, ref mu);
                }
                else
                {
                    // Temperature sampling
                    SamplingApi.llama_sample_top_k(_ctx, candidates, topK, 1);
                    SamplingApi.llama_sample_tail_free(_ctx, candidates, tfsZ, 1);
                    SamplingApi.llama_sample_typical(_ctx, candidates, typicalP, 1);
                    SamplingApi.llama_sample_top_p(_ctx, candidates, topP, 1);
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token(_ctx, candidates);
                }

                mirostat_mu = mu;
            }

            if (grammar != null)
            {
                NativeApi.llama_grammar_accept_token(_ctx, grammar, id);
            }

            return id;
        }
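
        // Hypothetical sketch (an assumption, not from the original file): a minimal
        // generation step combining ApplyPenalty, Sample and Eval. `past` and
        // `lastTokens` are illustrative caller-side state.
        //
        //     float? mu = null;
        //     var candidates = ctx.ApplyPenalty(lastTokens);
        //     var id = ctx.Sample(candidates, ref mu, temperature: 0.7f);
        //     lastTokens.Add(id);
        //     past = ctx.Eval(new[] { id }, past);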

        /// <summary>
        /// Apply penalties to the token candidates. Please don't use this unless you fully know what it does.
        /// </summary>
        /// <param name="lastTokens"></param>
        /// <param name="logitBias"></param>
        /// <param name="repeatLastTokensCount"></param>
        /// <param name="repeatPenalty"></param>
        /// <param name="alphaFrequency"></param>
        /// <param name="alphaPresence"></param>
        /// <param name="penalizeNL"></param>
        /// <returns></returns>
        public LLamaTokenDataArray ApplyPenalty(IEnumerable<llama_token> lastTokens, Dictionary<llama_token, float>? logitBias = null,
                                                int repeatLastTokensCount = 64, float repeatPenalty = 1.1f, float alphaFrequency = .0f, float alphaPresence = .0f,
                                                bool penalizeNL = true)
        {
            var logits = _ctx.GetLogits();

            // Apply params.logit_bias map
            if (logitBias is not null)
            {
                foreach (var (key, value) in logitBias)
                    logits[key] += value;
            }

            // Save the newline logit value
            var nl_token = NativeApi.llama_token_nl(_ctx);
            var nl_logit = logits[nl_token];

            // Convert logits into token candidates
            var candidates_p = LLamaTokenDataArray.Create(logits);

            // Extract the most recently returned tokens
            var last_n_repeat = Math.Min(ContextSize, repeatLastTokensCount);
            var last_n_array = lastTokens.TakeLast(last_n_repeat).ToArray();

            // Apply penalties to candidates
            SamplingApi.llama_sample_repetition_penalty(_ctx, candidates_p, last_n_array, repeatPenalty);
            SamplingApi.llama_sample_frequency_and_presence_penalties(_ctx, candidates_p, last_n_array, alphaFrequency, alphaPresence);

            // Restore the newline token's logit value if necessary
            if (!penalizeNL)
            {
                var candidatesSpan = candidates_p.data.Span;
                for (var i = 0; i < candidates_p.data.Length; i++)
                {
                    ref var item = ref candidatesSpan[i];
                    if (item.id == nl_token)
                        item.logit = nl_logit;
                }
                candidates_p.sorted = false;
            }

            return candidates_p;
        }
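
        // Note (an assumption based on the llama.cpp sampling API, not stated in this
        // file): the repetition penalty rescales the logits of tokens appearing in
        // `lastTokens` (dividing positive logits by `repeatPenalty`, multiplying negative
        // ones), while alphaFrequency/alphaPresence subtract count-based and
        // presence-based offsets, analogous to the OpenAI frequency/presence penalties.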

        #region eval overloads
        /// <summary>
        /// Eval a batch of tokens.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(llama_token[] tokens, llama_token pastTokensCount)
        {
            return Eval(tokens.AsSpan(), pastTokensCount);
        }

        /// <summary>
        /// Eval a batch of tokens.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(List<llama_token> tokens, llama_token pastTokensCount)
        {
#if NET5_0_OR_GREATER
            var span = CollectionsMarshal.AsSpan(tokens);
            return Eval(span, pastTokensCount);
#else
            // On netstandard2.0 we can't use CollectionsMarshal to get directly at the internal memory of
            // the list. Instead rent an array and copy the data into it. This avoids an allocation, but can't
            // avoid the copying.
            var rented = System.Buffers.ArrayPool<llama_token>.Shared.Rent(tokens.Count);
            try
            {
                tokens.CopyTo(rented, 0);
                // Rent may return an array longer than requested, so only evaluate the
                // first tokens.Count elements.
                return Eval(rented.AsSpan(0, tokens.Count), pastTokensCount);
            }
            finally
            {
                System.Buffers.ArrayPool<llama_token>.Shared.Return(rented);
            }
#endif
        }

        /// <summary>
        /// Eval a batch of tokens.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(ReadOnlyMemory<llama_token> tokens, llama_token pastTokensCount)
        {
            return Eval(tokens.Span, pastTokensCount);
        }

        /// <summary>
        /// Eval a batch of tokens, splitting it into chunks of at most Params.BatchSize.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(ReadOnlySpan<llama_token> tokens, llama_token pastTokensCount)
        {
            var total = tokens.Length;
            for (var i = 0; i < total; i += Params.BatchSize)
            {
                var n_eval = total - i;
                if (n_eval > Params.BatchSize)
                {
                    n_eval = Params.BatchSize;
                }

                if (!_ctx.Eval(tokens.Slice(i, n_eval), pastTokensCount, Params.Threads))
                {
                    _logger?.LogError("[LLamaContext] Failed to eval.");
                    throw new RuntimeError("Failed to eval.");
                }

                pastTokensCount += n_eval;
            }
            return pastTokensCount;
        }
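
        // Worked example (illustrative, not from the original file): a prompt longer than
        // Params.BatchSize is evaluated in chunks, with pastTokensCount advancing by
        // n_eval each step. E.g. 300 tokens with BatchSize = 128 runs three native eval
        // calls of 128, 128 and 44 tokens, returning pastTokensCount + 300.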
        #endregion

        /// <summary>
        /// Convert a token into a string
        /// </summary>
        /// <param name="token"></param>
        /// <returns></returns>
        public string TokenToString(llama_token token)
        {
            return NativeHandle.TokenToString(token, Encoding);
        }

        /// <summary>
        /// Append a single token to a string builder
        /// </summary>
        /// <param name="token">Token to decode</param>
        /// <param name="dest">String builder to append the result to</param>
        public void TokenToString(llama_token token, StringBuilder dest)
        {
            NativeHandle.TokenToString(token, Encoding, dest);
        }
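
        // Hypothetical streaming sketch (an assumption, not from the original file): the
        // StringBuilder overload avoids allocating an intermediate string per token when
        // decoding sampled tokens one at a time.
        //
        //     var sb = new StringBuilder();
        //     foreach (var id in generatedTokens)
        //         ctx.TokenToString(id, sb);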

        /// <inheritdoc />
        public void Dispose()
        {
            _ctx.Dispose();
        }

        /// <summary>
        /// The state of this context, which can be reloaded later
        /// </summary>
        public class State
            : SafeLLamaHandleBase
        {
            internal State(IntPtr memory)
                : base(memory)
            {
            }

            /// <inheritdoc />
            protected override bool ReleaseHandle()
            {
                Marshal.FreeHGlobal(handle);
                return true;
            }
        }
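
        // Design note (added commentary, not in the original file): State derives from
        // SafeLLamaHandleBase, a SafeHandle, so the unmanaged buffer allocated in
        // GetState is released deterministically via Dispose/using, or eventually by the
        // finalizer if the caller forgets to dispose it.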
    }
}