
LLamaContext.cs (19 kB)

using LLama.Exceptions;
using LLama.Native;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.IO.MemoryMappedFiles;
using LLama.Common;
using System.Runtime.InteropServices;
using LLama.Extensions;
using Microsoft.Win32.SafeHandles;
using LLama.Abstractions;

namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// A llama_context, which holds all the context required to interact with a model
    /// </summary>
    public class LLamaContext
        : IDisposable
    {
        private readonly ILLamaLogger? _logger;
        private readonly Encoding _encoding;
        private readonly SafeLLamaContextHandle _ctx;

        /// <summary>
        /// Total number of tokens in the vocabulary of this model
        /// </summary>
        public int VocabCount => _ctx.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public int ContextSize => _ctx.ContextSize;

        /// <summary>
        /// Dimension of the embedding vectors
        /// </summary>
        public int EmbeddingSize => _ctx.EmbeddingSize;

        /// <summary>
        /// The model params set for this model.
        /// </summary>
        public IModelParams Params { get; set; }

        /// <summary>
        /// The native handle, which can be passed to the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLLamaContextHandle NativeHandle => _ctx;

        /// <summary>
        /// The encoding used by this model to handle text input.
        /// </summary>
        public Encoding Encoding => _encoding;

        /// <summary>
        /// Create a new LLamaContext from model parameters.
        /// </summary>
        /// <param name="params">Model params.</param>
        /// <param name="encoding">Encoding used to handle text input.</param>
        /// <param name="logger">The logger.</param>
        [Obsolete("Use the LLamaWeights.CreateContext instead")]
        public LLamaContext(IModelParams @params, string encoding = "UTF-8", ILLamaLogger? logger = null)
        {
            Params = @params;

            _logger = logger;
            _encoding = Encoding.GetEncoding(encoding);

            _logger?.Log(nameof(LLamaContext), $"Initializing LLama model with params: {this.Params}", ILLamaLogger.LogLevel.Info);
            _ctx = Utils.InitLLamaContextFromModelParams(Params);
        }

        internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, Encoding encoding, ILLamaLogger? logger = null)
        {
            Params = @params;

            _logger = logger;
            _encoding = encoding;
            _ctx = nativeContext;
        }

        /// <summary>
        /// Create a new LLamaContext for the given LLamaWeights
        /// </summary>
        /// <param name="model"></param>
        /// <param name="params"></param>
        /// <param name="encoding"></param>
        /// <param name="logger"></param>
        /// <exception cref="ObjectDisposedException"></exception>
        public LLamaContext(LLamaWeights model, IModelParams @params, Encoding encoding, ILLamaLogger? logger = null)
        {
            if (model.NativeHandle.IsClosed)
                throw new ObjectDisposedException("Cannot create context, model weights have been disposed");

            Params = @params;

            _logger = logger;
            _encoding = encoding;

            using var pin = @params.ToLlamaContextParams(out var lparams);
            _ctx = SafeLLamaContextHandle.Create(model.NativeHandle, lparams);
        }
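
        // Example sketch: constructing a context from pre-loaded weights via the constructor above.
        // Loading the weights themselves (e.g. with LLamaWeights.LoadFromFile) is assumed to happen
        // elsewhere; UTF-8 is simply a common encoding choice, not a requirement.
        private static LLamaContext CreateContextExample(LLamaWeights weights, IModelParams modelParams)
        {
            // Throws ObjectDisposedException if the weights have already been disposed.
            return new LLamaContext(weights, modelParams, Encoding.UTF8);
        }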

        /// <summary>
        /// Create a copy of the current state of this context
        /// </summary>
        /// <returns></returns>
        public LLamaContext Clone()
        {
            using var pin = Params.ToLlamaContextParams(out var lparams);

            // Create a blank new context for the model
            var ctx = new LLamaContext(SafeLLamaContextHandle.Create(NativeHandle.ModelHandle, lparams), Params, _encoding);

            // Copy across the state
            using var state = GetState();
            ctx.LoadState(state);

            return ctx;
        }

        /// <summary>
        /// Tokenize a string.
        /// </summary>
        /// <param name="text"></param>
        /// <param name="addBos">Whether to prepend the BOS (beginning-of-sequence) token.</param>
        /// <returns></returns>
        public llama_token[] Tokenize(string text, bool addBos = true)
        {
            return _ctx.Tokenize(text, addBos, _encoding);
        }

        /// <summary>
        /// Detokenize the tokens back into text.
        /// </summary>
        /// <param name="tokens"></param>
        /// <returns></returns>
        public string DeTokenize(IEnumerable<llama_token> tokens)
        {
            StringBuilder sb = new();
            foreach (var token in tokens)
                sb.Append(_ctx.TokenToString(token, _encoding));
            return sb.ToString();
        }
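
        // Example sketch: a Tokenize/DeTokenize round trip. The exact token ids depend entirely on
        // the loaded model's vocabulary, so no particular output is assumed here.
        private string TokenRoundTripExample(string text)
        {
            // Encode the text into model-specific token ids, prepending the BOS token.
            var tokens = Tokenize(text, addBos: true);

            // Decode the ids back into a string using the context's configured encoding.
            return DeTokenize(tokens);
        }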

        /// <summary>
        /// Save the state to the specified path.
        /// </summary>
        /// <param name="filename"></param>
        public void SaveState(string filename)
        {
            // Delete that file before overwriting it
            if (File.Exists(filename))
                File.Delete(filename);

            // Estimate the size of state to write to disk; this is always equal to or greater than the actual size
            var estimatedStateSize = (long)NativeApi.llama_get_state_size(_ctx);

            // Map the file and write the bytes directly to it. This saves copying the bytes into a C# array
            long writtenBytes;
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Create, null, estimatedStateSize))
            using (var view = file.CreateViewAccessor(0, estimatedStateSize))
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    writtenBytes = (long)NativeApi.llama_copy_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }

            // Truncate the file to the actual size of data that was written
            using (var fileStream = new FileStream(filename, FileMode.Open))
                fileStream.SetLength(writtenBytes);
        }
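
        // Example sketch: persisting the evaluated state to disk so an expensive prompt does not
        // have to be re-evaluated on the next run. The file name is an arbitrary placeholder.
        private void SaveAndRestoreExample()
        {
            // Write the current state out through the memory-mapped path above.
            SaveState("context-state.bin");

            // ... later, in this context or a fresh one created from the same model:
            LoadState("context-state.bin");
        }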

        /// <summary>
        /// Get the state data as a byte array.
        /// </summary>
        /// <returns></returns>
        [Obsolete("Use `GetState` instead, this supports larger states (over 2GB)")]
        public byte[] GetStateData()
        {
            var stateSize = NativeApi.llama_get_state_size(_ctx);
            byte[] stateMemory = new byte[stateSize];
            NativeApi.llama_copy_state_data(_ctx, stateMemory);
            return stateMemory;
        }

        /// <summary>
        /// Get the state data as an opaque handle
        /// </summary>
        /// <returns></returns>
        public State GetState()
        {
            var stateSize = NativeApi.llama_get_state_size(_ctx);

            unsafe
            {
                var bigMemory = Marshal.AllocHGlobal((nint)stateSize);
                var smallMemory = IntPtr.Zero;
                try
                {
                    // Copy the state data into "big memory", discover the actual size required
                    var actualSize = NativeApi.llama_copy_state_data(_ctx, (byte*)bigMemory);

                    // Allocate a smaller buffer
                    smallMemory = Marshal.AllocHGlobal((nint)actualSize);

                    // Copy into the smaller buffer and free the large one to save excess memory usage
                    Buffer.MemoryCopy(bigMemory.ToPointer(), smallMemory.ToPointer(), actualSize, actualSize);
                    Marshal.FreeHGlobal(bigMemory);
                    bigMemory = IntPtr.Zero;

                    return new State(smallMemory);
                }
                catch
                {
                    if (bigMemory != IntPtr.Zero)
                        Marshal.FreeHGlobal(bigMemory);
                    if (smallMemory != IntPtr.Zero)
                        Marshal.FreeHGlobal(smallMemory);
                    throw;
                }
            }
        }

        /// <summary>
        /// Load the state from the specified path.
        /// </summary>
        /// <param name="filename"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(string filename)
        {
            // Map the state file into memory and pass that pointer directly to `llama_set_state_data` to load from
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Open, null))
            using (var view = file.CreateViewAccessor())
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    NativeApi.llama_set_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="stateData"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(byte[] stateData)
        {
            int stateSize = (int)NativeApi.llama_get_state_size(_ctx);
            if (stateData.Length > stateSize)
            {
                throw new RuntimeError("Failed to validate state size.");
            }
            NativeApi.llama_set_state_data(_ctx, stateData);
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="state"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(State state)
        {
            unsafe
            {
                NativeApi.llama_set_state_data(_ctx, (byte*)state.DangerousGetHandle().ToPointer());
            }
        }
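
        // Example sketch: an in-memory snapshot and rollback using the opaque State handle, which
        // avoids the 2GB limit of the byte[] based GetStateData/LoadState(byte[]) pair.
        private void SnapshotExample()
        {
            // Capture the current state into natively allocated memory.
            using var snapshot = GetState();

            // ... evaluate more tokens here, then roll the context back to the snapshot.
            LoadState(snapshot);
        }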

        /// <summary>
        /// Perform sampling over the candidate tokens. Please don't use it unless you fully know what it does.
        /// </summary>
        /// <param name="candidates"></param>
        /// <param name="mirostat_mu"></param>
        /// <param name="temperature"></param>
        /// <param name="mirostat"></param>
        /// <param name="mirostatTau"></param>
        /// <param name="mirostatEta"></param>
        /// <param name="topK"></param>
        /// <param name="topP"></param>
        /// <param name="tfsZ"></param>
        /// <param name="typicalP"></param>
        /// <returns></returns>
        public llama_token Sample(LLamaTokenDataArray candidates, ref float? mirostat_mu, float temperature = 0.8f, MirostatType mirostat = MirostatType.Disable,
                                  float mirostatTau = 5.0f, float mirostatEta = 0.1f, int topK = 40, float topP = 0.95f, float tfsZ = 1.0f, float typicalP = 1.0f)
        {
            llama_token id;

            if (temperature <= 0)
            {
                // Greedy sampling
                id = SamplingApi.llama_sample_token_greedy(_ctx, candidates);
            }
            else
            {
                var mu = mirostat_mu ?? (2 * mirostatTau);
                {
                    if (mirostat == MirostatType.Mirostat)
                    {
                        const int mirostat_m = 100;
                        SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                        id = SamplingApi.llama_sample_token_mirostat(_ctx, candidates, mirostatTau, mirostatEta, mirostat_m, ref mu);
                    }
                    else if (mirostat == MirostatType.Mirostat2)
                    {
                        SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                        id = SamplingApi.llama_sample_token_mirostat_v2(_ctx, candidates, mirostatTau, mirostatEta, ref mu);
                    }
                    else
                    {
                        // Temperature sampling
                        SamplingApi.llama_sample_top_k(_ctx, candidates, topK, 1);
                        SamplingApi.llama_sample_tail_free(_ctx, candidates, tfsZ, 1);
                        SamplingApi.llama_sample_typical(_ctx, candidates, typicalP, 1);
                        SamplingApi.llama_sample_top_p(_ctx, candidates, topP, 1);
                        SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                        id = SamplingApi.llama_sample_token(_ctx, candidates);
                    }
                }
                mirostat_mu = mu;
            }

            return id;
        }

        /// <summary>
        /// Apply repetition, frequency and presence penalties to the candidate tokens. Please don't use it unless you fully know what it does.
        /// </summary>
        /// <param name="lastTokens"></param>
        /// <param name="logitBias"></param>
        /// <param name="repeatLastTokensCount"></param>
        /// <param name="repeatPenalty"></param>
        /// <param name="alphaFrequency"></param>
        /// <param name="alphaPresence"></param>
        /// <param name="penalizeNL"></param>
        /// <returns></returns>
        public LLamaTokenDataArray ApplyPenalty(IEnumerable<llama_token> lastTokens, Dictionary<llama_token, float>? logitBias = null,
                                                int repeatLastTokensCount = 64, float repeatPenalty = 1.1f, float alphaFrequency = .0f, float alphaPresence = .0f,
                                                bool penalizeNL = true)
        {
            var n_vocab = _ctx.VocabCount;
            var logits = _ctx.GetLogits();

            // Apply params.logit_bias map
            if (logitBias is not null)
            {
                foreach (var (key, value) in logitBias)
                {
                    logits[key] += value;
                }
            }

            var candidates = new LLamaTokenData[n_vocab];
            for (llama_token token_id = 0; token_id < n_vocab; token_id++)
                candidates[token_id] = new LLamaTokenData(token_id, logits[token_id], 0.0f);
            LLamaTokenDataArray candidates_p = new LLamaTokenDataArray(candidates);

            // Apply penalties
            float nl_logit = logits[NativeApi.llama_token_nl()];
            int lastTokensCount = lastTokens.Count();
            var last_n_repeat = Math.Min(Math.Min(lastTokensCount, repeatLastTokensCount), ContextSize);

            SamplingApi.llama_sample_repetition_penalty(_ctx, candidates_p,
                lastTokens.Skip(lastTokensCount - last_n_repeat).ToArray(),
                (ulong)last_n_repeat, repeatPenalty);

            SamplingApi.llama_sample_frequency_and_presence_penalties(_ctx, candidates_p,
                lastTokens.Skip(lastTokensCount - last_n_repeat).ToArray(),
                (ulong)last_n_repeat, alphaFrequency, alphaPresence);

            if (!penalizeNL)
            {
                logits[NativeApi.llama_token_nl()] = nl_logit;
            }

            return candidates_p;
        }
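
        // Example sketch: one decoding step, chaining ApplyPenalty and Sample the way a typical
        // generation loop would. `lastTokens` holds the recent output history; `mu` carries the
        // mirostat state between calls and is unused here because mirostat stays disabled.
        private llama_token SampleNextTokenExample(List<llama_token> lastTokens)
        {
            float? mu = null;

            // Build candidates from the current logits with repetition/frequency penalties applied.
            var candidates = ApplyPenalty(lastTokens, repeatLastTokensCount: 64, repeatPenalty: 1.1f);

            // Pick the next token with standard temperature / top-k / top-p sampling.
            return Sample(candidates, ref mu, temperature: 0.8f, topK: 40, topP: 0.95f);
        }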

        #region eval overloads
        /// <summary>
        /// Process the given tokens, in batches, updating the context state.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(llama_token[] tokens, llama_token pastTokensCount)
        {
            return Eval(tokens.AsSpan(), pastTokensCount);
        }

        /// <summary>
        /// Process the given tokens, in batches, updating the context state.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(List<llama_token> tokens, llama_token pastTokensCount)
        {
#if NET5_0_OR_GREATER
            var span = CollectionsMarshal.AsSpan(tokens);
            return Eval(span, pastTokensCount);
#else
            // on netstandard2.0 we can't use CollectionsMarshal to get directly at the internal memory of
            // the list. Instead rent an array and copy the data into it. This avoids an allocation, but can't
            // avoid the copying.
            var rented = ArrayPool<llama_token>.Shared.Rent(tokens.Count);
            try
            {
                tokens.CopyTo(rented, 0);
                // Only evaluate the portion of the rented array that was actually filled (the pool
                // may return an array longer than requested).
                return Eval(rented.AsSpan(0, tokens.Count), pastTokensCount);
            }
            finally
            {
                ArrayPool<llama_token>.Shared.Return(rented);
            }
#endif
        }

        /// <summary>
        /// Process the given tokens, in batches, updating the context state.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(ReadOnlyMemory<llama_token> tokens, llama_token pastTokensCount)
        {
            return Eval(tokens.Span, pastTokensCount);
        }

        /// <summary>
        /// Process the given tokens, in batches, updating the context state.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public int Eval(ReadOnlySpan<llama_token> tokens, llama_token pastTokensCount)
        {
            int total = tokens.Length;
            for (int i = 0; i < total; i += Params.BatchSize)
            {
                int n_eval = total - i;
                if (n_eval > Params.BatchSize)
                {
                    n_eval = Params.BatchSize;
                }

                if (!_ctx.Eval(tokens.Slice(i, n_eval), pastTokensCount, Params.Threads))
                {
                    _logger?.Log(nameof(LLamaContext), "Failed to eval.", ILLamaLogger.LogLevel.Error);
                    throw new RuntimeError("Failed to eval.");
                }

                pastTokensCount += n_eval;
            }
            return pastTokensCount;
        }
        #endregion
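
        // Example sketch: feeding a prompt through Eval before any sampling. Eval processes the
        // tokens in batches of Params.BatchSize and returns the updated number of evaluated tokens,
        // which later calls must pass back in as `pastTokensCount`.
        private int EvalPromptExample(string prompt)
        {
            var tokens = Tokenize(prompt, addBos: true);

            // A fresh context starts with zero past tokens.
            return Eval(tokens, pastTokensCount: 0);
        }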

        internal IEnumerable<string> GenerateResult(IEnumerable<llama_token> ids)
        {
            foreach (var id in ids)
                yield return _ctx.TokenToString(id, _encoding);
        }

        /// <summary>
        /// Convert a token into a string
        /// </summary>
        /// <param name="token"></param>
        /// <returns></returns>
        public string TokenToString(llama_token token)
        {
            return NativeHandle.TokenToString(token, Encoding);
        }

        /// <inheritdoc />
        public virtual void Dispose()
        {
            GC.SuppressFinalize(this);
            _ctx.Dispose();
        }

        /// <summary>
        /// The state of this context, which can be reloaded later
        /// </summary>
        public class State
            : SafeLLamaHandleBase
        {
            internal State(IntPtr memory)
                : base(memory)
            {
            }

            /// <inheritdoc />
            protected override bool ReleaseHandle()
            {
                Marshal.FreeHGlobal(handle);
                return true;
            }
        }
    }
}