
LLamaContext.cs

using LLama.Exceptions;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.IO.MemoryMappedFiles;
using LLama.Common;
using System.Runtime.InteropServices;
using LLama.Extensions;
using Microsoft.Win32.SafeHandles;
using LLama.Abstractions;
namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// A llama_context, which holds all the context required to interact with a model
    /// </summary>
    public class LLamaContext
        : IDisposable
    {
        private readonly ILLamaLogger? _logger;
        private readonly Encoding _encoding;
        private readonly SafeLLamaContextHandle _ctx;

        /// <summary>
        /// Total number of tokens in the vocabulary of this model
        /// </summary>
        public int VocabCount => _ctx.VocabCount;

        /// <summary>
        /// Total number of tokens in the context
        /// </summary>
        public int ContextSize => _ctx.ContextSize;

        /// <summary>
        /// Dimension of the embedding vectors
        /// </summary>
        public int EmbeddingCount => _ctx.EmbeddingCount;

        /// <summary>
        /// The model params set for this model.
        /// </summary>
        public IModelParams Params { get; set; }

        /// <summary>
        /// The native handle, which can be passed to the native APIs
        /// </summary>
        /// <remarks>Be careful how you use this!</remarks>
        public SafeLLamaContextHandle NativeHandle => _ctx;

        /// <summary>
        /// The encoding set for this model to deal with text input.
        /// </summary>
        public Encoding Encoding => _encoding;

        /// <summary>
        /// Create a new LLamaContext from the given model parameters.
        /// </summary>
        /// <param name="params">Model params.</param>
        /// <param name="encoding">Encoding to deal with text input.</param>
        /// <param name="logger">The logger.</param>
        [Obsolete("Use LLamaWeights.CreateContext instead")]
        public LLamaContext(IModelParams @params, string encoding = "UTF-8", ILLamaLogger? logger = null)
        {
            Params = @params;
            _logger = logger;
            _encoding = Encoding.GetEncoding(encoding);

            _logger?.Log(nameof(LLamaContext), $"Initializing LLama model with params: {Params}", ILLamaLogger.LogLevel.Info);
            _ctx = Utils.InitLLamaContextFromModelParams(Params);
        }

        internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, Encoding encoding, ILLamaLogger? logger = null)
        {
            Params = @params;
            _logger = logger;
            _encoding = encoding;
            _ctx = nativeContext;
        }

        /// <summary>
        /// Create a new LLamaContext for the given LLamaWeights
        /// </summary>
        /// <param name="model"></param>
        /// <param name="params"></param>
        /// <param name="encoding"></param>
        /// <param name="logger"></param>
        /// <exception cref="ObjectDisposedException"></exception>
        public LLamaContext(LLamaWeights model, IModelParams @params, Encoding encoding, ILLamaLogger? logger = null)
        {
            if (model.NativeHandle.IsClosed)
                throw new ObjectDisposedException("Cannot create context, model weights have been disposed");

            Params = @params;
            _logger = logger;
            _encoding = encoding;

            using var pin = @params.ToLlamaContextParams(out var lparams);
            _ctx = SafeLLamaContextHandle.Create(model.NativeHandle, lparams);
        }
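
        // Illustrative usage (not part of the original file). A minimal sketch
        // of constructing a context from loaded weights, assuming a local GGUF
        // model at the placeholder path "model.gguf" and the library's
        // ModelParams / LLamaWeights.LoadFromFile loading APIs:
        //
        //   var @params = new ModelParams("model.gguf");
        //   using var weights = LLamaWeights.LoadFromFile(@params);
        //   using var context = new LLamaContext(weights, @params, Encoding.UTF8);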

        /// <summary>
        /// Create a copy of the current state of this context
        /// </summary>
        /// <returns></returns>
        public LLamaContext Clone()
        {
            using var pin = Params.ToLlamaContextParams(out var lparams);

            // Create a blank new context for the model
            var ctx = new LLamaContext(SafeLLamaContextHandle.Create(NativeHandle.ModelHandle, lparams), Params, _encoding);

            // Copy across the state
            using var state = GetState();
            ctx.LoadState(state);

            return ctx;
        }

        /// <summary>
        /// Tokenize a string.
        /// </summary>
        /// <param name="text"></param>
        /// <param name="addBos">Whether to prepend the BOS token to the text.</param>
        /// <returns></returns>
        public llama_token[] Tokenize(string text, bool addBos = true)
        {
            return _ctx.Tokenize(text, addBos, _encoding);
        }

        /// <summary>
        /// Detokenize the tokens to text.
        /// </summary>
        /// <param name="tokens"></param>
        /// <returns></returns>
        public string DeTokenize(IEnumerable<llama_token> tokens)
        {
            var sb = new StringBuilder();
            foreach (var token in tokens)
                sb.Append(_ctx.TokenToString(token, _encoding));
            return sb.ToString();
        }
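
        // Illustrative round trip (not part of the original file): tokenize a
        // prompt, then turn the tokens back into text. Depending on the
        // tokenizer and encoding, the round trip may not be byte-identical:
        //
        //   var tokens = context.Tokenize("Hello, world!");
        //   var text = context.DeTokenize(tokens);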

        /// <summary>
        /// Save the state to specified path.
        /// </summary>
        /// <param name="filename"></param>
        public void SaveState(string filename)
        {
            // Delete that file before overwriting it
            if (File.Exists(filename))
                File.Delete(filename);

            // Estimate size of state to write to disk, this is always equal to or greater than the actual size
            var estimatedStateSize = (long)NativeApi.llama_get_state_size(_ctx);

            // Map the file and write the bytes directly to it. This saves copying the bytes into a C# array
            long writtenBytes;
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Create, null, estimatedStateSize))
            using (var view = file.CreateViewAccessor(0, estimatedStateSize))
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    writtenBytes = (long)NativeApi.llama_copy_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }

            // Truncate the file to the actual size of data that was written
            using (var fileStream = new FileStream(filename, FileMode.Open))
                fileStream.SetLength(writtenBytes);
        }
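
        // Illustrative usage (not part of the original file): persist the
        // context state to disk and restore it later, e.g. to resume a session
        // on a context created from the same model. "state.bin" is a
        // placeholder path:
        //
        //   context.SaveState("state.bin");
        //   // ... later ...
        //   context.LoadState("state.bin");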

        /// <summary>
        /// Get the state data as a byte array.
        /// </summary>
        /// <returns></returns>
        [Obsolete("Use `GetState` instead, this supports larger states (over 2GB)")]
        public byte[] GetStateData()
        {
            var stateSize = NativeApi.llama_get_state_size(_ctx);
            var stateMemory = new byte[stateSize];
            NativeApi.llama_copy_state_data(_ctx, stateMemory);
            return stateMemory;
        }

        /// <summary>
        /// Get the state data as an opaque handle
        /// </summary>
        /// <returns></returns>
        public State GetState()
        {
            var stateSize = NativeApi.llama_get_state_size(_ctx);

            unsafe
            {
                var bigMemory = Marshal.AllocHGlobal((nint)stateSize);
                var smallMemory = IntPtr.Zero;
                try
                {
                    // Copy the state data into "big memory", discover the actual size required
                    var actualSize = NativeApi.llama_copy_state_data(_ctx, (byte*)bigMemory);

                    // Allocate a smaller buffer
                    smallMemory = Marshal.AllocHGlobal((nint)actualSize);

                    // Copy into the smaller buffer and free the large one to save excess memory usage
                    Buffer.MemoryCopy(bigMemory.ToPointer(), smallMemory.ToPointer(), actualSize, actualSize);
                    Marshal.FreeHGlobal(bigMemory);
                    bigMemory = IntPtr.Zero;

                    return new State(smallMemory);
                }
                catch
                {
                    if (bigMemory != IntPtr.Zero)
                        Marshal.FreeHGlobal(bigMemory);
                    if (smallMemory != IntPtr.Zero)
                        Marshal.FreeHGlobal(smallMemory);
                    throw;
                }
            }
        }

        /// <summary>
        /// Load the state from specified path.
        /// </summary>
        /// <param name="filename"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(string filename)
        {
            // Map state file into memory and pass that pointer directly to `llama_set_state_data` to load from
            using (var file = MemoryMappedFile.CreateFromFile(filename, FileMode.Open, null))
            using (var view = file.CreateViewAccessor())
            {
                unsafe
                {
                    byte* ptr = null;
                    view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr);
                    NativeApi.llama_set_state_data(_ctx, ptr);
                    view.SafeMemoryMappedViewHandle.ReleasePointer();
                }
            }
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="stateData"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(byte[] stateData)
        {
            var stateSize = (int)NativeApi.llama_get_state_size(_ctx);
            if (stateData.Length > stateSize)
                throw new RuntimeError("Failed to validate state size.");

            NativeApi.llama_set_state_data(_ctx, stateData);
        }

        /// <summary>
        /// Load the state from memory.
        /// </summary>
        /// <param name="state"></param>
        /// <exception cref="RuntimeError"></exception>
        public void LoadState(State state)
        {
            unsafe
            {
                NativeApi.llama_set_state_data(_ctx, (byte*)state.DangerousGetHandle().ToPointer());
            }
        }
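
        // Illustrative usage (not part of the original file): snapshot the
        // context state in memory and roll back to it later without touching
        // disk. The State handle owns unmanaged memory, so dispose it:
        //
        //   using var snapshot = context.GetState();
        //   // ... evaluate some speculative tokens ...
        //   context.LoadState(snapshot);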

        /// <summary>
        /// Perform sampling. Don't use this unless you fully understand what it does.
        /// </summary>
        /// <param name="candidates"></param>
        /// <param name="mirostat_mu"></param>
        /// <param name="temperature"></param>
        /// <param name="mirostat"></param>
        /// <param name="mirostatTau"></param>
        /// <param name="mirostatEta"></param>
        /// <param name="topK"></param>
        /// <param name="topP"></param>
        /// <param name="tfsZ"></param>
        /// <param name="typicalP"></param>
        /// <returns></returns>
        public llama_token Sample(LLamaTokenDataArray candidates, ref float? mirostat_mu, float temperature = 0.8f, MirostatType mirostat = MirostatType.Disable,
                                  float mirostatTau = 5.0f, float mirostatEta = 0.1f, int topK = 40, float topP = 0.95f, float tfsZ = 1.0f, float typicalP = 1.0f)
        {
            llama_token id;

            if (temperature <= 0)
            {
                // Greedy sampling
                id = SamplingApi.llama_sample_token_greedy(_ctx, candidates);
            }
            else
            {
                var mu = mirostat_mu ?? (2 * mirostatTau);

                if (mirostat == MirostatType.Mirostat)
                {
                    const int mirostat_m = 100;
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token_mirostat(_ctx, candidates, mirostatTau, mirostatEta, mirostat_m, ref mu);
                }
                else if (mirostat == MirostatType.Mirostat2)
                {
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token_mirostat_v2(_ctx, candidates, mirostatTau, mirostatEta, ref mu);
                }
                else
                {
                    // Temperature sampling: filter the candidates (top-k, tail-free,
                    // typical, top-p), then apply temperature and sample
                    SamplingApi.llama_sample_top_k(_ctx, candidates, topK, 1);
                    SamplingApi.llama_sample_tail_free(_ctx, candidates, tfsZ, 1);
                    SamplingApi.llama_sample_typical(_ctx, candidates, typicalP, 1);
                    SamplingApi.llama_sample_top_p(_ctx, candidates, topP, 1);
                    SamplingApi.llama_sample_temperature(_ctx, candidates, temperature);
                    id = SamplingApi.llama_sample_token(_ctx, candidates);
                }

                mirostat_mu = mu;
            }

            return id;
        }
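
        // Illustrative usage (not part of the original file): mirostat carries
        // a running `mu` value between calls, so thread the same nullable
        // variable through every Sample call in a generation loop:
        //
        //   float? mu = null;
        //   var token = context.Sample(candidates, ref mu, temperature: 0.8f,
        //                              mirostat: MirostatType.Mirostat2);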

        /// <summary>
        /// Apply penalties to the candidate tokens. Don't use this unless you fully understand what it does.
        /// </summary>
        /// <param name="lastTokens"></param>
        /// <param name="logitBias"></param>
        /// <param name="repeatLastTokensCount"></param>
        /// <param name="repeatPenalty"></param>
        /// <param name="alphaFrequency"></param>
        /// <param name="alphaPresence"></param>
        /// <param name="penalizeNL"></param>
        /// <returns></returns>
        public LLamaTokenDataArray ApplyPenalty(IEnumerable<llama_token> lastTokens, Dictionary<llama_token, float>? logitBias = null,
                                                int repeatLastTokensCount = 64, float repeatPenalty = 1.1f, float alphaFrequency = .0f, float alphaPresence = .0f,
                                                bool penalizeNL = true)
        {
            var n_vocab = _ctx.VocabCount;
            var logits = _ctx.GetLogits();

            // Apply params.logit_bias map
            if (logitBias is not null)
            {
                foreach (var (key, value) in logitBias)
                    logits[key] += value;
            }

            // Build one candidate per vocabulary entry from the raw logits
            var candidates = new LLamaTokenData[n_vocab];
            for (llama_token token_id = 0; token_id < n_vocab; token_id++)
                candidates[token_id] = new LLamaTokenData(token_id, logits[token_id], 0.0f);
            var candidates_p = new LLamaTokenDataArray(candidates);

            // Apply penalties over the most recent `last_n_repeat` tokens
            var nl_logit = logits[NativeApi.llama_token_nl()];
            var lastTokensCount = lastTokens.Count();
            var last_n_repeat = Math.Min(Math.Min(lastTokensCount, repeatLastTokensCount), ContextSize);
            var last_n_array = lastTokens.Skip(lastTokensCount - last_n_repeat).ToArray();

            SamplingApi.llama_sample_repetition_penalty(_ctx, candidates_p, last_n_array,
                (ulong)last_n_repeat, repeatPenalty);
            SamplingApi.llama_sample_frequency_and_presence_penalties(_ctx, candidates_p, last_n_array,
                (ulong)last_n_repeat, alphaFrequency, alphaPresence);

            if (!penalizeNL)
            {
                logits[NativeApi.llama_token_nl()] = nl_logit;
            }

            return candidates_p;
        }
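
        // Illustrative usage (not part of the original file): build penalized
        // candidates from the tokens generated so far, then hand them to Sample:
        //
        //   var candidates = context.ApplyPenalty(lastTokens, repeatPenalty: 1.1f);
        //   var token = context.Sample(candidates, ref mu);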

        /// <summary>
        /// Eval the given tokens, feeding them to the model in batches of at most `Params.BatchSize`.
        /// </summary>
        /// <param name="tokens"></param>
        /// <param name="pastTokensCount"></param>
        /// <returns>The updated `pastTokensCount`.</returns>
        /// <exception cref="RuntimeError"></exception>
        public llama_token Eval(llama_token[] tokens, llama_token pastTokensCount)
        {
            var total = tokens.Length;
            for (var i = 0; i < total; i += Params.BatchSize)
            {
                var n_eval = total - i;
                if (n_eval > Params.BatchSize)
                    n_eval = Params.BatchSize;

                if (!_ctx.Eval(tokens.AsMemory(i, n_eval), pastTokensCount, Params.Threads))
                {
                    _logger?.Log(nameof(LLamaContext), "Failed to eval.", ILLamaLogger.LogLevel.Error);
                    throw new RuntimeError("Failed to eval.");
                }

                pastTokensCount += n_eval;
            }
            return pastTokensCount;
        }
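
        // Illustrative generation loop (not part of the original file),
        // combining Eval, ApplyPenalty and Sample. `prompt` and `maxTokens`
        // are placeholders; `lastTokens` tracks the history used for
        // repetition penalties:
        //
        //   var lastTokens = new List<llama_token>();
        //   var pastTokensCount = context.Eval(context.Tokenize(prompt), 0);
        //   float? mu = null;
        //   for (var i = 0; i < maxTokens; i++)
        //   {
        //       var candidates = context.ApplyPenalty(lastTokens);
        //       var token = context.Sample(candidates, ref mu);
        //       lastTokens.Add(token);
        //       Console.Write(context.DeTokenize(new[] { token }));
        //       pastTokensCount = context.Eval(new[] { token }, pastTokensCount);
        //   }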

        internal IEnumerable<string> GenerateResult(IEnumerable<llama_token> ids)
        {
            foreach (var id in ids)
                yield return _ctx.TokenToString(id, _encoding);
        }

        /// <inheritdoc />
        public virtual void Dispose()
        {
            _ctx.Dispose();
        }

        /// <summary>
        /// The state of this context, which can be reloaded later
        /// </summary>
        public class State
            : SafeHandleZeroOrMinusOneIsInvalid
        {
            internal State(IntPtr memory)
                : base(true)
            {
                SetHandle(memory);
            }

            /// <inheritdoc />
            protected override bool ReleaseHandle()
            {
                Marshal.FreeHGlobal(handle);
                return true;
            }
        }
    }
}