
LLamaInstructExecutor.cs

using LLama.Abstractions;
using LLama.Common;
using LLama.Extensions;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// The LLama executor for instruct mode.
    /// </summary>
    public class InstructExecutor : StatefulExecutorBase
    {
        private bool _is_prompt_run = true;          // true until the first (prompt) input has been processed
        private readonly string _instructionPrefix;  // text prepended to each instruction
        private llama_token[] _inp_pfx;              // tokenized instruction prefix
        private llama_token[] _inp_sfx;              // tokenized instruction suffix
        /// <summary>
        /// Create an executor for instruct mode.
        /// </summary>
        /// <param name="context">The context to run inference with.</param>
        /// <param name="instructionPrefix">Text inserted before each instruction.</param>
        /// <param name="instructionSuffix">Text inserted after each instruction.</param>
        public InstructExecutor(LLamaContext context, string instructionPrefix = "\n\n### Instruction:\n\n",
            string instructionSuffix = "\n\n### Response:\n\n") : base(context)
        {
            _inp_pfx = Context.Tokenize(instructionPrefix, true);
            _inp_sfx = Context.Tokenize(instructionSuffix, false);
            _instructionPrefix = instructionPrefix;
        }
        /// <inheritdoc />
        public override ExecutorBaseState GetStateData()
        {
            InstructExecutorState state = new()
            {
                ConsumedSessionCount = _n_session_consumed,
                EmbedInps = _embed_inps,
                IsPromptRun = _is_prompt_run,
                ConsumedTokensCount = _consumedTokensCount,
                Embeds = _embeds,
                LastTokens = _last_n_tokens.ToArray(),
                InputPrefixTokens = _inp_pfx,
                InputSuffixTokens = _inp_sfx,
                MatchingSessionTokensCount = _n_matching_session_tokens,
                PastTokensCount = _pastTokensCount,
                SessionFilePath = _pathSession,
                SessionTokens = _session_tokens,
                LastTokensCapacity = _last_n_tokens.Capacity,
                MirostatMu = MirostatMu
            };
            return state;
        }
        /// <inheritdoc />
        public override void LoadState(ExecutorBaseState data)
        {
            if (data is InstructExecutorState state)
            {
                _n_session_consumed = state.ConsumedSessionCount;
                _embed_inps = state.EmbedInps;
                _is_prompt_run = state.IsPromptRun;
                _consumedTokensCount = state.ConsumedTokensCount;
                _embeds = state.Embeds;
                _last_n_tokens = new FixedSizeQueue<llama_token>(state.LastTokensCapacity, state.LastTokens);
                _inp_pfx = state.InputPrefixTokens;
                _inp_sfx = state.InputSuffixTokens;
                _n_matching_session_tokens = state.MatchingSessionTokensCount;
                _pastTokensCount = state.PastTokensCount;
                _pathSession = state.SessionFilePath;
                _session_tokens = state.SessionTokens;
            }
            else
            {
                throw new ArgumentException("Invalid state data type.");
            }
        }
        /// <inheritdoc />
        public override void SaveState(string filename)
        {
            var state = (InstructExecutorState)GetStateData();
            using (var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
            {
                JsonSerializer.Serialize(fs, state);
            }
        }

        /// <inheritdoc />
        public override void LoadState(string filename)
        {
            using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
            {
                var state = JsonSerializer.Deserialize<InstructExecutorState>(fs);
                LoadState(state);
            }
        }
        /// <inheritdoc />
        protected override bool GetLoopCondition(InferStateArgs args)
        {
            // Keep inferring while there are tokens left to generate, or while the initial prompt is still being processed.
            return args.RemainedTokens != 0 || _is_prompt_run;
        }
        /// <inheritdoc />
        protected override void PreprocessInputs(string text, InferStateArgs args)
        {
            args.Antiprompts ??= new List<string>();
            args.Antiprompts.Add(_instructionPrefix);
            if (_is_prompt_run)
            {
                // When running the first input (the prompt) in instruct mode, we should process it specially.
                _embed_inps = Context.Tokenize(text, true).ToList();
            }
            else
            {
                if (!text.EndsWith("\n"))
                {
                    text += "\n";
                }
                // Wrap every subsequent instruction in the prefix/suffix token sequences.
                _consumedTokensCount = _embed_inps.Count;
                _embed_inps.AddRange(_inp_pfx);
                var line_inp = Context.Tokenize(text, false);
                _embed_inps.AddRange(line_inp);
                _embed_inps.AddRange(_inp_sfx);
                args.RemainedTokens -= line_inp.Length;
            }
        }
        /// <inheritdoc />
        protected override bool PostProcess(IInferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string>? extraOutputs)
        {
            extraOutputs = null;
            if (_embed_inps.Count <= _consumedTokensCount)
            {
                // Stop and wait for input if the output ends with any antiprompt.
                if (_last_n_tokens.Items.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))
                {
                    args.WaitForInput = true;
                    return true;
                }
                if (_pastTokensCount > 0 && args.WaitForInput)
                {
                    extraOutputs = new[] { "\n> " };
                    return true;
                }
            }
            // An end-of-sequence token also hands control back to the caller.
            if (_embeds.Count > 0 && _embeds.Last() == NativeApi.llama_token_eos(Context.NativeHandle))
            {
                args.WaitForInput = true;
            }
            if (args.RemainedTokens <= 0 && inferenceParams.MaxTokens != -1)
            {
                args.RemainedTokens = inferenceParams.MaxTokens;
                args.WaitForInput = true;
            }
            return false;
        }
        /// <inheritdoc />
        protected override void InferInternal(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embeds.Count > 0)
            {
                _is_prompt_run = false;
                if (_pastTokensCount + _embeds.Count > Context.ContextSize)
                {
                    // The context is full; discard old tokens, keeping TokensKeep of the prompt.
                    HandleRunOutOfContext(inferenceParams.TokensKeep);
                }
                TryReuseMathingPrefix();
                _pastTokensCount = Context.Eval(_embeds, _pastTokensCount);
                if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
                {
                    _session_tokens.AddRange(_embeds);
                    _n_session_consumed = _session_tokens.Count;
                }
            }

            _embeds.Clear();
            if (_embed_inps.Count <= _consumedTokensCount && !args.WaitForInput)
            {
                var repeat_last_n = inferenceParams.RepeatLastTokensCount < 0 ? Context.ContextSize : inferenceParams.RepeatLastTokensCount;

                // Optionally save the session on first sample (for faster prompt loading next time).
                if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
                {
                    args.NeedToSaveSession = false;
                    SaveSessionFile(_pathSession);
                }

                var tokenDataArray = Context.ApplyPenalty(_last_n_tokens, inferenceParams.LogitBias, repeat_last_n,
                    inferenceParams.RepeatPenalty, inferenceParams.FrequencyPenalty, inferenceParams.PresencePenalty, inferenceParams.PenalizeNL);

                var mu = MirostatMu;
                var id = Context.Sample(
                    tokenDataArray, ref mu, inferenceParams.Temperature, inferenceParams.Mirostat, inferenceParams.MirostatTau,
                    inferenceParams.MirostatEta, inferenceParams.TopK, inferenceParams.TopP, inferenceParams.TfsZ, inferenceParams.TypicalP,
                    inferenceParams.Grammar
                );
                MirostatMu = mu;

                _last_n_tokens.Enqueue(id);
                _embeds.Add(id);

                args.RemainedTokens--;
                args.ReturnValue = true;
            }
            else
            {
                // Consume pending input tokens, up to one batch at a time.
                while (_embed_inps.Count > _consumedTokensCount)
                {
                    _embeds.Add(_embed_inps[_consumedTokensCount]);
                    _last_n_tokens.Enqueue(_embed_inps[_consumedTokensCount]);
                    _consumedTokensCount++;
                    if (_embeds.Count >= Context.Params.BatchSize)
                    {
                        break;
                    }
                }
            }
        }
        /// <summary>
        /// The descriptor of the state of the instruct executor.
        /// </summary>
        public class InstructExecutorState : ExecutorBaseState
        {
            /// <summary>
            /// Whether the executor is running for the first time (running the prompt).
            /// </summary>
            [JsonPropertyName("is_prompt_run")]
            public bool IsPromptRun { get; set; }

            /// <summary>
            /// Instruction prefix tokens.
            /// </summary>
            [JsonPropertyName("inp_pfx")]
            public llama_token[] InputPrefixTokens { get; set; }

            /// <summary>
            /// Instruction suffix tokens.
            /// </summary>
            [JsonPropertyName("inp_sfx")]
            public llama_token[] InputSuffixTokens { get; set; }
        }
    }
}
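
For reference, a minimal sketch of how this executor might be driven from calling code. It assumes the LLamaWeights.LoadFromFile / CreateContext loading path and the InferAsync entry point that StatefulExecutorBase exposes in LLamaSharp releases of this vintage; the model path and prompt are placeholders, and defaults are used for the Alpaca-style "### Instruction:" / "### Response:" prefix and suffix.

using System;
using LLama;
using LLama.Common;

// Load a model and create a context for it (the path is a placeholder).
var parameters = new ModelParams("path/to/model.gguf");
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);

// The default constructor arguments wrap each instruction in Alpaca-style blocks.
var executor = new InstructExecutor(context);

var inferenceParams = new InferenceParams { MaxTokens = 256 };
await foreach (var piece in executor.InferAsync("Summarize the plot of Hamlet.", inferenceParams))
{
    Console.Write(piece);
}

// The executor's state (including the tokenized prefix/suffix) round-trips through JSON.
executor.SaveState("instruct-state.json");
executor.LoadState("instruct-state.json");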