
LLamaInstructExecutor.cs

using LLama.Abstractions;
using LLama.Common;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// The LLama executor for instruct mode.
    /// </summary>
    public class InstructExecutor : StatefulExecutorBase
    {
        bool _is_prompt_run = true;
        string _instructionPrefix;
        llama_token[] _inp_pfx;
        llama_token[] _inp_sfx;

        /// <summary>
        /// Create an executor that runs the model in instruct mode.
        /// </summary>
        /// <param name="context">The context to run inference with.</param>
        /// <param name="instructionPrefix">The prefix placed before each instruction.</param>
        /// <param name="instructionSuffix">The suffix placed after each instruction.</param>
        public InstructExecutor(LLamaContext context, string instructionPrefix = "\n\n### Instruction:\n\n",
            string instructionSuffix = "\n\n### Response:\n\n") : base(context)
        {
            _inp_pfx = Context.Tokenize(instructionPrefix, true);
            _inp_sfx = Context.Tokenize(instructionSuffix, false);
            _instructionPrefix = instructionPrefix;
        }
        /// <inheritdoc />
        public override ExecutorBaseState GetStateData()
        {
            InstructExecutorState state = new()
            {
                ConsumedSessionCount = _n_session_consumed,
                EmbedInps = _embed_inps,
                IsPromptRun = _is_prompt_run,
                ConsumedTokensCount = _consumedTokensCount,
                Embeds = _embeds,
                LastTokens = _last_n_tokens.ToArray(),
                InputPrefixTokens = _inp_pfx,
                InputSuffixTokens = _inp_sfx,
                MatchingSessionTokensCount = _n_matching_session_tokens,
                PastTokensCount = _pastTokensCount,
                SessionFilePath = _pathSession,
                SessionTokens = _session_tokens,
                LastTokensCapacity = _last_n_tokens.Capacity,
                MirostatMu = MirostatMu
            };
            return state;
        }

        /// <inheritdoc />
        public override void LoadState(ExecutorBaseState data)
        {
            if (data is InstructExecutorState state)
            {
                _n_session_consumed = state.ConsumedSessionCount;
                _embed_inps = state.EmbedInps;
                _is_prompt_run = state.IsPromptRun;
                _consumedTokensCount = state.ConsumedTokensCount;
                _embeds = state.Embeds;
                _last_n_tokens = new FixedSizeQueue<llama_token>(state.LastTokensCapacity, state.LastTokens);
                _inp_pfx = state.InputPrefixTokens;
                _inp_sfx = state.InputSuffixTokens;
                _n_matching_session_tokens = state.MatchingSessionTokensCount;
                _pastTokensCount = state.PastTokensCount;
                _pathSession = state.SessionFilePath;
                _session_tokens = state.SessionTokens;
            }
            else
            {
                throw new ArgumentException("Invalid state data type.");
            }
        }

        /// <inheritdoc />
        public override void SaveState(string filename)
        {
            var state = (InstructExecutorState)GetStateData();
            using (var fs = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.Write))
            {
                JsonSerializer.Serialize(fs, state);
            }
        }

        /// <inheritdoc />
        public override void LoadState(string filename)
        {
            using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
            {
                var state = JsonSerializer.Deserialize<InstructExecutorState>(fs);
                LoadState(state);
            }
        }
        /// <inheritdoc />
        protected override bool GetLoopCondition(InferStateArgs args)
        {
            return args.RemainedTokens != 0 || _is_prompt_run;
        }

        /// <inheritdoc />
        protected override void PreprocessInputs(string text, InferStateArgs args)
        {
            args.Antiprompts ??= new List<string>();
            args.Antiprompts.Add(_instructionPrefix);
            if (_is_prompt_run)
            {
                // When running the first input (the prompt), we need to process it specially.
                _embed_inps = Context.Tokenize(text, true).ToList();
            }
            else
            {
                if (!text.EndsWith("\n"))
                {
                    text += "\n";
                }
                // Wrap every follow-up input in the instruction prefix and suffix.
                _consumedTokensCount = _embed_inps.Count;
                _embed_inps.AddRange(_inp_pfx);
                var line_inp = Context.Tokenize(text, false);
                _embed_inps.AddRange(line_inp);
                _embed_inps.AddRange(_inp_sfx);
                args.RemainedTokens -= line_inp.Length;
            }
        }
        /// <inheritdoc />
        protected override bool PostProcess(IInferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string>? extraOutputs)
        {
            extraOutputs = null;
            if (_embed_inps.Count <= _consumedTokensCount)
            {
                if (args.Antiprompts is not null && args.Antiprompts.Count > 0)
                {
                    // If the recent output ends with any antiprompt, stop and wait for user input.
                    var last_output_builder = new StringBuilder();
                    foreach (var token in _last_n_tokens)
                        Context.NativeHandle.TokenToString(token, Context.Encoding, last_output_builder);
                    var last_output = last_output_builder.ToString();

                    foreach (var antiprompt in args.Antiprompts)
                    {
                        if (last_output.EndsWith(antiprompt))
                        {
                            args.WaitForInput = true;
                            return true;
                        }
                    }
                }

                if (_pastTokensCount > 0 && args.WaitForInput)
                {
                    extraOutputs = new[] { "\n> " };
                    return true;
                }
            }

            if (_embeds.Count > 0 && _embeds.Last() == NativeApi.llama_token_eos(Context.NativeHandle))
            {
                args.WaitForInput = true;
            }

            if (args.RemainedTokens <= 0 && inferenceParams.MaxTokens != -1)
            {
                args.RemainedTokens = inferenceParams.MaxTokens;
                args.WaitForInput = true;
            }
            return false;
        }
        /// <inheritdoc />
        protected override void InferInternal(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embeds.Count > 0)
            {
                _is_prompt_run = false;
                if (_pastTokensCount + _embeds.Count > Context.ContextSize)
                {
                    // The pending tokens would overflow the context window, so drop part of the history.
                    HandleRunOutOfContext(inferenceParams.TokensKeep);
                }

                TryReuseMathingPrefix();
                _pastTokensCount = Context.Eval(_embeds, _pastTokensCount);

                if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
                {
                    _session_tokens.AddRange(_embeds);
                    _n_session_consumed = _session_tokens.Count;
                }
            }

            _embeds.Clear();

            if (_embed_inps.Count <= _consumedTokensCount && !args.WaitForInput)
            {
                var repeat_last_n = inferenceParams.RepeatLastTokensCount < 0 ? Context.ContextSize : inferenceParams.RepeatLastTokensCount;

                // optionally save the session on first sample (for faster prompt loading next time)
                if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
                {
                    args.NeedToSaveSession = false;
                    SaveSessionFile(_pathSession);
                }

                var tokenDataArray = Context.ApplyPenalty(_last_n_tokens, inferenceParams.LogitBias, repeat_last_n,
                    inferenceParams.RepeatPenalty, inferenceParams.FrequencyPenalty, inferenceParams.PresencePenalty, inferenceParams.PenalizeNL);

                var mu = MirostatMu;
                var id = Context.Sample(
                    tokenDataArray, ref mu, inferenceParams.Temperature, inferenceParams.Mirostat, inferenceParams.MirostatTau,
                    inferenceParams.MirostatEta, inferenceParams.TopK, inferenceParams.TopP, inferenceParams.TfsZ, inferenceParams.TypicalP,
                    inferenceParams.Grammar
                );
                MirostatMu = mu;

                _last_n_tokens.Enqueue(id);
                _embeds.Add(id);

                args.RemainedTokens--;
                args.ReturnValue = true;
            }
            else
            {
                // Feed not-yet-consumed input tokens to the model, at most one batch at a time.
                while (_embed_inps.Count > _consumedTokensCount)
                {
                    _embeds.Add(_embed_inps[_consumedTokensCount]);
                    _last_n_tokens.Enqueue(_embed_inps[_consumedTokensCount]);
                    _consumedTokensCount++;
                    if (_embeds.Count >= Context.Params.BatchSize)
                    {
                        break;
                    }
                }
            }
        }
        /// <summary>
        /// The descriptor of the state of the instruct executor.
        /// </summary>
        public class InstructExecutorState : ExecutorBaseState
        {
            /// <summary>
            /// Whether the executor is running for the first time (running the prompt).
            /// </summary>
            [JsonPropertyName("is_prompt_run")]
            public bool IsPromptRun { get; set; }

            /// <summary>
            /// Instruction prefix tokens.
            /// </summary>
            [JsonPropertyName("inp_pfx")]
            public llama_token[] InputPrefixTokens { get; set; }

            /// <summary>
            /// Instruction suffix tokens.
            /// </summary>
            [JsonPropertyName("inp_sfx")]
            public llama_token[] InputSuffixTokens { get; set; }
        }
    }
}
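
For reference, below is a minimal usage sketch of this executor. It is not part of the file above: the model path is hypothetical, and the surrounding types (ModelParams, LLamaWeights, LLamaContext, InferenceParams) are the LLamaSharp APIs contemporary with this code, so adjust the names to the version you actually use.

using System;
using LLama;
using LLama.Common;

// Load the model and create a context (hypothetical model path).
var parameters = new ModelParams("path/to/model.gguf")
{
    ContextSize = 1024
};
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);

// Run the executor in instruct mode and stream the decoded response.
var executor = new InstructExecutor(context);
var inferenceParams = new InferenceParams { Temperature = 0.8f, MaxTokens = 256 };
await foreach (var piece in executor.InferAsync("Write a haiku about llamas.", inferenceParams))
{
    Console.Write(piece);
}

// The InstructExecutorState defined above can be persisted between sessions.
executor.SaveState("instruct-executor-state.json");

Because PostProcess registers the instruction prefix as an antiprompt and pauses on the EOS token, generation stops at natural turn boundaries and InferAsync can be called again with the next instruction on the same executor.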