
LLamaInstructExecutor.cs

using LLama.Abstractions;
using LLama.Common;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using LLama.Extensions;
using Microsoft.Extensions.Logging;

namespace LLama
{
    /// <summary>
    /// The LLama executor for instruct mode.
    /// </summary>
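    /// <example>
    /// A minimal usage sketch. The model path is a placeholder, and the loading calls may vary by LLamaSharp version:
    /// <code>
    /// var parameters = new ModelParams("path/to/model.gguf");
    /// using var model = LLamaWeights.LoadFromFile(parameters);
    /// using var context = model.CreateContext(parameters);
    /// var executor = new InstructExecutor(context);
    /// await foreach (var text in executor.InferAsync("Write a haiku about the sea."))
    ///     Console.Write(text);
    /// </code>
    /// </example>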
    public class InstructExecutor
        : StatefulExecutorBase
    {
        private bool _is_prompt_run = true;
        private readonly string _instructionPrefix;
        private LLamaToken[] _inp_pfx;
        private LLamaToken[] _inp_sfx;

        /// <summary>
        /// Create an executor that runs in instruct mode, wrapping each input in instruction prefix and suffix markers.
        /// </summary>
        /// <param name="context">The context to use for inference.</param>
        /// <param name="instructionPrefix">Text prepended to every instruction; also used as an antiprompt.</param>
        /// <param name="instructionSuffix">Text appended to every instruction.</param>
        /// <param name="logger">Optional logger.</param>
        public InstructExecutor(LLamaContext context,
                                string instructionPrefix = "\n\n### Instruction:\n\n",
                                string instructionSuffix = "\n\n### Response:\n\n",
                                ILogger? logger = null)
            : base(context, logger)
        {
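            // The prefix is tokenized with a leading BOS token; the suffix is not.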
            _inp_pfx = Context.Tokenize(instructionPrefix, true);
            _inp_sfx = Context.Tokenize(instructionSuffix, false);
            _instructionPrefix = instructionPrefix;
        }

        /// <inheritdoc />
        public override ExecutorBaseState GetStateData()
        {
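            // Snapshot every piece of mutable executor state so inference can be resumed later.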
            InstructExecutorState state = new()
            {
                ConsumedSessionCount = _n_session_consumed,
                EmbedInps = _embed_inps,
                IsPromptRun = _is_prompt_run,
                ConsumedTokensCount = _consumedTokensCount,
                Embeds = _embeds,
                LastTokens = _last_n_tokens.ToArray(),
                InputPrefixTokens = _inp_pfx,
                InputSuffixTokens = _inp_sfx,
                MatchingSessionTokensCount = _n_matching_session_tokens,
                PastTokensCount = _pastTokensCount,
                SessionFilePath = _pathSession,
                SessionTokens = _session_tokens,
                LastTokensCapacity = _last_n_tokens.Capacity,
                MirostatMu = MirostatMu
            };
            return state;
        }

        /// <inheritdoc />
        public override Task LoadState(ExecutorBaseState data)
        {
            if (data is InstructExecutorState state)
            {
                _n_session_consumed = state.ConsumedSessionCount;
                _embed_inps = state.EmbedInps;
                _is_prompt_run = state.IsPromptRun;
                _consumedTokensCount = state.ConsumedTokensCount;
                _embeds = state.Embeds;
                _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens);
                _inp_pfx = state.InputPrefixTokens;
                _inp_sfx = state.InputSuffixTokens;
                _n_matching_session_tokens = state.MatchingSessionTokensCount;
                _pastTokensCount = state.PastTokensCount;
                _pathSession = state.SessionFilePath;
                _session_tokens = state.SessionTokens;
            }
            else
            {
                throw new ArgumentException("Invalid state data type.");
            }
            return Task.CompletedTask;
        }

        /// <inheritdoc />
        public override async Task SaveState(string filename)
        {
            var state = (InstructExecutorState)GetStateData();
            using (var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
            {
                await JsonSerializer.SerializeAsync(fs, state);
            }
        }

        /// <inheritdoc />
        public override async Task LoadState(string filename)
        {
            using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
            {
                var state = await JsonSerializer.DeserializeAsync<InstructExecutorState>(fs);
                await LoadState(state);
            }
        }

        /// <inheritdoc />
        protected override Task<bool> GetLoopCondition(InferStateArgs args)
        {
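            // Keep inferring while there are tokens left to generate, or while the initial prompt has not been processed yet.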
            return Task.FromResult(args.RemainedTokens != 0 || _is_prompt_run);
        }

        /// <inheritdoc />
        protected override Task PreprocessInputs(string text, InferStateArgs args)
        {
            args.Antiprompts ??= new List<string>();
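            // Use the instruction prefix as an antiprompt, so generation pauses when the model starts a new instruction block.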
            args.Antiprompts.Add(_instructionPrefix);
            if (_is_prompt_run)
            {
                // The first input (the prompt) is processed specially: tokenize the whole text, with a leading BOS token.
                _embed_inps = Context.Tokenize(text, true).ToList();
            }
            else
            {
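                // Subsequent inputs are wrapped in the instruction markers:
                // {instructionPrefix}{text}{instructionSuffix}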
                if (!text.EndsWith("\n"))
                {
                    text += "\n";
                }
                _consumedTokensCount = _embed_inps.Count;
                _embed_inps.AddRange(_inp_pfx);
                var line_inp = Context.Tokenize(text, false);
                _embed_inps.AddRange(line_inp);
                _embed_inps.AddRange(_inp_sfx);
                args.RemainedTokens -= line_inp.Length;
            }
            return Task.CompletedTask;
        }

        /// <inheritdoc />
        protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embed_inps.Count <= _consumedTokensCount)
            {
                if (_last_n_tokens.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))
                {
                    args.WaitForInput = true;
                    return (true, Array.Empty<string>());
                }

                if (_pastTokensCount > 0 && args.WaitForInput)
                {
                    return (true, new[] { "\n> " });
                }
            }
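
            // If the model emitted an end-of-sequence token, stop generating and wait for the next instruction.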
            if (_embeds.Count > 0 && _embeds.Last() == NativeApi.llama_token_eos(Context.NativeHandle.ModelHandle))
            {
                args.WaitForInput = true;
            }

            if (args.RemainedTokens <= 0 && inferenceParams.MaxTokens != -1)
            {
                args.RemainedTokens = inferenceParams.MaxTokens;
                args.WaitForInput = true;
            }
            return (false, Array.Empty<string>());
        }

        /// <inheritdoc />
        protected override Task InferInternal(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embeds.Count > 0)
            {
                _is_prompt_run = false;
                if (_pastTokensCount + _embeds.Count > Context.ContextSize)
                {
                    HandleRunOutOfContext(inferenceParams.TokensKeep);
                }

                TryReuseMathingPrefix();
                _pastTokensCount = Context.Eval(_embeds, _pastTokensCount);

                if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
                {
                    _session_tokens.AddRange(_embeds);
                    _n_session_consumed = _session_tokens.Count;
                }
            }
            _embeds.Clear();
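
            // All pending tokens have been evaluated; sample the next token unless user input is still being consumed.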
            if (_embed_inps.Count <= _consumedTokensCount && !args.WaitForInput)
            {
                var repeat_last_n = inferenceParams.RepeatLastTokensCount < 0 ? Context.ContextSize : inferenceParams.RepeatLastTokensCount;

                // optionally save the session on first sample (for faster prompt loading next time)
                if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
                {
                    args.NeedToSaveSession = false;
                    SaveSessionFile(_pathSession);
                }

                LLamaToken id;
                if (inferenceParams.SamplingPipeline is not null)
                {
                    id = inferenceParams.SamplingPipeline.Sample(Context.NativeHandle, Context.NativeHandle.GetLogits(), _last_n_tokens.ToArray());
                }
                else
                {
                    var tokenDataArray = Context.ApplyPenalty(0, _last_n_tokens, inferenceParams.LogitBias, repeat_last_n,
                        inferenceParams.RepeatPenalty, inferenceParams.FrequencyPenalty, inferenceParams.PresencePenalty, inferenceParams.PenalizeNL);

                    var mu = MirostatMu;
                    id = Context.Sample(
                        tokenDataArray, ref mu, inferenceParams.Temperature, inferenceParams.Mirostat, inferenceParams.MirostatTau,
                        inferenceParams.MirostatEta, inferenceParams.TopK, inferenceParams.TopP, inferenceParams.TfsZ, inferenceParams.TypicalP, inferenceParams.Grammar,
                        inferenceParams.MinP
                    );
                    MirostatMu = mu;
                }

                _last_n_tokens.Enqueue(id);
                _embeds.Add(id);

                args.RemainedTokens--;
                args.ReturnValue = true;
            }
            else
            {
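                // Still consuming the tokenized input: queue tokens for evaluation, up to one batch at a time.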
                while (_embed_inps.Count > _consumedTokensCount)
                {
                    _embeds.Add(_embed_inps[_consumedTokensCount]);
                    _last_n_tokens.Enqueue(_embed_inps[_consumedTokensCount]);
                    _consumedTokensCount++;
                    if (_embeds.Count >= Context.Params.BatchSize)
                    {
                        break;
                    }
                }
            }
            return Task.CompletedTask;
        }

        /// <summary>
        /// The descriptor of the state of the instruct executor.
        /// </summary>
        public class InstructExecutorState : ExecutorBaseState
        {
            /// <summary>
            /// Whether the executor is running for the first time (running the prompt).
            /// </summary>
            [JsonPropertyName("is_prompt_run")]
            public bool IsPromptRun { get; set; }

            /// <summary>
            /// Instruction prefix tokens.
            /// </summary>
            [JsonPropertyName("inp_pfx")]
            public LLamaToken[] InputPrefixTokens { get; set; }

            /// <summary>
            /// Instruction suffix tokens.
            /// </summary>
            [JsonPropertyName("inp_sfx")]
            public LLamaToken[] InputSuffixTokens { get; set; }
        }
    }
}