
LLamaInteractExecutor.cs

using LLama.Common;
using LLama.Native;
using LLama.Abstractions;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using LLama.Extensions;
using Microsoft.Extensions.Logging;

namespace LLama
{
    using llama_token = Int32;

    /// <summary>
    /// The LLama executor for interactive mode.
    /// </summary>
    public class InteractiveExecutor : StatefulExecutorBase
    {
        private bool _is_prompt_run = true;
        private readonly llama_token _llama_token_newline;

        /// <summary>
        /// Create an executor for interactive mode.
        /// </summary>
        /// <param name="context">The context to run inference with.</param>
        /// <param name="logger">An optional logger.</param>
        public InteractiveExecutor(LLamaContext context, ILogger? logger = null)
            : base(context, logger)
        {
            _llama_token_newline = NativeApi.llama_token_nl(Context.NativeHandle);
        }
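
        // Illustrative usage (a minimal sketch, not part of the original file; the model
        // path and parameter values are placeholders):
        //
        //     var parameters = new ModelParams("model.gguf") { ContextSize = 1024 };
        //     using var weights = LLamaWeights.LoadFromFile(parameters);
        //     using var context = weights.CreateContext(parameters);
        //     var executor = new InteractiveExecutor(context);
        //     await foreach (var text in executor.InferAsync("Hello", new InferenceParams()))
        //         Console.Write(text);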

        /// <inheritdoc />
        public override ExecutorBaseState GetStateData()
        {
            InteractiveExecutorState state = new()
            {
                ConsumedSessionCount = _n_session_consumed,
                EmbedInps = _embed_inps,
                IsPromptRun = _is_prompt_run,
                ConsumedTokensCount = _consumedTokensCount,
                Embeds = _embeds,
                LastTokens = _last_n_tokens.ToArray(),
                MatchingSessionTokensCount = _n_matching_session_tokens,
                PastTokensCount = _pastTokensCount,
                SessionFilePath = _pathSession,
                SessionTokens = _session_tokens,
                LastTokensCapacity = _last_n_tokens.Capacity,
                MirostatMu = MirostatMu
            };
            return state;
        }

        /// <inheritdoc />
        public override Task LoadState(ExecutorBaseState data)
        {
            if (data is InteractiveExecutorState state)
            {
                _n_session_consumed = state.ConsumedSessionCount;
                _embed_inps = state.EmbedInps;
                _is_prompt_run = state.IsPromptRun;
                _consumedTokensCount = state.ConsumedTokensCount;
                _embeds = state.Embeds;
                _last_n_tokens = new FixedSizeQueue<llama_token>(state.LastTokensCapacity, state.LastTokens);
                _n_matching_session_tokens = state.MatchingSessionTokensCount;
                _pastTokensCount = state.PastTokensCount;
                _pathSession = state.SessionFilePath;
                _session_tokens = state.SessionTokens;
            }
            else
            {
                throw new ArgumentException("Invalid state data type.");
            }
            return Task.CompletedTask;
        }

        /// <inheritdoc />
        public override async Task SaveState(string filename)
        {
            var state = (InteractiveExecutorState)GetStateData();
            using (var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
            {
                await JsonSerializer.SerializeAsync(fs, state);
            }
        }

        /// <inheritdoc />
        public override async Task LoadState(string filename)
        {
            using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
            {
                var state = await JsonSerializer.DeserializeAsync<InteractiveExecutorState>(fs);
                await LoadState(state);
            }
        }
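
        // Illustrative round-trip (a sketch; the file name is a placeholder): the
        // JSON-serialized state can be captured after a run and restored into a fresh
        // executor over the same context.
        //
        //     await executor.SaveState("executor_state.json");
        //     var restored = new InteractiveExecutor(context);
        //     await restored.LoadState("executor_state.json");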

        /// <summary>
        /// Decide whether to continue the generation loop.
        /// </summary>
        /// <returns></returns>
        protected override Task<bool> GetLoopCondition(InferStateArgs args)
        {
            return Task.FromResult((args.RemainedTokens != 0 && !args.WaitForInput) || _is_prompt_run);
        }

        /// <inheritdoc />
        protected override Task PreprocessInputs(string text, InferStateArgs args)
        {
            if (_is_prompt_run)
            {
                // The first input (the prompt) in interactive mode needs special processing:
                // tokenize it with a BOS token prepended.
                _embed_inps = Context.Tokenize(text, true).ToList();
            }
            else
            {
                // Follow-up input: make sure it ends with a newline, then append its tokens.
                if (!text.EndsWith("\n"))
                {
                    text += "\n";
                }
                var line_inp = Context.Tokenize(text, false);
                _embed_inps.AddRange(line_inp);
                args.RemainedTokens -= line_inp.Length;
            }
            return Task.CompletedTask;
        }
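
        // Illustrative tokenization (sketch): only the prompt run prepends a BOS token,
        // matching the two calls above.
        //
        //     var promptTokens   = context.Tokenize("Hello", true);    // first (prompt) run
        //     var followUpTokens = context.Tokenize("Hello\n", false); // subsequent input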

        /// <summary>
        /// Decide whether to stop generation.
        /// </summary>
        /// <param name="inferenceParams"></param>
        /// <param name="args"></param>
        /// <returns></returns>
        protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embed_inps.Count <= _consumedTokensCount)
            {
                // All pending input has been consumed: pause for user input if an antiprompt matched.
                if (_last_n_tokens.Items.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))
                    args.WaitForInput = true;

                if (_pastTokensCount > 0 && args.WaitForInput)
                    return (true, Array.Empty<string>());
            }

            if (_embeds.Count > 0 && _embeds.Last() == NativeApi.llama_token_eos(Context.NativeHandle))
            {
                return (true, new[] { " [end of text]\n" });
            }

            if (args.RemainedTokens <= 0 && inferenceParams.MaxTokens != -1)
            {
                // Token budget exhausted: refill it and wait for the next user input.
                args.RemainedTokens = inferenceParams.MaxTokens;
                args.WaitForInput = true;
            }
            return (false, Array.Empty<string>());
        }
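
        // Illustrative configuration (sketch; the antiprompt string is a placeholder):
        // antiprompts come in through the inference parameters, and generation pauses
        // for input once the recent tokens end with one of them.
        //
        //     var inferenceParams = new InferenceParams
        //     {
        //         AntiPrompts = new List<string> { "User:" },
        //         MaxTokens = 256
        //     };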

        /// <inheritdoc />
        protected override async Task InferInternal(IInferenceParams inferenceParams, InferStateArgs args)
        {
            if (_embeds.Count > 0)
            {
                _is_prompt_run = false;
                if (_pastTokensCount + _embeds.Count > Context.ContextSize)
                {
                    HandleRunOutOfContext(inferenceParams.TokensKeep);
                }

                TryReuseMathingPrefix();
                _pastTokensCount = Context.Eval(_embeds, _pastTokensCount);

                if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
                {
                    _session_tokens.AddRange(_embeds);
                    _n_session_consumed = _session_tokens.Count;
                }
            }

            _embeds.Clear();

            if (_embed_inps.Count <= _consumedTokensCount && !args.WaitForInput)
            {
                var repeat_last_n = inferenceParams.RepeatLastTokensCount < 0 ? Context.ContextSize : inferenceParams.RepeatLastTokensCount;

                // optionally save the session on first sample (for faster prompt loading next time)
                if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
                {
                    args.NeedToSaveSession = false;
                    SaveSessionFile(_pathSession);
                }

                var tokenDataArray = Context.ApplyPenalty(_last_n_tokens, inferenceParams.LogitBias, repeat_last_n,
                    inferenceParams.RepeatPenalty, inferenceParams.FrequencyPenalty, inferenceParams.PresencePenalty, inferenceParams.PenalizeNL);

                var mu = MirostatMu;
                var id = Context.Sample(
                    tokenDataArray, ref mu, inferenceParams.Temperature, inferenceParams.Mirostat, inferenceParams.MirostatTau,
                    inferenceParams.MirostatEta, inferenceParams.TopK, inferenceParams.TopP, inferenceParams.TfsZ, inferenceParams.TypicalP,
                    inferenceParams.Grammar
                );
                MirostatMu = mu;

                _last_n_tokens.Enqueue(id);

                if (id == NativeApi.llama_token_eos(Context.NativeHandle))
                {
                    // Replace EOS with a newline and re-inject the first antiprompt so the
                    // conversation can continue.
                    id = _llama_token_newline;
                    if (args.Antiprompts is not null && args.Antiprompts.Count > 0)
                    {
                        var first_antiprompt = Context.Tokenize(args.Antiprompts[0], false);
                        _embed_inps.AddRange(first_antiprompt);
                    }
                }

                _embeds.Add(id);

                args.RemainedTokens--;
                args.ReturnValue = true;
            }
            else
            {
                // Consume pending input tokens, at most one batch at a time.
                while (_embed_inps.Count > _consumedTokensCount)
                {
                    _embeds.Add(_embed_inps[_consumedTokensCount]);
                    _last_n_tokens.Enqueue(_embed_inps[_consumedTokensCount]);
                    _consumedTokensCount++;
                    if (_embeds.Count >= Context.Params.BatchSize)
                    {
                        break;
                    }
                }
            }
        }
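
        // Illustrative sampling setup (sketch; the values are placeholders): the fields
        // read above map directly onto InferenceParams.
        //
        //     var inferenceParams = new InferenceParams
        //     {
        //         Temperature = 0.8f,
        //         TopK = 40,
        //         TopP = 0.95f,
        //         RepeatPenalty = 1.1f
        //     };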

        /// <summary>
        /// The descriptor of the state of the interactive executor.
        /// </summary>
        public class InteractiveExecutorState
            : ExecutorBaseState
        {
            /// <summary>
            /// Whether the executor is running for the first time (running the prompt).
            /// </summary>
            [JsonPropertyName("is_prompt_run")]
            public bool IsPromptRun { get; set; }
        }
    }
}