You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

LLamaInteractExecutor.cs 14 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337
  1. using LLama.Common;
  2. using LLama.Native;
  3. using LLama.Abstractions;
  4. using System;
  5. using System.Collections.Generic;
  6. using System.IO;
  7. using System.Linq;
  8. using System.Text.Json;
  9. using System.Text.Json.Serialization;
  10. using System.Threading.Tasks;
  11. using LLama.Exceptions;
  12. using LLama.Extensions;
  13. using Microsoft.Extensions.Logging;
  14. using System.Net.Http;
namespace LLama
{
    /// <summary>
    /// The LLama executor for interactive mode.
    /// </summary>
    public class InteractiveExecutor : StatefulExecutorBase
    {
        // True until the first input (the prompt) has been processed.
        private bool _is_prompt_run = true;
        // Newline token of the loaded model; substituted when EOS is sampled so generation can continue.
        private readonly LLamaToken _llama_token_newline;

        // LLava (multi-modal) support:
        // Index into _embeds where image embeddings are injected during decode; -1 when no image is pending.
        private int _EmbedImagePosition = -1;
        // Image embeddings created from Images during preprocessing, consumed (and cleared) in InferInternal.
        private List<SafeLlavaImageEmbedHandle> _imageEmbedHandles = new List<SafeLlavaImageEmbedHandle>();
        // Whether the current prompt contains an "<image>" tag.
        private bool _imageInPrompt = false;
        /// <summary>
        /// Create an executor for text-only interactive inference.
        /// </summary>
        /// <param name="context">The context to run inference with.</param>
        /// <param name="logger">Optional logger.</param>
        public InteractiveExecutor(LLamaContext context, ILogger? logger = null)
            : base(context, logger)
        {
            _llama_token_newline = NativeApi.llama_token_nl(Context.NativeHandle.ModelHandle);
        }

        /// <summary>
        /// Create an executor with LLava (multi-modal) support.
        /// </summary>
        /// <param name="context">The context to run inference with.</param>
        /// <param name="clipModel">Weights used to create image embeddings for "&lt;image&gt;" prompts.</param>
        /// <param name="logger">Optional logger.</param>
        public InteractiveExecutor(LLamaContext context, LLavaWeights clipModel, ILogger? logger = null)
            : base(context, clipModel, logger)
        {
            _llama_token_newline = NativeApi.llama_token_nl(Context.NativeHandle.ModelHandle);
        }
  43. /// <inheritdoc />
  44. public override ExecutorBaseState GetStateData()
  45. {
  46. InteractiveExecutorState state = new()
  47. {
  48. ConsumedSessionCount = _n_session_consumed,
  49. EmbedInps = _embed_inps.ToArray(),
  50. IsPromptRun = _is_prompt_run,
  51. ConsumedTokensCount = _consumedTokensCount,
  52. Embeds = _embeds.ToArray(),
  53. LastTokens = _last_n_tokens.ToArray(),
  54. MatchingSessionTokensCount = _n_matching_session_tokens,
  55. PastTokensCount = _pastTokensCount,
  56. SessionFilePath = _pathSession,
  57. SessionTokens = _session_tokens.ToArray(),
  58. LastTokensCapacity = _last_n_tokens.Capacity,
  59. MirostatMu = MirostatMu
  60. };
  61. return state;
  62. }
  63. /// <inheritdoc />
  64. public override Task LoadState(ExecutorBaseState data)
  65. {
  66. if (data is InteractiveExecutorState state)
  67. {
  68. _n_session_consumed = state.ConsumedSessionCount;
  69. _embed_inps = state.EmbedInps.ToList();
  70. _is_prompt_run = state.IsPromptRun;
  71. _consumedTokensCount = state.ConsumedTokensCount;
  72. _embeds = state.Embeds.ToList();
  73. _last_n_tokens = new FixedSizeQueue<LLamaToken>(state.LastTokensCapacity, state.LastTokens);
  74. _n_matching_session_tokens = state.MatchingSessionTokensCount;
  75. _pastTokensCount = state.PastTokensCount;
  76. _pathSession = state.SessionFilePath;
  77. _session_tokens = state.SessionTokens.ToList();
  78. }
  79. else
  80. throw new ArgumentException("Invalid state data type.");
  81. return Task.CompletedTask;
  82. }
  83. /// <inheritdoc />
  84. public override async Task SaveState(string filename)
  85. {
  86. var state = (InteractiveExecutorState)GetStateData();
  87. using(var fs = new FileStream(filename, FileMode.Create, FileAccess.Write))
  88. {
  89. await JsonSerializer.SerializeAsync(fs, state);
  90. }
  91. }
  92. /// <inheritdoc />
  93. public override async Task LoadState(string filename)
  94. {
  95. using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read))
  96. {
  97. var state = await JsonSerializer.DeserializeAsync<InteractiveExecutorState>(fs);
  98. await LoadState(state);
  99. }
  100. }
  101. /// <summary>
  102. /// Define whether to continue the loop to generate responses.
  103. /// </summary>
  104. /// <returns></returns>
  105. protected override Task<bool> GetLoopCondition(InferStateArgs args)
  106. {
  107. return Task.FromResult(args.RemainedTokens != 0 && !args.WaitForInput || _is_prompt_run);
  108. }
  109. /// <inheritdoc />
  110. protected override Task PreprocessInputs(string text, InferStateArgs args)
  111. {
  112. if (_is_prompt_run)
  113. {
  114. // When running the first input (prompt) in interactive mode, we should specially process it.
  115. if (!this.IsMultiModal)
  116. {
  117. _embed_inps = Context.Tokenize(text, true).ToList();
  118. }
  119. else
  120. {
  121. PreprocessLlava(text, args, true );
  122. }
  123. }
  124. else
  125. {
  126. if (!text.EndsWith("\n"))
  127. {
  128. text += "\n";
  129. }
  130. var line_inp = Context.Tokenize(text, false);
  131. _embed_inps.AddRange(line_inp);
  132. args.RemainedTokens -= line_inp.Length;
  133. }
  134. return Task.CompletedTask;
  135. }
  136. private Task PreprocessLlava(string text, InferStateArgs args, bool addBos = true )
  137. {
  138. int usedTokens = 0;
  139. // If the prompt contains the tag <image> extract this.
  140. _imageInPrompt = text.Contains("<image>");
  141. if (_imageInPrompt && ClipModel != null)
  142. {
  143. foreach (var image in Images)
  144. {
  145. _imageEmbedHandles.Add(SafeLlavaImageEmbedHandle.CreateFromMemory(ClipModel.NativeHandle, Context, image));
  146. }
  147. int imageIndex = text.IndexOf("<image>");
  148. // Tokenize segment 1 (before <image> tag)
  149. string preImagePrompt = text.Substring(0, imageIndex);
  150. var segment1 = Context.Tokenize(preImagePrompt, addBos );
  151. // Remember the position to add the image embeddings
  152. _EmbedImagePosition = segment1.Length;
  153. string postImagePrompt = text.Substring(imageIndex + 7);
  154. var segment2 = Context.Tokenize(postImagePrompt, false);
  155. _embed_inps.AddRange(segment1);
  156. _embed_inps.AddRange(segment2);
  157. usedTokens += (segment1.Length + segment2.Length);
  158. }
  159. else
  160. {
  161. _embed_inps = Context.Tokenize(text, true).ToList();
  162. }
  163. return Task.CompletedTask;
  164. }
  165. /// <summary>
  166. /// Return whether to break the generation.
  167. /// </summary>
  168. /// <param name="inferenceParams"></param>
  169. /// <param name="args"></param>
  170. /// <returns></returns>
  171. protected override async Task<(bool, IReadOnlyList<string>)> PostProcess(IInferenceParams inferenceParams, InferStateArgs args)
  172. {
  173. if (_embed_inps.Count <= _consumedTokensCount)
  174. {
  175. if (_last_n_tokens.TokensEndsWithAnyString(args.Antiprompts, Context.NativeHandle.ModelHandle, Context.Encoding))
  176. args.WaitForInput = true;
  177. if (_pastTokensCount > 0 && args.WaitForInput)
  178. return (true, Array.Empty<string>());
  179. }
  180. if (_embeds.Count > 0 && _embeds.Last() == NativeApi.llama_token_eos(Context.NativeHandle.ModelHandle))
  181. {
  182. return (true, new[] { " [end of text]\n" });
  183. }
  184. if (args.RemainedTokens <= 0 && inferenceParams.MaxTokens != -1)
  185. {
  186. args.RemainedTokens = inferenceParams.MaxTokens;
  187. args.WaitForInput = true;
  188. }
  189. return (false, Array.Empty<string>());
  190. }
  191. /// <inheritdoc />
  192. protected override Task InferInternal(IInferenceParams inferenceParams, InferStateArgs args)
  193. {
  194. var batch = new LLamaBatch();
  195. if (_embeds.Count > 0)
  196. {
  197. _is_prompt_run = false;
  198. if (_pastTokensCount + _embeds.Count > Context.ContextSize)
  199. {
  200. HandleRunOutOfContext(inferenceParams.TokensKeep);
  201. }
  202. TryReuseMathingPrefix();
  203. // Changes to support Multi-Modal LLMs.
  204. //
  205. (DecodeResult, int) header, end, result;
  206. if (IsMultiModal && _EmbedImagePosition > 0)
  207. {
  208. // Tokens previous to the images
  209. header = Context.NativeHandle.Decode(_embeds.GetRange(0, _EmbedImagePosition), LLamaSeqId.Zero, batch, ref _pastTokensCount);
  210. if (header.Item1 != DecodeResult.Ok) throw new LLamaDecodeError(header.Item1);
  211. // Images
  212. foreach( var image in _imageEmbedHandles )
  213. ClipModel.EvalImageEmbed(Context, image, ref _pastTokensCount);
  214. // Post-image Tokens
  215. end = Context.NativeHandle.Decode(_embeds.GetRange(_EmbedImagePosition, _embeds.Count - _EmbedImagePosition), LLamaSeqId.Zero, batch, ref _pastTokensCount);
  216. _EmbedImagePosition = -1;
  217. _imageEmbedHandles.Clear();
  218. }
  219. else
  220. {
  221. result = Context.NativeHandle.Decode(_embeds, LLamaSeqId.Zero, batch, ref _pastTokensCount);
  222. if (result.Item1 != DecodeResult.Ok) throw new LLamaDecodeError(result.Item1);
  223. }
  224. if (_embeds.Count > 0 && !string.IsNullOrEmpty(_pathSession))
  225. {
  226. _session_tokens.AddRange(_embeds);
  227. _n_session_consumed = _session_tokens.Count;
  228. }
  229. }
  230. _embeds.Clear();
  231. if (_embed_inps.Count <= _consumedTokensCount && !args.WaitForInput)
  232. {
  233. var repeat_last_n = inferenceParams.RepeatLastTokensCount < 0 ? (int)Context.ContextSize : inferenceParams.RepeatLastTokensCount;
  234. // optionally save the session on first sample (for faster prompt loading next time)
  235. if (!string.IsNullOrEmpty(_pathSession) && args.NeedToSaveSession)
  236. {
  237. args.NeedToSaveSession = false;
  238. SaveSessionFile(_pathSession);
  239. }
  240. LLamaToken id;
  241. if (inferenceParams.SamplingPipeline is not null)
  242. {
  243. id = inferenceParams.SamplingPipeline.Sample(Context.NativeHandle, Context.NativeHandle.GetLogitsIth(batch.TokenCount - 1), _last_n_tokens.ToArray());
  244. inferenceParams.SamplingPipeline.Accept(Context.NativeHandle, id);
  245. }
  246. else
  247. {
  248. var tokenDataArray = Context.ApplyPenalty(batch.TokenCount - 1, _last_n_tokens, inferenceParams.LogitBias, repeat_last_n,
  249. inferenceParams.RepeatPenalty, inferenceParams.FrequencyPenalty, inferenceParams.PresencePenalty, inferenceParams.PenalizeNL);
  250. var mu = MirostatMu;
  251. id = Context.Sample(
  252. tokenDataArray, ref mu, inferenceParams.Temperature, inferenceParams.Mirostat, inferenceParams.MirostatTau,
  253. inferenceParams.MirostatEta, inferenceParams.TopK, inferenceParams.TopP, inferenceParams.TfsZ, inferenceParams.TypicalP, inferenceParams.Grammar,
  254. inferenceParams.MinP
  255. );
  256. MirostatMu = mu;
  257. }
  258. _last_n_tokens.Enqueue(id);
  259. if (id == NativeApi.llama_token_eos(Context.NativeHandle.ModelHandle))
  260. {
  261. id = _llama_token_newline;
  262. if (args.Antiprompts is not null && args.Antiprompts.Count > 0)
  263. {
  264. var first_antiprompt = Context.Tokenize(args.Antiprompts[0], false);
  265. _embed_inps.AddRange(first_antiprompt);
  266. }
  267. }
  268. _embeds.Add(id);
  269. args.RemainedTokens--;
  270. args.ReturnValue = true;
  271. }
  272. else
  273. {
  274. while (_embed_inps.Count > _consumedTokensCount)
  275. {
  276. _embeds.Add(_embed_inps[_consumedTokensCount]);
  277. _last_n_tokens.Enqueue(_embed_inps[_consumedTokensCount]);
  278. _consumedTokensCount++;
  279. if (_embeds.Count >= Context.Params.BatchSize)
  280. {
  281. break;
  282. }
  283. }
  284. }
  285. return Task.CompletedTask;
  286. }
        /// <summary>
        /// The descriptor of the state of the interactive executor.
        /// </summary>
        public class InteractiveExecutorState
            : ExecutorBaseState
        {
            /// <summary>
            /// Whether the executor is running for the first time (running the prompt).
            /// </summary>
            [JsonPropertyName("is_prompt_run")]
            public bool IsPromptRun { get; set; }
        }
  299. }
  300. }