You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

InteractiveModeExecute.cs 1.5 kB

12345678910111213141516171819202122232425262728293031323334353637383940
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLama.Common;
  7. namespace LLama.Examples.NewVersion
  8. {
  9. public class InteractiveModeExecute
  10. {
  11. public async static Task Run()
  12. {
  13. Console.Write("Please input your model path: ");
  14. string modelPath = Console.ReadLine();
  15. var prompt = File.ReadAllText("Assets/chat-with-bob.txt").Trim();
  16. InteractiveExecutor ex = new(new LLamaModel(new ModelParams(modelPath, contextSize: 256)));
  17. Console.ForegroundColor = ConsoleColor.Yellow;
  18. Console.WriteLine("The executor has been enabled. In this example, the prompt is printed, the maximum tokens is set to 128 and the context size is 256. (an example for small scale usage)");
  19. Console.ForegroundColor = ConsoleColor.White;
  20. Console.Write(prompt);
  21. var inferenceParams = new InferenceParams() { Temperature = 0.6f, AntiPrompts = new List<string> { "User:" }, MaxTokens = 128 };
  22. while (true)
  23. {
  24. await foreach (var text in ex.InferAsync(prompt, inferenceParams))
  25. {
  26. Console.Write(text);
  27. }
  28. Console.ForegroundColor = ConsoleColor.Green;
  29. prompt = Console.ReadLine();
  30. Console.ForegroundColor = ConsoleColor.White;
  31. }
  32. }
  33. }
  34. }

C#/.NET上易用的LLM高性能推理框架,支持LLaMA和LLaVA系列模型。