You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

StatelessModeExecute.cs 1.9 kB

12345678910111213141516171819202122232425262728293031323334353637383940
  1. using LLama.Common;
  2. namespace LLama.Examples.NewVersion
  3. {
  4. public class StatelessModeExecute
  5. {
  6. public static void Run()
  7. {
  8. Console.Write("Please input your model path: ");
  9. var modelPath = Console.ReadLine();
  10. var parameters = new ModelParams(modelPath, contextSize: 1024, seed: 1337, gpuLayerCount: 5);
  11. using var model = LLamaWeights.LoadFromFile(parameters);
  12. var ex = new StatelessExecutor(model, parameters);
  13. Console.ForegroundColor = ConsoleColor.Yellow;
  14. Console.WriteLine("The executor has been enabled. In this example, the inference is an one-time job. That says, the previous input and response has " +
  15. "no impact on the current response. Now you can ask it questions. Note that in this example, no prompt was set for LLM and the maximum response tokens is 50. " +
  16. "It may not perform well because of lack of prompt. This is also an example that could indicate the improtance of prompt in LLM. To improve it, you can add " +
  17. "a prompt for it yourself!");
  18. Console.ForegroundColor = ConsoleColor.White;
  19. var inferenceParams = new InferenceParams() { Temperature = 0.6f, AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" }, MaxTokens = 50 };
  20. while (true)
  21. {
  22. Console.Write("\nQuestion: ");
  23. Console.ForegroundColor = ConsoleColor.Green;
  24. var prompt = Console.ReadLine();
  25. Console.ForegroundColor = ConsoleColor.White;
  26. Console.Write("Answer: ");
  27. prompt = $"Question: {prompt?.Trim()} Answer: ";
  28. foreach (var text in ex.Infer(prompt, inferenceParams))
  29. {
  30. Console.Write(text);
  31. }
  32. }
  33. }
  34. }
  35. }