
GrammarJsonResponse.cs 1.9 kB

using LLama.Common;
using LLama.Grammars;

namespace LLama.Examples.Examples
{
    public class GrammarJsonResponse
    {
        public static async Task Run()
        {
            string modelPath = UserSettings.GetModelPath();

            // Load the GBNF grammar that constrains the model's output to valid JSON,
            // using "root" as the start symbol.
            var gbnf = File.ReadAllText("Assets/json.gbnf").Trim();
            var grammar = Grammar.Parse(gbnf, "root");

            var parameters = new ModelParams(modelPath)
            {
                Seed = 1337,
                GpuLayerCount = 5
            };
            using var model = LLamaWeights.LoadFromFile(parameters);
            var ex = new StatelessExecutor(model, parameters);

            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine("The executor has been enabled. In this example, the LLM will follow your instructions and always respond in JSON format. For example, you can input \"Tell me the attributes of a good dish\"");
            Console.ForegroundColor = ConsoleColor.White;

            // Create a grammar instance and attach it to the inference parameters.
            using var grammarInstance = grammar.CreateInstance();
            var inferenceParams = new InferenceParams()
            {
                Temperature = 0.6f,
                AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
                MaxTokens = 50,
                Grammar = grammarInstance
            };

            while (true)
            {
                Console.Write("\nQuestion: ");
                Console.ForegroundColor = ConsoleColor.Green;
                var prompt = Console.ReadLine();
                Console.ForegroundColor = ConsoleColor.White;
                Console.Write("Answer: ");
                prompt = $"Question: {prompt?.Trim()} Answer: ";

                // Stream tokens as they are generated; the grammar keeps the output valid JSON.
                await foreach (var text in ex.InferAsync(prompt, inferenceParams))
                {
                    Console.Write(text);
                }
            }
        }
    }
}
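
The example assumes a GBNF grammar file at Assets/json.gbnf whose start rule is named "root" (the name passed to Grammar.Parse must match a rule defined in the file). The exact contents of the shipped file are not reproduced here, but a minimal JSON grammar in llama.cpp's GBNF notation looks roughly like the following illustrative sketch; all rule names other than root are arbitrary.

# Illustrative JSON grammar in GBNF (the actual Assets/json.gbnf may differ)
root   ::= object
object ::= "{" ws ( member ("," ws member)* )? "}" ws
member ::= string ":" ws value
value  ::= object | array | string | number | ("true" | "false" | "null") ws
array  ::= "[" ws ( value ("," ws value)* )? "]" ws
string ::= "\"" ( [^"\\] | "\\" ["\\bfnrt] )* "\"" ws
number ::= "-"? [0-9]+ ("." [0-9]+)? ws
ws     ::= [ \t\n]*

During sampling, the grammar instance masks out any token that would break these rules, which is why the streamed answer stays well-formed JSON even at a non-zero temperature.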