- Renamed files and updated namespaces in the Examples folder. - Moved files from the NewVersion folder to the Examples folder. - Removed the TestRunner.cs file. - Updated Runner.cs to include the new examples. - Updated Program.cs to use the new Runner class instead of NewVersionTestRunner: changed the namespace import to LLama.Examples.Examples and replaced `await NewVersionTestRunner.Run()` with `await Runner.Run()`. (tag: v0.8.1)
| @@ -4,7 +4,7 @@ using System.Text; | |||||
| using LLama.Common; | using LLama.Common; | ||||
| using LLama.Native; | using LLama.Native; | ||||
| namespace LLama.Examples.NewVersion; | |||||
| namespace LLama.Examples.Examples; | |||||
| /// <summary> | /// <summary> | ||||
| /// This demonstrates generating multiple replies to the same prompt, with a shared cache | /// This demonstrates generating multiple replies to the same prompt, with a shared cache | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class ChatSessionStripRoleName | public class ChatSessionStripRoleName | ||||
| { | { | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class ChatSessionWithRoleName | public class ChatSessionWithRoleName | ||||
| { | { | ||||
| @@ -1,4 +1,4 @@ | |||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| using LLama.Common; | using LLama.Common; | ||||
| using System; | using System; | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class GetEmbeddings | public class GetEmbeddings | ||||
| { | { | ||||
| @@ -1,7 +1,7 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| using LLama.Grammars; | using LLama.Grammars; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class GrammarJsonResponse | public class GrammarJsonResponse | ||||
| { | { | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class InstructModeExecute | public class InstructModeExecute | ||||
| { | { | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class InteractiveModeExecute | public class InteractiveModeExecute | ||||
| { | { | ||||
| @@ -7,7 +7,7 @@ using LLamaSharp.KernelMemory; | |||||
| using Microsoft.KernelMemory; | using Microsoft.KernelMemory; | ||||
| using Microsoft.KernelMemory.Handlers; | using Microsoft.KernelMemory.Handlers; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class KernelMemory | public class KernelMemory | ||||
| { | { | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class SaveAndLoadSession | public class SaveAndLoadSession | ||||
| { | { | ||||
| @@ -1,6 +1,6 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class LoadAndSaveState | public class LoadAndSaveState | ||||
| { | { | ||||
| @@ -1,4 +1,4 @@ | |||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class QuantizeModel | public class QuantizeModel | ||||
| { | { | ||||
| @@ -0,0 +1,53 @@ | |||||
| using Spectre.Console; | |||||
| namespace LLama.Examples.Examples; | |||||
/// <summary>
/// Interactive menu that lets the user pick and run one of the LLamaSharp examples.
/// Replaces the old numeric-input NewVersionTestRunner.
/// </summary>
public class Runner
{
    // Maps the menu label shown to the user to the example entry point it launches.
    // "Exit" is a sentinel key handled specially inside Run().
    // private + readonly: the map is fixed at startup and never reassigned.
    private static readonly Dictionary<string, Func<Task>> Examples = new()
    {
        { "Run a chat session without stripping the role names.", () => ChatSessionWithRoleName.Run() },
        { "Run a chat session with the role names stripped.", () => ChatSessionStripRoleName.Run() },
        { "Interactive mode chat by using executor.", () => InteractiveModeExecute.Run() },
        { "Instruct mode chat by using executor.", () => InstructModeExecute.Run() },
        { "Stateless mode chat by using executor.", () => StatelessModeExecute.Run() },
        { "Load and save chat session.", () => SaveAndLoadSession.Run() },
        { "Load and save state of model and executor.", () => LoadAndSaveState.Run() },
        // These two examples are synchronous, so wrap them in Task.Run to fit Func<Task>.
        { "Get embeddings from LLama model.", () => Task.Run(GetEmbeddings.Run) },
        { "Quantize the model.", () => Task.Run(QuantizeModel.Run) },
        { "Automatic conversation.", () => TalkToYourself.Run() },
        { "Constrain response to json format using grammar.", () => GrammarJsonResponse.Run() },
        { "Semantic Kernel Prompt.", () => SemanticKernelPrompt.Run() },
        { "Semantic Kernel Chat.", () => SemanticKernelChat.Run() },
        { "Semantic Kernel Memory.", () => SemanticKernelMemory.Run() },
        { "Coding Assistant.", () => CodingAssistant.Run() },
        { "Batch Decoding.", () => BatchedDecoding.Run() },
        { "SK Kernel Memory.", () => KernelMemory.Run() },
        { "Exit", () => Task.CompletedTask }
    };

    /// <summary>
    /// Shows the example selection menu in a loop until the user chooses "Exit".
    /// </summary>
    public static async Task Run()
    {
        AnsiConsole.Write(new Rule("LLamaSharp Examples"));

        while (true)
        {
            // Fixed markup: the tag previously opened before the space
            // ("choose[green] an example[/]"), coloring the space instead of
            // starting cleanly at the word.
            var choice = AnsiConsole.Prompt(
                new SelectionPrompt<string>()
                    .Title("Please choose [green]an example[/] to run: ")
                    .AddChoices(Examples.Keys));

            if (Examples.TryGetValue(choice, out var example))
            {
                if (choice == "Exit")
                {
                    break;
                }

                AnsiConsole.Write(new Rule(choice));
                await example();
            }

            // Clear before re-displaying the menu. NOTE(review): this also erases the
            // example's output as soon as it finishes — consider pausing for a key
            // press before clearing.
            AnsiConsole.Clear();
        }
    }
}
| @@ -3,7 +3,7 @@ using LLama.Common; | |||||
| using Microsoft.SemanticKernel.AI.ChatCompletion; | using Microsoft.SemanticKernel.AI.ChatCompletion; | ||||
| using LLamaSharp.SemanticKernel.ChatCompletion; | using LLamaSharp.SemanticKernel.ChatCompletion; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class SemanticKernelChat | public class SemanticKernelChat | ||||
| { | { | ||||
| @@ -5,7 +5,7 @@ using LLamaSharp.SemanticKernel.TextEmbedding; | |||||
| using Microsoft.SemanticKernel.AI.Embeddings; | using Microsoft.SemanticKernel.AI.Embeddings; | ||||
| using Microsoft.SemanticKernel.Plugins.Memory; | using Microsoft.SemanticKernel.Plugins.Memory; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class SemanticKernelMemory | public class SemanticKernelMemory | ||||
| { | { | ||||
| @@ -5,7 +5,7 @@ using Microsoft.SemanticKernel; | |||||
| using Microsoft.SemanticKernel.AI.TextCompletion; | using Microsoft.SemanticKernel.AI.TextCompletion; | ||||
| using LLamaSharp.SemanticKernel.TextCompletion; | using LLamaSharp.SemanticKernel.TextCompletion; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class SemanticKernelPrompt | public class SemanticKernelPrompt | ||||
| { | { | ||||
| @@ -1,7 +1,7 @@ | |||||
| using LLama.Common; | using LLama.Common; | ||||
| using LLama.Examples.Extensions; | using LLama.Examples.Extensions; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class StatelessModeExecute | public class StatelessModeExecute | ||||
| { | { | ||||
| @@ -3,7 +3,7 @@ using System.Text; | |||||
| using LLama.Abstractions; | using LLama.Abstractions; | ||||
| using LLama.Common; | using LLama.Common; | ||||
| namespace LLama.Examples.NewVersion | |||||
| namespace LLama.Examples.Examples | |||||
| { | { | ||||
| public class TalkToYourself | public class TalkToYourself | ||||
| { | { | ||||
| @@ -1,112 +0,0 @@ | |||||
namespace LLama.Examples.NewVersion
{
    /// <summary>
    /// Console menu that lets the user pick one of the LLamaSharp examples by number
    /// and runs it. Exits after a single example completes.
    /// </summary>
    public class NewVersionTestRunner
    {
        /// <summary>
        /// Prints the numbered menu, reads the user's choice, and dispatches to the
        /// selected example. Loops until a valid choice has been handled.
        /// </summary>
        public static async Task Run()
        {
            Console.WriteLine("================LLamaSharp Examples (New Version)==================\n");

            Console.WriteLine("Please input a number to choose an example to run:");
            Console.WriteLine("0: Run a chat session without stripping the role names.");
            Console.WriteLine("1: Run a chat session with the role names stripped.");
            Console.WriteLine("2: Interactive mode chat by using executor.");
            Console.WriteLine("3: Instruct mode chat by using executor.");
            Console.WriteLine("4: Stateless mode chat by using executor.");
            Console.WriteLine("5: Load and save chat session.");
            Console.WriteLine("6: Load and save state of model and executor.");
            Console.WriteLine("7: Get embeddings from LLama model.");
            Console.WriteLine("8: Quantize the model.");
            Console.WriteLine("9: Automatic conversation.");
            Console.WriteLine("10: Constrain response to json format using grammar.");
            Console.WriteLine("11: Semantic Kernel Prompt.");
            Console.WriteLine("12: Semantic Kernel Chat.");
            Console.WriteLine("13: Semantic Kernel Memory.");
            Console.WriteLine("14: Coding Assistant.");
            Console.WriteLine("15: Batch Decoding.");
            Console.WriteLine("16: SK Kernel Memory.");

            while (true)
            {
                Console.Write("\nYour choice: ");

                // BUG FIX: the original used int.Parse(Console.ReadLine()), which
                // throws FormatException on non-numeric input (and
                // ArgumentNullException on end-of-stream), so the "Cannot parse"
                // fallback below was unreachable for real parse failures.
                if (!int.TryParse(Console.ReadLine(), out var choice))
                {
                    Console.WriteLine("Cannot parse your choice. Please select again.");
                    continue;
                }

                if (choice == 0)
                {
                    await ChatSessionWithRoleName.Run();
                }
                else if (choice == 1)
                {
                    await ChatSessionStripRoleName.Run();
                }
                else if (choice == 2)
                {
                    await InteractiveModeExecute.Run();
                }
                else if (choice == 3)
                {
                    await InstructModeExecute.Run();
                }
                else if (choice == 4)
                {
                    await StatelessModeExecute.Run();
                }
                else if (choice == 5)
                {
                    await SaveAndLoadSession.Run();
                }
                else if (choice == 6)
                {
                    await LoadAndSaveState.Run();
                }
                else if (choice == 7)
                {
                    // Synchronous example: no await.
                    GetEmbeddings.Run();
                }
                else if (choice == 8)
                {
                    // Synchronous example: no await.
                    QuantizeModel.Run();
                }
                else if (choice == 9)
                {
                    await TalkToYourself.Run();
                }
                else if (choice == 10)
                {
                    await GrammarJsonResponse.Run();
                }
                else if (choice == 11)
                {
                    await SemanticKernelPrompt.Run();
                }
                else if (choice == 12)
                {
                    await SemanticKernelChat.Run();
                }
                else if (choice == 13)
                {
                    await SemanticKernelMemory.Run();
                }
                else if (choice == 14)
                {
                    await CodingAssistant.Run();
                }
                else if (choice == 15)
                {
                    await BatchedDecoding.Run();
                }
                else if (choice == 16)
                {
                    await KernelMemory.Run();
                }
                else
                {
                    // A number was entered but it is outside the menu range.
                    Console.WriteLine("Cannot parse your choice. Please select again.");
                    continue;
                }

                // One example has run successfully; leave the menu loop.
                break;
            }
        }
    }
}
| @@ -1,4 +1,4 @@ | |||||
| using LLama.Examples.NewVersion; | |||||
| using LLama.Examples.Examples; | |||||
| using LLama.Native; | using LLama.Native; | ||||
| Console.WriteLine("======================================================================================================"); | Console.WriteLine("======================================================================================================"); | ||||
| @@ -12,4 +12,4 @@ NativeLibraryConfig.Instance.WithCuda().WithLogs(); | |||||
| NativeApi.llama_empty_call(); | NativeApi.llama_empty_call(); | ||||
| Console.WriteLine(); | Console.WriteLine(); | ||||
| await NewVersionTestRunner.Run(); | |||||
| await Runner.Run(); | |||||