* LLama.Examples: RepoUtils.cs → ConsoleLogger.cs
* LLama.Examples: Examples/Runner.cs → ExampleRunner.cs
* LLama.Examples: delete unused console logger
* LLama.Examples: improve splash screen appearance: llama_empty_call() no longer shows configuration information on startup, but it will display it automatically the first time a model is loaded
* LLama.Examples: Runner → ExampleRunner
* LLama.Examples: improve model path prompt: the last used model path is stored in a config file and re-used when a blank path is provided
* LLama.Examples: call NativeApi.llama_empty_call() at startup
* LLama.Examples: reduce console noise when saving the model path

tags/0.11.0
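Note on the new model-path flow (a minimal sketch, not part of the diff itself): every example now resolves its model path through the UserSettings helper added at the bottom of this changeset, instead of prompting with Console.ReadLine(). The helper persists the chosen path to DefaultModel.env next to the binaries, so pressing ENTER on a later run re-uses the last model.

    // C# sketch of the pattern each example's Run() method now follows.
    // UserSettings.GetModelPath() prompts once, validates that the file
    // exists, saves it to DefaultModel.env, and offers it as the default
    // on subsequent runs.
    public static async Task Run()
    {
        string modelPath = UserSettings.GetModelPath();

        var parameters = new ModelParams(modelPath);
        using var model = LLamaWeights.LoadFromFile(parameters);
        // ... example body unchanged ...
    }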
@@ -1,8 +1,7 @@
 using Spectre.Console;
+using LLama.Examples.Examples;
-namespace LLama.Examples.Examples;
-public class Runner
+public class ExampleRunner
 {
     private static readonly Dictionary<string, Func<Task>> Examples = new()
     {
@@ -26,7 +25,7 @@ public class Runner
         { "Batched Executor (Fork)", BatchedExecutorFork.Run },
         { "Batched Executor (Rewind)", BatchedExecutorRewind.Run },
         { "SK Kernel Memory.", KernelMemory.Run },
-        { "Exit", async () => Environment.Exit(0) }
+        { "Exit", () => { Environment.Exit(0); return Task.CompletedTask; } }
     };
     public static async Task Run()
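Aside on the "Exit" entry above (assuming standard C# compiler behaviour, not stated in the commit): the old async lambda compiled, but an async method containing no await draws warning CS1998 and wraps Environment.Exit in a needless async state machine; the synchronous lambda satisfies the same Func<Task> signature without either.

    // Before: compiles, but warns (CS1998: async method lacks 'await').
    Func<Task> exitOld = async () => Environment.Exit(0);

    // After: identical behaviour, no warning, no async state machine.
    Func<Task> exitNew = () => { Environment.Exit(0); return Task.CompletedTask; };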
@@ -15,8 +15,7 @@ public class BatchedExecutorFork
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);
@@ -16,8 +16,7 @@ public class BatchedExecutorRewind
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath);
         using var model = LLamaWeights.LoadFromFile(parameters);
@@ -22,8 +22,7 @@ public class ChatChineseGB2312
             " to use https://huggingface.co/hfl/chinese-alpaca-2-7b-gguf/blob/main/ggml-model-q5_0.gguf, which has been verified by LLamaSharp developers.");
         Console.ForegroundColor = ConsoleColor.White;
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath)
         {
@@ -6,8 +6,7 @@ public class ChatSessionStripRoleName
 {
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath)
         {
@@ -6,8 +6,7 @@ public class ChatSessionWithHistory
 {
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath)
         {
@@ -6,8 +6,7 @@ public class ChatSessionWithRoleName
 {
     public static async Task Run()
     {
-        Console.Write("Please input your model path: ");
-        var modelPath = Console.ReadLine();
+        string modelPath = UserSettings.GetModelPath();
         var parameters = new ModelParams(modelPath)
         {
@@ -2,27 +2,27 @@
 {
     using LLama.Common;
     using System;
-    using System.Reflection;
     internal class CodingAssistant
     {
-        const string DefaultModelUri = "https://huggingface.co/TheBloke/CodeLlama-7B-Instruct-GGUF/resolve/main/codellama-7b-instruct.Q4_K_S.gguf";
         // Source paper with example prompts:
         // https://doi.org/10.48550/arXiv.2308.12950
         const string InstructionPrefix = "[INST]";
         const string InstructionSuffix = "[/INST]";
-        const string SystemInstruction = "You're an intelligent, concise coding assistant. Wrap code in ``` for readability. Don't repeat yourself. Use best practice and good coding standards.";
-        private static string ModelsDirectory = Path.Combine(Directory.GetParent(Assembly.GetExecutingAssembly().Location)!.FullName, "Models");
+        const string SystemInstruction = "You're an intelligent, concise coding assistant. " +
+            "Wrap code in ``` for readability. Don't repeat yourself. " +
+            "Use best practice and good coding standards.";
         public static async Task Run()
         {
-            Console.Write("Please input your model path (if left empty, a default model will be downloaded for you): ");
-            var modelPath = Console.ReadLine();
-            if(string.IsNullOrWhiteSpace(modelPath) )
+            string modelPath = UserSettings.GetModelPath();
+            if (!modelPath.Contains("codellama", StringComparison.InvariantCultureIgnoreCase))
             {
-                modelPath = await GetDefaultModel();
+                Console.ForegroundColor = ConsoleColor.Yellow;
+                Console.WriteLine("WARNING: the model you selected is not a Code LLama model!");
+                Console.WriteLine("For this example we specifically recommend 'codellama-7b-instruct.Q4_K_S.gguf'");
+                Console.WriteLine("Press ENTER to continue...");
+                Console.ReadLine();
             }
             var parameters = new ModelParams(modelPath)
@@ -35,12 +35,14 @@
             Console.ForegroundColor = ConsoleColor.Yellow;
             Console.WriteLine("The executor has been enabled. In this example, the LLM will follow your instructions." +
-                "\nIt's a 7B Code Llama, so it's trained for programming tasks like \"Write a C# function reading a file name from a given URI\" or \"Write some programming interview questions\"." +
+                "\nIt's a 7B Code Llama, so it's trained for programming tasks like \"Write a C# function reading " +
+                "a file name from a given URI\" or \"Write some programming interview questions\"." +
                 "\nWrite 'exit' to exit");
             Console.ForegroundColor = ConsoleColor.White;
-            var inferenceParams = new InferenceParams() {
-                Temperature = 0.8f,
+            var inferenceParams = new InferenceParams()
+            {
+                Temperature = 0.8f,
                 MaxTokens = -1,
             };
@@ -51,7 +53,7 @@
             {
                 Console.ForegroundColor = ConsoleColor.Green;
-                await foreach (var text in executor.InferAsync(instruction + System.Environment.NewLine, inferenceParams))
+                await foreach (var text in executor.InferAsync(instruction + Environment.NewLine, inferenceParams))
                 {
                     Console.Write(text);
                 }
@@ -61,34 +63,5 @@
                 instruction = Console.ReadLine() ?? "Ask me for instructions.";
             }
         }
-        private static async Task<string> GetDefaultModel()
-        {
-            var uri = new Uri(DefaultModelUri);
-            var modelName = uri.Segments[^1];
-            await Console.Out.WriteLineAsync($"The following model will be used: {modelName}");
-            var modelPath = Path.Combine(ModelsDirectory, modelName);
-            if(!Directory.Exists(ModelsDirectory))
-            {
-                Directory.CreateDirectory(ModelsDirectory);
-            }
-            if (File.Exists(modelPath))
-            {
-                await Console.Out.WriteLineAsync($"Existing model found, using {modelPath}");
-            }
-            else
-            {
-                await Console.Out.WriteLineAsync($"Model not found locally, downloading {DefaultModelUri}...");
-                using var http = new HttpClient();
-                await using var downloadStream = await http.GetStreamAsync(uri);
-                await using var fileStream = new FileStream(modelPath, FileMode.Create, FileAccess.Write);
-                await downloadStream.CopyToAsync(fileStream);
-                await Console.Out.WriteLineAsync($"Model downloaded and saved to {modelPath}");
-            }
-            return modelPath;
-        }
     }
 }
@@ -6,9 +6,7 @@ namespace LLama.Examples.Examples
     {
         public static void Run()
         {
-            Console.ForegroundColor = ConsoleColor.White;
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             Console.ForegroundColor = ConsoleColor.DarkGray;
             var @params = new ModelParams(modelPath) { EmbeddingMode = true };
@@ -7,11 +7,10 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
        {
-            var gbnf = (await File.ReadAllTextAsync("Assets/json.gbnf")).Trim();
-            var grammar = Grammar.Parse(gbnf, "root");
+            string modelPath = UserSettings.GetModelPath();
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            var gbnf = File.ReadAllText("Assets/json.gbnf").Trim();
+            var grammar = Grammar.Parse(gbnf, "root");
             var parameters = new ModelParams(modelPath)
             {
@@ -27,10 +26,10 @@ namespace LLama.Examples.Examples
             Console.ForegroundColor = ConsoleColor.White;
             using var grammarInstance = grammar.CreateInstance();
-            var inferenceParams = new InferenceParams()
-            {
-                Temperature = 0.6f,
-                AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
+            var inferenceParams = new InferenceParams()
+            {
+                Temperature = 0.6f,
+                AntiPrompts = new List<string> { "Question:", "#", "Question: ", ".\n" },
                 MaxTokens = 50,
                 Grammar = grammarInstance
             };
@@ -6,8 +6,8 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             var prompt = File.ReadAllText("Assets/dan.txt").Trim();
             var parameters = new ModelParams(modelPath)
@@ -6,8 +6,8 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             var prompt = (await File.ReadAllTextAsync("Assets/chat-with-bob.txt")).Trim();
             var parameters = new ModelParams(modelPath)
@@ -1,12 +1,6 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using LLamaSharp.KernelMemory;
+using LLamaSharp.KernelMemory;
 using Microsoft.KernelMemory;
 using Microsoft.KernelMemory.Configuration;
 using Microsoft.KernelMemory.Handlers;
 namespace LLama.Examples.Examples
 {
@@ -14,14 +8,18 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.WriteLine("Example from: https://github.com/microsoft/kernel-memory/blob/main/examples/101-using-core-nuget/Program.cs");
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
+            Console.ForegroundColor = ConsoleColor.Yellow;
+            Console.WriteLine("This example is from : \n" +
+                "https://github.com/microsoft/kernel-memory/blob/main/examples/101-using-core-nuget/Program.cs");
+            var searchClientConfig = new SearchClientConfig
+            {
+                MaxMatchesCount = 1,
+                AnswerTokens = 100,
+            };
             var memory = new KernelMemoryBuilder()
                 .WithLLamaSharpDefaults(new LLamaSharpConfig(modelPath)
                 {
@@ -1,5 +1,4 @@
-using DocumentFormat.OpenXml.Bibliography;
-using LLama.Common;
+using LLama.Common;
 namespace LLama.Examples.Examples
 {
@@ -7,8 +6,8 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             var prompt = (await File.ReadAllTextAsync("Assets/chat-with-bob.txt")).Trim();
             var parameters = new ModelParams(modelPath)
@@ -6,8 +6,8 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             var prompt = (await File.ReadAllTextAsync("Assets/chat-with-bob.txt")).Trim();
             var parameters = new ModelParams(modelPath)
@@ -21,9 +21,10 @@ namespace LLama.Examples.Examples
             var ex = new InteractiveExecutor(context);
             Console.ForegroundColor = ConsoleColor.Yellow;
-            Console.WriteLine("The executor has been enabled. In this example, the prompt is printed, the maximum tokens is set to 64 and the context size is 256. (an example for small scale usage)");
-            Console.ForegroundColor = ConsoleColor.White;
+            Console.WriteLine("The executor has been enabled. In this example, the prompt is printed, " +
+                "the maximum tokens is set to 64 and the context size is 256. (an example for small scale usage)");
+            Console.ForegroundColor = ConsoleColor.White;
             Console.Write(prompt);
             var inferenceParams = new InferenceParams() { Temperature = 0.6f, AntiPrompts = new List<string> { "User:" } };
@@ -4,8 +4,7 @@
     {
         public static void Run()
         {
-            Console.Write("Please input your original model path: ");
-            var inputPath = Console.ReadLine();
+            string inputPath = UserSettings.GetModelPath();
             Console.Write("Please input your output model path: ");
             var outputPath = Console.ReadLine();
@@ -8,9 +8,11 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs");
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
+            Console.ForegroundColor = ConsoleColor.Yellow;
+            Console.WriteLine("This example is from: \n" +
+                "https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs");
             // Load weights into memory
             var parameters = new ModelParams(modelPath);
@@ -19,7 +21,8 @@ namespace LLama.Examples.Examples
             var chatGPT = new LLamaSharpChatCompletion(ex);
-            var chatHistory = chatGPT.CreateNewChat("This is a conversation between the assistant and the user. \n\n You are a librarian, expert about books. ");
+            var chatHistory = chatGPT.CreateNewChat("This is a conversation between the " +
+                "assistant and the user. \n\n You are a librarian, expert about books. ");
             Console.WriteLine("Chat content:");
             Console.WriteLine("------------------------");
@@ -33,7 +36,8 @@ namespace LLama.Examples.Examples
             await MessageOutputAsync(chatHistory);
             // Second user message
-            chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
+            chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn " +
+                "something new about Greece, any suggestion");
             await MessageOutputAsync(chatHistory);
             // Second bot assistant message
@@ -1,9 +1,6 @@
 using LLama.Common;
-using Microsoft.SemanticKernel;
 using Microsoft.SemanticKernel.Memory;
 using LLamaSharp.SemanticKernel.TextEmbedding;
-using Microsoft.SemanticKernel.AI.Embeddings;
-using Microsoft.SemanticKernel.Plugins.Memory;
 namespace LLama.Examples.Examples
 {
@@ -13,10 +10,10 @@ namespace LLama.Examples.Examples
         public static async Task Run()
         {
-            var loggerFactory = ConsoleLogger.LoggerFactory;
-            Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs");
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
+            Console.WriteLine("This example is from: \n" +
+                "https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs");
             var seed = 1337u;
             // Load weights into memory
@@ -1,5 +1,4 @@
-using System.Security.Cryptography;
-using LLama.Common;
+using LLama.Common;
 using LLamaSharp.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel;
 using LLamaSharp.SemanticKernel.TextCompletion;
@@ -12,9 +11,11 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/README.md");
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
+            Console.ForegroundColor = ConsoleColor.Yellow;
+            Console.WriteLine("This example is from: " +
+                "https://github.com/microsoft/semantic-kernel/blob/main/dotnet/README.md");
             // Load weights into memory
             var parameters = new ModelParams(modelPath);
@@ -7,8 +7,7 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             var parameters = new ModelParams(modelPath)
             {
@@ -1,5 +1,4 @@
-using System.Security.Cryptography;
-using System.Text;
+using System.Text;
 using LLama.Abstractions;
 using LLama.Common;
@@ -9,8 +8,7 @@ namespace LLama.Examples.Examples
     {
         public static async Task Run()
         {
-            Console.Write("Please input your model path: ");
-            var modelPath = Console.ReadLine();
+            string modelPath = UserSettings.GetModelPath();
             // Load weights into memory
             var @params = new ModelParams(modelPath);
@@ -1,11 +1,20 @@
-using LLama.Examples.Examples;
-using LLama.Native;
+using LLama.Native;
-Console.WriteLine("======================================================================================================");
+Console.WriteLine(
+    """
+    ======================================================================================================
+    __ __ ____ __
+    /\ \ /\ \ /\ _`\ /\ \
+    \ \ \ \ \ \ __ ___ ___ __ \ \,\L\_\\ \ \___ __ _ __ _____
+    \ \ \ __\ \ \ __ /'__`\ /' __` __`\ /'__`\ \/_\__ \ \ \ _ `\ /'__`\ /\`'__\/\ '__`\
+    \ \ \L\ \\ \ \L\ \/\ \L\.\_ /\ \/\ \/\ \ /\ \L\.\_ /\ \L\ \\ \ \ \ \ /\ \L\.\_\ \ \/ \ \ \L\ \
+    \ \____/ \ \____/\ \__/.\_\\ \_\ \_\ \_\\ \__/.\_\\ `\____\\ \_\ \_\\ \__/.\_\\ \_\ \ \ ,__/
+    \/___/ \/___/ \/__/\/_/ \/_/\/_/\/_/ \/__/\/_/ \/_____/ \/_/\/_/ \/__/\/_/ \/_/ \ \ \/
+    \ \_\
+    \/_/
+    ======================================================================================================
-Console.WriteLine(" __ __ ____ __ \r\n/\\ \\ /\\ \\ /\\ _`\\ /\\ \\ \r\n\\ \\ \\ \\ \\ \\ __ ___ ___ __ \\ \\,\\L\\_\\\\ \\ \\___ __ _ __ _____ \r\n \\ \\ \\ __\\ \\ \\ __ /'__`\\ /' __` __`\\ /'__`\\ \\/_\\__ \\ \\ \\ _ `\\ /'__`\\ /\\`'__\\/\\ '__`\\ \r\n \\ \\ \\L\\ \\\\ \\ \\L\\ \\/\\ \\L\\.\\_ /\\ \\/\\ \\/\\ \\ /\\ \\L\\.\\_ /\\ \\L\\ \\\\ \\ \\ \\ \\ /\\ \\L\\.\\_\\ \\ \\/ \\ \\ \\L\\ \\\r\n \\ \\____/ \\ \\____/\\ \\__/.\\_\\\\ \\_\\ \\_\\ \\_\\\\ \\__/.\\_\\\\ `\\____\\\\ \\_\\ \\_\\\\ \\__/.\\_\\\\ \\_\\ \\ \\ ,__/\r\n \\/___/ \\/___/ \\/__/\\/_/ \\/_/\\/_/\\/_/ \\/__/\\/_/ \\/_____/ \\/_/\\/_/ \\/__/\\/_/ \\/_/ \\ \\ \\/ \r\n \\ \\_\\ \r\n \\/_/ ");
-Console.WriteLine("======================================================================================================");
+    """);
 NativeLibraryConfig
     .Instance
@@ -13,6 +22,5 @@ NativeLibraryConfig
     .WithLogs();
+NativeApi.llama_empty_call();
 Console.WriteLine();
-await Runner.Run();
+await ExampleRunner.Run();
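Why the startup call matters (a sketch based on the commit notes): llama_empty_call() is a do-nothing native entry point, so invoking it right after NativeLibraryConfig is configured forces the native llama.cpp binary to load before the menu appears, and any load-time log output lands on the splash screen rather than in the middle of the first example. As the commit notes say, it no longer prints configuration information itself; that appears the first time a model is actually loaded.

    // Intended startup ordering (names taken from Program.cs above):
    NativeLibraryConfig
        .Instance
        .WithLogs();              // must be configured before the library loads

    NativeApi.llama_empty_call(); // no-op call: triggers native library loading now

    Console.WriteLine();
    await ExampleRunner.Run();    // examples run with the library already loaded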
@@ -1,40 +0,0 @@
-using Microsoft.Extensions.Logging;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-namespace LLama.Examples
-{
-    /// <summary>
-    /// Basic logger printing to console
-    /// </summary>
-    internal static class ConsoleLogger
-    {
-        internal static ILogger Logger => LoggerFactory.CreateLogger<object>();
-        internal static ILoggerFactory LoggerFactory => s_loggerFactory.Value;
-        private static readonly Lazy<ILoggerFactory> s_loggerFactory = new(LogBuilder);
-        private static ILoggerFactory LogBuilder()
-        {
-            return Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
-            {
-                builder.SetMinimumLevel(LogLevel.Warning);
-                builder.AddFilter("Microsoft", LogLevel.Trace);
-                builder.AddFilter("Microsoft", LogLevel.Debug);
-                builder.AddFilter("Microsoft", LogLevel.Information);
-                builder.AddFilter("Microsoft", LogLevel.Warning);
-                builder.AddFilter("Microsoft", LogLevel.Error);
-                builder.AddFilter("Microsoft", LogLevel.Warning);
-                builder.AddFilter("System", LogLevel.Warning);
-                builder.AddConsole();
-            });
-        }
-    }
-}
@@ -0,0 +1,73 @@
+namespace LLama.Examples;
+internal static class UserSettings
+{
+    private static readonly string SettingsFilePath = Path.Join(AppContext.BaseDirectory, "DefaultModel.env");
+    private static string? ReadDefaultModelPath()
+    {
+        if (!File.Exists(SettingsFilePath))
+            return null;
+        string path = File.ReadAllText(SettingsFilePath).Trim();
+        if (!File.Exists(path))
+            return null;
+        return path;
+    }
+    private static void WriteDefaultModelPath(string path)
+    {
+        File.WriteAllText(SettingsFilePath, path);
+    }
+    public static string GetModelPath(bool alwaysPrompt = false)
+    {
+        string? defaultPath = ReadDefaultModelPath();
+        return defaultPath is null || alwaysPrompt
+            ? PromptUserForPath()
+            : PromptUserForPathWithDefault(defaultPath);
+    }
+    private static string PromptUserForPath()
+    {
+        while (true)
+        {
+            Console.ForegroundColor = ConsoleColor.White;
+            Console.Write("Please input your model path: ");
+            string? path = Console.ReadLine();
+            if (File.Exists(path))
+            {
+                WriteDefaultModelPath(path);
+                return path;
+            }
+            Console.WriteLine("ERROR: invalid model file path\n");
+        }
+    }
+    private static string PromptUserForPathWithDefault(string defaultPath)
+    {
+        while (true)
+        {
+            Console.ForegroundColor = ConsoleColor.White;
+            Console.WriteLine($"Default model: {defaultPath}");
+            Console.Write($"Please input a model path (or ENTER for default): ");
+            string? path = Console.ReadLine();
+            if (string.IsNullOrWhiteSpace(path))
+            {
+                return defaultPath;
+            }
+            if (File.Exists(path))
+            {
+                WriteDefaultModelPath(path);
+                return path;
+            }
+            Console.WriteLine("ERROR: invalid model file path\n");
+        }
+    }
+}
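Usage note for the helper above: the call sites in this changeset pass no argument, so the saved default is offered whenever DefaultModel.env holds a valid path; the alwaysPrompt parameter (unused by any call site in this diff) would force a fresh prompt.

    // Hypothetical call sites for UserSettings.GetModelPath:
    string path  = UserSettings.GetModelPath();                    // ENTER accepts the saved default
    string fresh = UserSettings.GetModelPath(alwaysPrompt: true);  // ignore any saved default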