From 4314f64b9ce5d7b6b0b93d3536e50ed9c4915af6 Mon Sep 17 00:00:00 2001
From: Yaohui Liu
Date: Wed, 17 May 2023 03:40:45 +0800
Subject: [PATCH] feat: add check for backend package.

---
 LLama/LLamaModel.cs       |  8 ++++----
 LLama/LLamaSharp.csproj   |  2 +-
 LLama/Native/NativeApi.cs | 17 ++++++++++++++++-
 3 files changed, 21 insertions(+), 6 deletions(-)

diff --git a/LLama/LLamaModel.cs b/LLama/LLamaModel.cs
index eeebc569..7ccc80d1 100644
--- a/LLama/LLamaModel.cs
+++ b/LLama/LLamaModel.cs
@@ -45,7 +45,7 @@ namespace LLama
         public SafeLLamaContextHandle NativeHandle => _ctx;
 
         public LLamaModel(string model_path, string model_name, bool echo_input = false, bool verbose = false, int seed = 0, int n_threads = -1, int n_predict = -1,
-            int n_parts = -1, int n_ctx = 512, int n_batch = 512, int n_keep = 0,
+            int n_parts = -1, int n_ctx = 512, int n_batch = 512, int n_keep = 0, int n_gpu_layers = 0,
             Dictionary<llama_token, float> logit_bias = null, int top_k = 40, float top_p = 0.95f,
             float tfs_z = 1.00f, float typical_p = 1.00f, float temp = 0.80f, float repeat_penalty = 1.10f,
             int repeat_last_n = 64, float frequency_penalty = 0.00f, float presence_penalty = 0.00f,
@@ -53,13 +53,13 @@ namespace LLama
             string path_session = "", string input_prefix = "", string input_suffix = "",
             List<string> antiprompt = null, string lora_adapter = "", string lora_base = "",
             bool memory_f16 = true, bool random_prompt = false, bool use_color = false, bool interactive = false,
-            bool embedding = false, bool interactive_first = false, bool instruct = false, bool penalize_nl = true,
+            bool embedding = false, bool interactive_first = false, bool prompt_cache_all = false, bool instruct = false, bool penalize_nl = true,
             bool perplexity = false, bool use_mmap = true, bool use_mlock = false, bool mem_test = false,
             bool verbose_prompt = false) : this(new LLamaParams(seed, n_threads, n_predict, n_parts, n_ctx, n_batch,
-                n_keep, logit_bias, top_k, top_p, tfs_z, typical_p, temp, repeat_penalty, repeat_last_n, frequency_penalty,
+                n_keep, n_gpu_layers, logit_bias, top_k, top_p, tfs_z, typical_p, temp, repeat_penalty, repeat_last_n, frequency_penalty,
                 presence_penalty, mirostat, mirostat_tau, mirostat_eta, model_path, prompt, path_session, input_prefix,
                 input_suffix, antiprompt, lora_adapter, lora_base, memory_f16, random_prompt, use_color, interactive, embedding,
-                interactive_first, instruct, penalize_nl, perplexity, use_mmap, use_mlock, mem_test, verbose_prompt), model_name, echo_input, verbose)
+                interactive_first, prompt_cache_all, instruct, penalize_nl, perplexity, use_mmap, use_mlock, mem_test, verbose_prompt), model_name, echo_input, verbose)
         {
 
         }
diff --git a/LLama/LLamaSharp.csproj b/LLama/LLamaSharp.csproj
index 5ed68efd..0049d171 100644
--- a/LLama/LLamaSharp.csproj
+++ b/LLama/LLamaSharp.csproj
@@ -8,7 +8,7 @@
     <Platforms>AnyCPU;x64</Platforms>
     <AllowUnsafeBlocks>True</AllowUnsafeBlocks>
 
-    <Version>0.2.0</Version>
+    <Version>0.2.2</Version>
     <Authors>Yaohui Liu, Haiping Chen</Authors>
     <Company>SciSharp STACK</Company>
     <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index 94d7504a..66986a3b 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Runtime.InteropServices;
 using System.Text;
+using LLama.Exceptions;
 
 namespace LLama.Native
 {
@@ -11,10 +12,24 @@ namespace LLama.Native
     {
         static NativeApi()
         {
-
+            try
+            {
+                llama_empty_call();
+            }
+            catch (DllNotFoundException)
+            {
+                throw new RuntimeError("The native library cannot be found. It could be one of the following reasons: \n" +
+                    "1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
+                    "2. You are using a device with only a CPU but installed the cuda backend. Please install the cpu backend instead. \n" +
+                    "3. The backend is not compatible with your system's cuda environment. Please check and fix it. If the environment is " +
+                    "expected not to change, then consider building llama.cpp from source or submitting an issue to LLamaSharp.");
+            }
         }
         private const string libraryName = "libllama";
 
+        [DllImport("libllama", EntryPoint = "llama_mmap_supported")]
+        public static extern bool llama_empty_call();
+
         [DllImport(libraryName)]
         public static extern LLamaContextParams llama_context_default_params();
 
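
Note on the new check: the patch binds llama_empty_call to an arbitrary cheap native entry point (llama_mmap_supported) purely as a load probe. If no backend library can be resolved, the P/Invoke raises DllNotFoundException, which the static constructor converts into a RuntimeError carrying installation guidance. Because the throw happens inside a static constructor, the CLR surfaces it to callers wrapped in a TypeInitializationException. Below is a minimal sketch of how a consumer might observe the new behavior; the model path is a placeholder and the catch pattern is an assumed usage example, not part of this patch:

    using System;
    using LLama;
    using LLama.Exceptions;

    class BackendCheckDemo
    {
        static void Main()
        {
            try
            {
                // First use of any NativeApi member runs the static constructor
                // added in this patch, which probes the native backend library.
                var model = new LLamaModel("models/ggml-model.bin", "demo"); // placeholder path
            }
            catch (TypeInitializationException ex) when (ex.InnerException is RuntimeError err)
            {
                // No LLamaSharp.Backend package is installed, or the installed
                // backend does not match the local CUDA environment; the message
                // lists the suggested fixes from the constructor above.
                Console.Error.WriteLine(err.Message);
            }
        }
    }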