
feat: add check for backend package.

tags/v0.2.2
Yaohui Liu, 2 years ago
commit 4314f64b9c
3 changed files with 21 additions and 6 deletions:
  1. LLama/LLamaModel.cs        +4  -4
  2. LLama/LLamaSharp.csproj    +1  -1
  3. LLama/Native/NativeApi.cs  +16 -1

LLama/LLamaModel.cs  (+4 -4)

@@ -45,7 +45,7 @@ namespace LLama
         public SafeLLamaContextHandle NativeHandle => _ctx;

         public LLamaModel(string model_path, string model_name, bool echo_input = false, bool verbose = false, int seed = 0, int n_threads = -1, int n_predict = -1,
-            int n_parts = -1, int n_ctx = 512, int n_batch = 512, int n_keep = 0,
+            int n_parts = -1, int n_ctx = 512, int n_batch = 512, int n_keep = 0, int n_gpu_layers = 0,
             Dictionary<llama_token, float> logit_bias = null, int top_k = 40, float top_p = 0.95f,
             float tfs_z = 1.00f, float typical_p = 1.00f, float temp = 0.80f, float repeat_penalty = 1.10f,
             int repeat_last_n = 64, float frequency_penalty = 0.00f, float presence_penalty = 0.00f,
@@ -53,13 +53,13 @@ namespace LLama
             string path_session = "", string input_prefix = "", string input_suffix = "",
             List<string> antiprompt = null, string lora_adapter = "", string lora_base = "",
             bool memory_f16 = true, bool random_prompt = false, bool use_color = false, bool interactive = false,
-            bool embedding = false, bool interactive_first = false, bool instruct = false, bool penalize_nl = true,
+            bool embedding = false, bool interactive_first = false, bool prompt_cache_all = false, bool instruct = false, bool penalize_nl = true,
             bool perplexity = false, bool use_mmap = true, bool use_mlock = false, bool mem_test = false,
             bool verbose_prompt = false) : this(new LLamaParams(seed, n_threads, n_predict, n_parts, n_ctx, n_batch,
-            n_keep, logit_bias, top_k, top_p, tfs_z, typical_p, temp, repeat_penalty, repeat_last_n, frequency_penalty,
+            n_keep, n_gpu_layers, logit_bias, top_k, top_p, tfs_z, typical_p, temp, repeat_penalty, repeat_last_n, frequency_penalty,
             presence_penalty, mirostat, mirostat_tau, mirostat_eta, model_path, prompt, path_session, input_prefix,
             input_suffix, antiprompt, lora_adapter, lora_base, memory_f16, random_prompt, use_color, interactive, embedding,
-            interactive_first, instruct, penalize_nl, perplexity, use_mmap, use_mlock, mem_test, verbose_prompt), model_name, echo_input, verbose)
+            interactive_first, prompt_cache_all, instruct, penalize_nl, perplexity, use_mmap, use_mlock, mem_test, verbose_prompt), model_name, echo_input, verbose)
         {

         }
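This change threads a new n_gpu_layers parameter through the LLamaModel constructor into LLamaParams, so callers can offload part of the network to the GPU when a GPU-enabled backend is installed. A minimal usage sketch against the signature above (the model path, name, and layer count are hypothetical values):

    using LLama;

    // Offload 32 layers to the GPU; has no effect without a GPU-enabled backend.
    var model = new LLamaModel(
        model_path: "models/ggml-model-q4_0.bin",  // placeholder path
        model_name: "my-model",                    // placeholder name
        n_ctx: 512,
        n_gpu_layers: 32);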


LLama/LLamaSharp.csproj  (+1 -1)

@@ -8,7 +8,7 @@
     <Platforms>AnyCPU;x64</Platforms>
     <AllowUnsafeBlocks>True</AllowUnsafeBlocks>

-    <Version>0.2.0</Version>
+    <Version>0.2.2</Version>
     <Authors>Yaohui Liu, Haiping Chen</Authors>
     <Company>SciSharp STACK</Company>
     <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
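Because the native binaries ship in separate backend packages (the LLamaSharp.Backend naming comes from the error message added in NativeApi.cs below), a consumer needs both the main package and one backend. A sketch of the install, assuming a CPU backend package with this ID exists on NuGet:

    dotnet add package LLamaSharp --version 0.2.2
    dotnet add package LLamaSharp.Backend.Cpu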


LLama/Native/NativeApi.cs  (+16 -1)

@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.IO;
 using System.Runtime.InteropServices;
 using System.Text;
+using LLama.Exceptions;

 namespace LLama.Native
 {
@@ -11,10 +12,24 @@ namespace LLama.Native
     {
         static NativeApi()
         {
+            try
+            {
+                llama_empty_call();
+            }
+            catch (DllNotFoundException)
+            {
+                throw new RuntimeError("The native library cannot be found. It could be one of the following reasons: \n" +
+                    "1. No LLamaSharp backend was installed. Please search for LLamaSharp.Backend and install one of them. \n" +
+                    "2. You are using a device with only a CPU but installed the CUDA backend. Please install the CPU backend instead. \n" +
+                    "3. The backend is not compatible with your system's CUDA environment. Please check and fix it. If the environment is " +
+                    "not expected to change, consider building llama.cpp from source or submitting an issue to LLamaSharp.");
+            }
         }
+        private const string libraryName = "libllama";

+        [DllImport("libllama", EntryPoint = "llama_mmap_supported")]
+        public static extern bool llama_empty_call();

         [DllImport(libraryName)]
         public static extern LLamaContextParams llama_context_default_params();
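Any first use of NativeApi now runs this static constructor, so a missing or mismatched backend surfaces immediately with actionable guidance instead of a bare DllNotFoundException deep inside a session. Note that the CLR wraps exceptions thrown from a static constructor, so a consumer actually observes a TypeInitializationException whose InnerException is the RuntimeError above. A minimal consumer-side sketch:

    using System;
    using LLama.Native;

    try
    {
        // The first call into NativeApi triggers the static constructor,
        // which probes the native library via llama_empty_call().
        NativeApi.llama_empty_call();
        Console.WriteLine("Native backend loaded successfully.");
    }
    catch (TypeInitializationException e)
    {
        // An exception thrown from a static constructor is wrapped by the CLR;
        // the RuntimeError raised in the probe is its inner exception.
        Console.WriteLine(e.InnerException?.Message);
    }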


