From 756a1ad0bad30c00fdffe6969efff497cd00eaf6 Mon Sep 17 00:00:00 2001
From: Martin Evans
Date: Tue, 25 Jul 2023 23:52:34 +0100
Subject: [PATCH 1/3] Added a new way to load dependencies, performing CPU feature detection

---
 LLama/Native/NativeApi.cs | 57 +++++++++++++++++++++++++++++++++++++--
 1 file changed, 55 insertions(+), 2 deletions(-)

diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index e9666ea8..17176508 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -25,6 +25,9 @@ namespace LLama.Native
     {
         static NativeApi()
         {
+            // Try to load a preferred library, based on CPU feature detection
+            TryLoadLibrary();
+
             try
             {
                 llama_empty_call();
@@ -35,11 +38,61 @@ namespace LLama.Native
                     "1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
                     "2. You are using a device with only CPU but installed cuda backend. Please install cpu backend instead. \n" +
                     "3. The backend is not compatible with your system cuda environment. Please check and fix it. If the environment is " +
-                    "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
+                    "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
                     "4. One of the dependency of the native library is missed.\n");
             }
-            NativeApi.llama_backend_init(false);
+            llama_backend_init(false);
+        }
+
+        /// <summary>
+        /// Try to load libllama, using CPU feature detection to try and load a more specialised DLL if possible
+        /// </summary>
+        /// <returns>The library handle to unload later, or IntPtr.Zero if no library was loaded</returns>
+        private static IntPtr TryLoadLibrary()
+        {
+#if NET6_0_OR_GREATER
+
+            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+            {
+                // All of the Windows libraries, in order of preference
+                return TryLoad("win-cuda12/libllama.dll")
+                    ?? TryLoad("win-cuda11/libllama.dll")
+#if NET8_0_OR_GREATER
+                    ?? TryLoad("win-avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
+#endif
+                    ?? TryLoad("win-avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                    ?? TryLoad("win-avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                    ?? IntPtr.Zero;
+            }
+
+            if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+            {
+                return IntPtr.Zero;
+            }
+
+            if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
+            {
+                return IntPtr.Zero;
+            }
+#endif
+
+            return IntPtr.Zero;
+
+#if NET6_0_OR_GREATER
+            // Try to load a DLL from the path if supported. Returns null if nothing is loaded.
+            static IntPtr? TryLoad(string path, bool supported = true)
+            {
+                if (!supported)
+                    return null;
+
+                if (NativeLibrary.TryLoad(path, out var handle))
+                    return handle;
+
+                return null;
+            }
+#endif
         }
+
         private const string libraryName = "libllama";
 
         /// <summary>

From dd4957471fdcc43d0688f1897d3f451e5a752a78 Mon Sep 17 00:00:00 2001
From: Martin Evans
Date: Sat, 2 Sep 2023 14:10:18 +0100
Subject: [PATCH 2/3] Changed paths to match what the GitHub build action produces

---
 LLama/Native/NativeApi.cs | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index 17176508..f16462ac 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -55,13 +55,13 @@ namespace LLama.Native
             if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
             {
                 // All of the Windows libraries, in order of preference
-                return TryLoad("win-cuda12/libllama.dll")
-                    ?? TryLoad("win-cuda11/libllama.dll")
+                return TryLoad("win/cu12.1.0/libllama.dll")
+                    ?? TryLoad("win/cu11.7.1/libllama.dll")
 #if NET8_0_OR_GREATER
-                    ?? TryLoad("win-avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
+                    ?? TryLoad("win/avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
 #endif
-                    ?? TryLoad("win-avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
-                    ?? TryLoad("win-avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                    ?? TryLoad("win/avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                    ?? TryLoad("win/avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
                     ?? IntPtr.Zero;
             }
 

From 8f58a40fb96dde3e4c9262d789cb5863d2e8f66d Mon Sep 17 00:00:00 2001
From: Martin Evans
Date: Sat, 2 Sep 2023 14:21:06 +0100
Subject: [PATCH 3/3] Added Linux dependency loading

---
 LLama/Native/NativeApi.cs | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/LLama/Native/NativeApi.cs b/LLama/Native/NativeApi.cs
index f16462ac..bce92b1f 100644
--- a/LLama/Native/NativeApi.cs
+++ b/LLama/Native/NativeApi.cs
@@ -55,19 +55,27 @@ namespace LLama.Native
             if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
             {
                 // All of the Windows libraries, in order of preference
-                return TryLoad("win/cu12.1.0/libllama.dll")
-                    ?? TryLoad("win/cu11.7.1/libllama.dll")
+                return TryLoad("cu12.1.0/libllama.dll")
+                    ?? TryLoad("cu11.7.1/libllama.dll")
 #if NET8_0_OR_GREATER
-                    ?? TryLoad("win/avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
+                    ?? TryLoad("avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
 #endif
-                    ?? TryLoad("win/avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
-                    ?? TryLoad("win/avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                    ?? TryLoad("avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                    ?? TryLoad("avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
                     ?? IntPtr.Zero;
             }
 
             if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
             {
-                return IntPtr.Zero;
+                // All of the Linux libraries, in order of preference
+                return TryLoad("cu12.1.0/libllama.so")
+                    ?? TryLoad("cu11.7.1/libllama.so")
+#if NET8_0_OR_GREATER
+                    ?? TryLoad("avx512/libllama.so", System.Runtime.Intrinsics.X86.Avx512.IsSupported)
+#endif
+                    ?? TryLoad("avx2/libllama.so", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                    ?? TryLoad("avx/libllama.so", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                    ?? IntPtr.Zero;
             }
 
             if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))