diff --git a/LLama.Examples/Examples/LlavaInteractiveModeExecute.cs b/LLama.Examples/Examples/LlavaInteractiveModeExecute.cs index 170bab0c..89b4ae41 100644 --- a/LLama.Examples/Examples/LlavaInteractiveModeExecute.cs +++ b/LLama.Examples/Examples/LlavaInteractiveModeExecute.cs @@ -24,9 +24,9 @@ namespace LLama.Examples.Examples using var context = model.CreateContext(parameters); // Llava Init - using var clipModel = LLavaWeights.LoadFromFile(multiModalProj); + using var clipModel = await LLavaWeights.LoadFromFileAsync(multiModalProj); - var ex = new InteractiveExecutor(context, clipModel ); + var ex = new InteractiveExecutor(context, clipModel); Console.ForegroundColor = ConsoleColor.Yellow; Console.WriteLine("The executor has been enabled. In this example, the prompt is printed, the maximum tokens is set to {0} and the context size is {1}.", maxTokens, parameters.ContextSize ); diff --git a/LLama/LLavaWeights.cs b/LLama/LLavaWeights.cs index ff8959a5..9594dcdb 100644 --- a/LLama/LLavaWeights.cs +++ b/LLama/LLavaWeights.cs @@ -1,5 +1,7 @@ using System; +using System.Threading; +using System.Threading.Tasks; using LLama.Native; namespace LLama; @@ -13,9 +15,9 @@ public sealed class LLavaWeights : IDisposable /// The native handle, which is used in the native APIs /// </summary> /// <remarks>Be careful how you use this!</remarks> - public SafeLlavaModelHandle NativeHandle { get; } - - internal LLavaWeights(SafeLlavaModelHandle weights) + public SafeLlavaModelHandle NativeHandle { get; } + + private LLavaWeights(SafeLlavaModelHandle weights) { NativeHandle = weights; } @@ -31,6 +33,17 @@ public sealed class LLavaWeights : IDisposable return new LLavaWeights(weights); } + /// <summary> + /// Load weights into memory + /// </summary> + /// <param name="mmProject">path to the "mmproj" model file</param> + /// <param name="token"></param> + /// <returns></returns> + public static Task<LLavaWeights> LoadFromFileAsync(string mmProject, CancellationToken token = default) + { + return Task.Run(() => LoadFromFile(mmProject), token); + } + /// <summary> + /// Create the Image Embeddings from the bytes of an image. 
/// diff --git a/LLama/Native/SafeLlavaModelHandle.cs b/LLama/Native/SafeLlavaModelHandle.cs index 2edb7aee..fd898b53 100644 --- a/LLama/Native/SafeLlavaModelHandle.cs +++ b/LLama/Native/SafeLlavaModelHandle.cs @@ -39,8 +39,11 @@ namespace LLama.Native if (!fs.CanRead) throw new InvalidOperationException($"Llava MMP Model file '{modelPath}' is not readable"); - return clip_model_load(modelPath, verbosity) - ?? throw new LoadWeightsFailedException(modelPath); + var handle = clip_model_load(modelPath, verbosity); + if (handle.IsInvalid) + throw new LoadWeightsFailedException(modelPath); + + return handle; } ///