| @@ -8,7 +8,10 @@ namespace LLama.Examples.NewVersion | |||||
| { | { | ||||
| Console.Write("Please input your model path: "); | Console.Write("Please input your model path: "); | ||||
| var modelPath = Console.ReadLine(); | var modelPath = Console.ReadLine(); | ||||
| var embedder = new LLamaEmbedder(new ModelParams(modelPath)); | |||||
| var @params = new ModelParams(modelPath); | |||||
| using var weights = LLamaWeights.LoadFromFile(@params); | |||||
| var embedder = new LLamaEmbedder(weights, @params); | |||||
| while (true) | while (true) | ||||
| { | { | ||||
| @@ -8,7 +8,7 @@ namespace LLama.Examples.NewVersion | |||||
| { | { | ||||
| Console.Write("Please input your model path: "); | Console.Write("Please input your model path: "); | ||||
| var modelPath = Console.ReadLine(); | var modelPath = Console.ReadLine(); | ||||
| var prompt = File.ReadAllText("Assets/chat-with-bob.txt").Trim(); | |||||
| var prompt = (await File.ReadAllTextAsync("Assets/chat-with-bob.txt")).Trim(); | |||||
| var parameters = new ModelParams(modelPath) | var parameters = new ModelParams(modelPath) | ||||
| { | { | ||||
| @@ -44,7 +44,7 @@ namespace LLama.Examples.NewVersion | |||||
| Console.Write("Your path to save executor state: "); | Console.Write("Your path to save executor state: "); | ||||
| var executorStatePath = Console.ReadLine(); | var executorStatePath = Console.ReadLine(); | ||||
| ex.SaveState(executorStatePath); | |||||
| await ex.SaveState(executorStatePath); | |||||
| Console.ForegroundColor = ConsoleColor.Yellow; | Console.ForegroundColor = ConsoleColor.Yellow; | ||||
| Console.WriteLine("All states saved!"); | Console.WriteLine("All states saved!"); | ||||
| @@ -53,7 +53,7 @@ namespace LLama.Examples.NewVersion | |||||
| var ctx = ex.Context; | var ctx = ex.Context; | ||||
| ctx.LoadState(modelStatePath); | ctx.LoadState(modelStatePath); | ||||
| ex = new InteractiveExecutor(ctx); | ex = new InteractiveExecutor(ctx); | ||||
| ex.LoadState(executorStatePath); | |||||
| await ex.LoadState(executorStatePath); | |||||
| Console.ForegroundColor = ConsoleColor.Yellow; | Console.ForegroundColor = ConsoleColor.Yellow; | ||||
| Console.WriteLine("Loaded state!"); | Console.WriteLine("Loaded state!"); | ||||
| Console.ForegroundColor = ConsoleColor.White; | Console.ForegroundColor = ConsoleColor.White; | ||||
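
Together with the earlier hunk, state persistence now mixes a synchronous context API with an awaited executor API. A rough end-to-end sketch under that reading; weights.CreateContext and ctx.SaveState are assumed as counterparts of the members visible in the diff, and the file names are placeholders.

using LLama;
using LLama.Common;

var @params = new ModelParams("models/model.bin");        // placeholder path
using var weights = LLamaWeights.LoadFromFile(@params);
using var ctx = weights.CreateContext(@params);           // CreateContext assumed, not shown in this diff
var ex = new InteractiveExecutor(ctx);

// Save: context state stays synchronous, executor state is now awaited.
ctx.SaveState("model.state");                             // assumed counterpart of ctx.LoadState
await ex.SaveState("executor.state");

// Restore: reload the context state, rebuild the executor, then load its state.
ctx.LoadState("model.state");
ex = new InteractiveExecutor(ctx);
await ex.LoadState("executor.state");
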
| @@ -5,7 +5,14 @@ namespace LLama.Unittest; | |||||
| public class LLamaEmbedderTests | public class LLamaEmbedderTests | ||||
| : IDisposable | : IDisposable | ||||
| { | { | ||||
| private readonly LLamaEmbedder _embedder = new(new ModelParams(Constants.ModelPath)); | |||||
| private readonly LLamaEmbedder _embedder; | |||||
| public LLamaEmbedderTests() | |||||
| { | |||||
| var @params = new ModelParams(Constants.ModelPath); | |||||
| using var weights = LLamaWeights.LoadFromFile(@params); | |||||
| _embedder = new(weights, @params); | |||||
| } | |||||
| public void Dispose() | public void Dispose() | ||||
| { | { | ||||
| @@ -1,6 +1,4 @@ | |||||
| using System.Text; | |||||
| using LLama.Common; | |||||
| using Newtonsoft.Json; | |||||
| using LLama.Common; | |||||
| namespace LLama.Unittest | namespace LLama.Unittest | ||||
| { | { | ||||
| @@ -40,34 +38,11 @@ namespace LLama.Unittest | |||||
| }; | }; | ||||
| var settings = new Newtonsoft.Json.JsonSerializerSettings(); | var settings = new Newtonsoft.Json.JsonSerializerSettings(); | ||||
| settings.Converters.Add(new NewtsonsoftEncodingConverter()); | |||||
| var json = Newtonsoft.Json.JsonConvert.SerializeObject(expected, settings); | var json = Newtonsoft.Json.JsonConvert.SerializeObject(expected, settings); | ||||
| var actual = Newtonsoft.Json.JsonConvert.DeserializeObject<ModelParams>(json, settings); | var actual = Newtonsoft.Json.JsonConvert.DeserializeObject<ModelParams>(json, settings); | ||||
| Assert.Equal(expected, actual); | Assert.Equal(expected, actual); | ||||
| } | } | ||||
| public class NewtsonsoftEncodingConverter : JsonConverter | |||||
| { | |||||
| public override bool CanConvert(Type objectType) | |||||
| { | |||||
| return typeof(Encoding).IsAssignableFrom(objectType); | |||||
| } | |||||
| public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) | |||||
| { | |||||
| writer.WriteValue(((Encoding)value).WebName); | |||||
| } | |||||
| public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) | |||||
| { | |||||
| return Encoding.GetEncoding((string)reader.Value); | |||||
| } | |||||
| } | |||||
| } | } | ||||
| } | } | ||||
| @@ -273,6 +273,7 @@ namespace LLama.Native | |||||
| /// <param name="n_past"></param> | /// <param name="n_past"></param> | ||||
| /// <returns>Returns 0 on success</returns> | /// <returns>Returns 0 on success</returns> | ||||
| [DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)] | [DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)] | ||||
| [Obsolete("use llama_decode() instead")] | |||||
| public static extern int llama_eval(SafeLLamaContextHandle ctx, llama_token* tokens, int n_tokens, int n_past); | public static extern int llama_eval(SafeLLamaContextHandle ctx, llama_token* tokens, int n_tokens, int n_past); | ||||
| /// <summary> | /// <summary> | ||||
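
Until a batch-based replacement is wired up, remaining call sites for this P/Invoke will raise CS0618. One way to handle that is to confine the call to a single helper and suppress the warning only there; this is a general C# pattern, not something this diff adds, and the wrapper name is made up while the call mirrors the wrapper shown further down.

using LLama.Native;

internal static class EvalShim
{
    // Keeps the single call to the now-obsolete P/Invoke in one place so the
    // CS0618 (obsolete member) warning is suppressed in exactly one spot.
    public static unsafe bool Eval(SafeLLamaContextHandle ctx, int[] tokens, int n_past)
    {
        fixed (int* pinned = tokens)
        {
#pragma warning disable CS0618 // llama_eval is [Obsolete]; no batch-based path exists yet
            return NativeApi.llama_eval(ctx, pinned, tokens.Length, n_past) == 0;
#pragma warning restore CS0618
        }
    }
}
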
| @@ -204,6 +204,7 @@ namespace LLama.Native | |||||
| { | { | ||||
| fixed (int* pinned = tokens) | fixed (int* pinned = tokens) | ||||
| { | { | ||||
| // the entire `eval` system needs replacing with the new batch system! | |||||
| var ret = NativeApi.llama_eval(this, pinned, tokens.Length, n_past); | var ret = NativeApi.llama_eval(this, pinned, tokens.Length, n_past); | ||||
| return ret == 0; | return ret == 0; | ||||
| } | } | ||||