| @@ -30,14 +30,24 @@ namespace LLama.Common | |||||
/// <summary>
/// Create a new queue with the given maximum size, pre-filled with the given items.
/// </summary>
/// <param name="size">The maximum number of items this queue may hold</param>
/// <param name="data">The initial items to copy into the queue</param>
/// <exception cref="ArgumentException">Thrown if <paramref name="data"/> contains more than <paramref name="size"/> items</exception>
public FixedSizeQueue(int size, IEnumerable<T> data)
{
    _maxSize = size;

    // Try an early check on the amount of data supplied (if possible, without
    // enumerating the sequence). This avoids copying the data at all when it
    // is already known to be too large.
    if (data.TryGetNonEnumeratedCount(out var count) && count > size)
        throw new ArgumentException($"The max size set for the queue is {size}, but got {count} initial values.");

    // Size of "data" is unknown, copy it all into a list
    _storage = new List<T>(data);

    // Now check if that list is a valid size. Use _storage.Count here:
    // "count" is not meaningful when TryGetNonEnumeratedCount returned false.
    if (_storage.Count > _maxSize)
        throw new ArgumentException($"The max size set for the queue is {size}, but got {_storage.Count} initial values.");
}
| /// <summary> | |||||
| /// Replace every item in the queue with the given value | |||||
| /// </summary> | |||||
| /// <param name="value">The value to replace all items with</param> | |||||
| /// <returns>returns this</returns> | |||||
| public FixedSizeQueue<T> FillWith(T value) | public FixedSizeQueue<T> FillWith(T value) | ||||
| { | { | ||||
| for(int i = 0; i < Count; i++) | for(int i = 0; i < Count; i++) | ||||
| @@ -95,7 +95,7 @@ namespace LLama | |||||
| _pathSession = filename; | _pathSession = filename; | ||||
| if (string.IsNullOrEmpty(filename)) | if (string.IsNullOrEmpty(filename)) | ||||
| { | { | ||||
| throw new ArgumentNullException("File name cannot be empty."); | |||||
| throw new ArgumentNullException(nameof(filename), "File name cannot be empty."); | |||||
| } | } | ||||
| if (File.Exists(filename)) | if (File.Exists(filename)) | ||||
| { | { | ||||
| @@ -29,8 +29,9 @@ namespace LLama | |||||
/// <summary>
/// Create a new stateless executor wrapping the given model.
/// </summary>
/// <param name="model">The model this executor runs against</param>
public StatelessExecutor(LLamaModel model)
{
    _model = model;

    // Materialize the tokens once; the previous code enumerated the sequence
    // twice (ToArray() and Count()), which re-tokenizes with a lazy source.
    // NOTE(review): assumes Tokenize(" ", true) is used purely to warm up the
    // model state before snapshotting — confirm against Utils.Eval semantics.
    var tokens = model.Tokenize(" ", true).ToArray();
    Utils.Eval(_model.NativeHandle, tokens, 0, tokens.Length, 0, _model.Params.Threads);

    // Snapshot the model state immediately after the warm-up evaluation.
    _originalState = model.GetState();
}
| @@ -161,6 +161,7 @@ namespace LLama | |||||
| { | { | ||||
| _keywords = new(keywords); | _keywords = new(keywords); | ||||
| _maxKeywordLength = _keywords.Max(x => x.Length) + redundancyLength; | _maxKeywordLength = _keywords.Max(x => x.Length) + redundancyLength; | ||||
| _maxKeywordLength = _keywords.Select(x => x.Length).Max() + redundancyLength; | |||||
| _removeAllMatchedTokens = removeAllMatchedTokens; | _removeAllMatchedTokens = removeAllMatchedTokens; | ||||
| } | } | ||||
| /// <inheritdoc /> | /// <inheritdoc /> | ||||