
Merge branch 'master' into more_multi_enumeration_fixes

tags/v0.4.2-preview
Rinne (via GitHub), 2 years ago
commit cd015055a8
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
4 changed files with 20 additions and 8 deletions
  1. LLama/Common/FixedSizeQueue.cs (+15 -5)
  2. LLama/LLamaExecutorBase.cs (+1 -1)
  3. LLama/LLamaStatelessExecutor.cs (+3 -2)
  4. LLama/LLamaTransforms.cs (+1 -0)

LLama/Common/FixedSizeQueue.cs (+15 -5)

@@ -30,14 +30,24 @@ namespace LLama.Common
         /// <param name="data"></param>
         public FixedSizeQueue(int size, IEnumerable<T> data)
         {
+            // Try an early check on the amount of data supplied (if possible)
+            if (data.TryGetNonEnumeratedCount(out var count) && count > size)
+                throw new ArgumentException($"The max size set for the quene is {size}, but got {count} initial values.");
+
+            // Size of "data" is unknown, copy it all into a list
             _maxSize = size;
-            if(data.Count() > size)
-            {
-                throw new ArgumentException($"The max size set for the quene is {size}, but got {data.Count()} initial values.");
-            }
-            _storage = new(data);
+            _storage = new List<T>(data);
+
+            // Now check if that list is a valid size
+            if (_storage.Count > _maxSize)
+                throw new ArgumentException($"The max size set for the quene is {size}, but got {count} initial values.");
         }
 
         /// <summary>
         /// Replace every item in the queue with the given value
         /// </summary>
         /// <param name="value">The value to replace all items with</param>
         /// <returns>returns this</returns>
         public FixedSizeQueue<T> FillWith(T value)
         {
             for(int i = 0; i < Count; i++)
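For context on the pattern the rewritten constructor applies (a minimal standalone sketch, not repository code): Count() on a lazy IEnumerable<T> forces a full enumeration, so the old check walked the incoming data once for the count and a second time for the copy into the list. TryGetNonEnumeratedCount (.NET 6+) only reports a count that is already known, without enumerating, so the new code enumerates the data exactly once and validates the size from the materialised list.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    class EnumerationCountDemo
    {
        static int _enumerations;

        // A deliberately lazy sequence: every enumeration bumps the counter.
        static IEnumerable<int> LazyData()
        {
            _enumerations++;
            for (var i = 0; i < 4; i++)
                yield return i;
        }

        static void Main()
        {
            // Old shape: Count() walks the sequence, the list copy walks it again.
            var data = LazyData();
            if (data.Count() > 8)
                throw new ArgumentException("too many items");
            var storage = new List<int>(data);
            Console.WriteLine(_enumerations);   // 2

            _enumerations = 0;

            // New shape: TryGetNonEnumeratedCount inspects the type without enumerating
            // (it returns false for this iterator), so only the copy enumerates.
            data = LazyData();
            if (data.TryGetNonEnumeratedCount(out var count) && count > 8)
                throw new ArgumentException("too many items");
            storage = new List<int>(data);
            if (storage.Count > 8)
                throw new ArgumentException("too many items");
            Console.WriteLine(_enumerations);   // 1
        }
    }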


LLama/LLamaExecutorBase.cs (+1 -1)

@@ -95,7 +95,7 @@ namespace LLama
             _pathSession = filename;
             if (string.IsNullOrEmpty(filename))
             {
-                throw new ArgumentNullException("File name cannot be empty.");
+                throw new ArgumentNullException(nameof(filename), "File name cannot be empty.");
             }
             if (File.Exists(filename))
             {
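A small standalone illustration of why the added nameof(filename) matters (not repository code): the single-string ArgumentNullException constructor treats its argument as the parameter name, not the message, so the old call produced a misleading exception text.

    using System;

    class ArgumentNullExceptionDemo
    {
        static void Main()
        {
            // Single-string overload: the string is treated as the *parameter name*.
            var before = new ArgumentNullException("File name cannot be empty.");
            Console.WriteLine(before.Message);
            // -> Value cannot be null. (Parameter 'File name cannot be empty.')

            // Name-plus-message overload keeps the two pieces of information separate.
            var after = new ArgumentNullException("filename", "File name cannot be empty.");
            Console.WriteLine(after.Message);
            // -> File name cannot be empty. (Parameter 'filename')
        }
    }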


LLama/LLamaStatelessExecutor.cs (+3 -2)

@@ -29,8 +29,9 @@ namespace LLama
         public StatelessExecutor(LLamaModel model)
         {
             _model = model;
-            var tokens = model.Tokenize(" ", true);
-            Utils.Eval(_model.NativeHandle, tokens.ToArray(), 0, tokens.Count(), 0, _model.Params.Threads);
+            var tokens = model.Tokenize(" ", true).ToArray();
+            Utils.Eval(_model.NativeHandle, tokens, 0, tokens.Length, 0, _model.Params.Threads);
             _originalState = model.GetState();
         }
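The same multiple-enumeration concern motivates this change: if the token sequence is lazy, calling both ToArray() and Count() on it runs the tokenisation twice. A minimal sketch with a hypothetical lazy tokenizer (illustrative only, not the LLamaSharp API):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    class TokenizeOnceDemo
    {
        static int _tokenizerRuns;

        // Hypothetical lazy tokenizer: each enumeration re-runs it.
        static IEnumerable<int> Tokenize(string text)
        {
            _tokenizerRuns++;
            foreach (var ch in text)
                yield return ch;
        }

        static void Main()
        {
            // Old shape: ToArray() enumerates once, Count() enumerates again.
            var lazyTokens = Tokenize(" hello");
            var asArray = lazyTokens.ToArray();
            var total = lazyTokens.Count();
            Console.WriteLine($"{asArray.Length} tokens, Count() = {total}, runs = {_tokenizerRuns}"); // runs = 2

            _tokenizerRuns = 0;

            // New shape: materialise once, then reuse the array and its Length.
            var tokens = Tokenize(" hello").ToArray();
            Console.WriteLine($"{tokens.Length} tokens, runs = {_tokenizerRuns}");                     // runs = 1
        }
    }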



LLama/LLamaTransforms.cs (+1 -0)

@@ -161,6 +161,7 @@ namespace LLama
         {
             _keywords = new(keywords);
             _maxKeywordLength = _keywords.Max(x => x.Length) + redundancyLength;
+            _maxKeywordLength = _keywords.Select(x => x.Length).Max() + redundancyLength;
             _removeAllMatchedTokens = removeAllMatchedTokens;
         }
         /// <inheritdoc />

