
Added checks in `Decode` to skip doing anything if the batch is empty.

Branch: pull/691/head
Martin Evans, 1 year ago
commit 25812762c9
1 changed file with 6 additions and 0 deletions:

  1. LLama/Native/SafeLLamaContextHandle.cs (+6, -0)

LLama/Native/SafeLLamaContextHandle.cs

@@ -368,6 +368,9 @@ namespace LLama.Native
         /// </returns>
         public DecodeResult Decode(LLamaBatch batch)
         {
+            if (batch.TokenCount == 0)
+                return DecodeResult.Ok;
+
             lock (GlobalInferenceLock)
             using (batch.ToNativeBatch(out var nb))
                 return (DecodeResult)llama_decode(this, nb);
@@ -383,6 +386,9 @@ namespace LLama.Native
         /// <returns>A tuple, containing the decode result and the number of tokens that have <b>not</b> been decoded yet.</returns>
         internal (DecodeResult, int) Decode(List<LLamaToken> tokens, LLamaSeqId id, LLamaBatch batch, ref int n_past)
         {
+            if (tokens.Count == 0)
+                return (DecodeResult.Ok, 0);
+
             var batchSize = checked((int)BatchSize);
 
             // Evaluate the prompt, in chunks smaller than the max batch size
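
For illustration, a minimal caller-side sketch of the new behaviour. It assumes an already-initialised SafeLLamaContextHandle named `ctx` and a public parameterless `LLamaBatch` constructor; anything not visible in the diff above is an assumption of this sketch, not confirmed API.

// Sketch only: `ctx` and the LLamaBatch constructor are assumptions.
var batch = new LLamaBatch();       // freshly created, so batch.TokenCount == 0
var result = ctx.Decode(batch);     // with this commit, returns DecodeResult.Ok
                                    // immediately, without taking
                                    // GlobalInferenceLock or calling the
                                    // native llama_decode
// The internal overload guards the same way: an empty token list yields
// (DecodeResult.Ok, 0) before any chunked batching work is done.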

