Browse Source

Removed (marked as obsolete) prompting with a string for `Conversation`. Tokenization requires extra parameters (e.g. addBos, special) which require special consideration. For now it's better to tokenize using other tools and pass the tokens directly.

pull/719/head
Martin Evans 1 year ago
parent
commit
3ba49754b1
6 changed files with 8 additions and 24 deletions
  1. +1
    -1
      LLama.Examples/Examples/BatchedExecutorFork.cs
  2. +2
    -2
      LLama.Examples/Examples/BatchedExecutorGuidance.cs
  3. +1
    -1
      LLama.Examples/Examples/BatchedExecutorRewind.cs
  4. +1
    -1
      LLama.Examples/Examples/BatchedExecutorSaveAndLoad.cs
  5. +0
    -17
      LLama/Batched/BatchedExecutor.cs
  6. +3
    -2
      LLama/Batched/Conversation.cs

+ 1
- 1
LLama.Examples/Examples/BatchedExecutorFork.cs View File

@@ -32,7 +32,7 @@ public class BatchedExecutorFork

// Evaluate the initial prompt to create one conversation
using var start = executor.Create();
start.Prompt(prompt);
start.Prompt(executor.Context.Tokenize(prompt));
await executor.Infer();

// Create the root node of the tree


+ 2
- 2
LLama.Examples/Examples/BatchedExecutorGuidance.cs View File

@@ -34,9 +34,9 @@ public class BatchedExecutorGuidance

// Load the two prompts into two conversations
using var guided = executor.Create();
guided.Prompt(positivePrompt);
guided.Prompt(executor.Context.Tokenize(positivePrompt));
using var guidance = executor.Create();
guidance.Prompt(negativePrompt);
guidance.Prompt(executor.Context.Tokenize(negativePrompt));

// Run inference to evaluate prompts
await AnsiConsole


+ 1
- 1
LLama.Examples/Examples/BatchedExecutorRewind.cs View File

@@ -33,7 +33,7 @@ public class BatchedExecutorRewind

// Evaluate the initial prompt to create one conversation
using var conversation = executor.Create();
conversation.Prompt(prompt);
conversation.Prompt(executor.Context.Tokenize(prompt));
// Create the start node wrapping the conversation
var node = new Node(executor.Context);


+ 1
- 1
LLama.Examples/Examples/BatchedExecutorSaveAndLoad.cs View File

@@ -31,7 +31,7 @@ public class BatchedExecutorSaveAndLoad

// Create a conversation
var conversation = executor.Create();
conversation.Prompt(prompt);
conversation.Prompt(executor.Context.Tokenize(prompt));

// Run inference loop
var decoder = new StreamingTokenDecoder(executor.Context);


+ 0
- 17
LLama/Batched/BatchedExecutor.cs View File

@@ -55,23 +55,6 @@ public sealed class BatchedExecutor
Epoch = 1;
}

/// <summary>
/// Start a new <see cref="Conversation"/> with the given prompt
/// </summary>
/// <param name="prompt"></param>
/// <returns></returns>
[Obsolete("Use BatchedExecutor.Create instead")]
public Conversation Prompt(string prompt)
{
if (IsDisposed)
throw new ObjectDisposedException(nameof(BatchedExecutor));

var conversation = Create();
conversation.Prompt(prompt);

return conversation;
}

/// <summary>
/// Start a new <see cref="Conversation"/>
/// </summary>


+ 3
- 2
LLama/Batched/Conversation.cs View File

@@ -166,11 +166,12 @@ public sealed class Conversation
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
public void Prompt(string input)
[Obsolete("Tokenize the text and pass the tokens instead")]
public void Prompt(string input, bool addBos, bool special)
{
AssertCanBePrompted();

Prompt(Executor.Context.Tokenize(input));
Prompt(Executor.Context.Tokenize(input, addBos, special));
}

/// <summary>


Loading…
Cancel
Save