- Passing the `ILogger` through to every call of `CreateContext`

- Passing `ILogger` into executors
tags/v0.6.0
Martin Evans, 2 years ago
commit f1e5a8f995

4 changed files with 19 additions and 12 deletions
  1. LLama/LLamaInstructExecutor.cs   +8 -4
  2. LLama/LLamaInteractExecutor.cs   +2 -1
  3. LLama/LLamaStatelessExecutor.cs  +6 -5
  4. LLama/LLamaWeights.cs            +3 -2

LLama/LLamaInstructExecutor.cs  +8 -4

@@ -17,7 +17,8 @@ namespace LLama
     /// <summary>
     /// The LLama executor for instruct mode.
     /// </summary>
-    public class InstructExecutor : StatefulExecutorBase
+    public class InstructExecutor
+        : StatefulExecutorBase
     {
         private bool _is_prompt_run = true;
         private readonly string _instructionPrefix;
@@ -28,11 +29,14 @@ namespace LLama
         ///
         /// </summary>
         /// <param name="context"></param>
-        /// <param name="logger"></param>
         /// <param name="instructionPrefix"></param>
         /// <param name="instructionSuffix"></param>
-        public InstructExecutor(LLamaContext context, ILogger logger = null!, string instructionPrefix = "\n\n### Instruction:\n\n",
-            string instructionSuffix = "\n\n### Response:\n\n") : base(context)
+        /// <param name="logger"></param>
+        public InstructExecutor(LLamaContext context,
+                                string instructionPrefix = "\n\n### Instruction:\n\n",
+                                string instructionSuffix = "\n\n### Response:\n\n",
+                                ILogger? logger = null)
+            : base(context, logger)
         {
             _inp_pfx = Context.Tokenize(instructionPrefix, true);
             _inp_sfx = Context.Tokenize(instructionSuffix, false);
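
For reference, a minimal sketch of calling the reordered constructor. Only the `new InstructExecutor(...)` line reflects this diff; the model path, `ModelParams`, `LLamaWeights.LoadFromFile` and the console logger wiring (`Microsoft.Extensions.Logging.Console`) are illustrative assumptions about typical surrounding code, not part of this commit:

using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

// Hypothetical logger setup; any ILogger works.
using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
var logger = loggerFactory.CreateLogger("LLamaSharp");

// Hypothetical model/context setup.
var parameters = new ModelParams("path/to/model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);
using var context = weights.CreateContext(parameters, logger);

// The logger is now the last optional parameter, after the prompt strings,
// so pass it by name when keeping the default instruction prefix/suffix.
var executor = new InstructExecutor(context, logger: logger);

Because `logger` moved behind the prefix/suffix parameters and defaults to null, existing positional calls keep compiling.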


LLama/LLamaInteractExecutor.cs  +2 -1

@@ -27,7 +27,8 @@ namespace LLama
         /// </summary>
         /// <param name="context"></param>
         /// <param name="logger"></param>
-        public InteractiveExecutor(LLamaContext context) : base(context)
+        public InteractiveExecutor(LLamaContext context, ILogger? logger = null)
+            : base(context, logger)
         {
             _llama_token_newline = NativeApi.llama_token_nl(Context.NativeHandle);
         }
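
The interactive executor gets the same optional parameter. A short sketch under the same assumptions as above (placeholder model path and console logger):

using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
var logger = loggerFactory.CreateLogger<InteractiveExecutor>();

var parameters = new ModelParams("path/to/model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);
using var context = weights.CreateContext(parameters, logger);

// Omitting the second argument keeps the old behaviour (no logger is attached).
var executor = new InteractiveExecutor(context, logger);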


LLama/LLamaStatelessExecutor.cs  +6 -5

@@ -21,9 +21,9 @@ namespace LLama
     public class StatelessExecutor
         : ILLamaExecutor
     {
-        private readonly ILogger? _logger;
         private readonly LLamaWeights _weights;
         private readonly IContextParams _params;
+        private readonly ILogger? _logger;

         /// <summary>
         /// The context used by the executor when running the inference.
@@ -36,24 +36,25 @@ namespace LLama
         /// <param name="weights"></param>
         /// <param name="params"></param>
         /// <param name="logger"></param>
-        public StatelessExecutor(LLamaWeights weights, IContextParams @params)
+        public StatelessExecutor(LLamaWeights weights, IContextParams @params, ILogger? logger = null)
        {
            _weights = weights;
            _params = @params;
+           _logger = logger;

-           Context = _weights.CreateContext(_params);
+           Context = _weights.CreateContext(_params, logger);
            Context.Dispose();
        }

        /// <inheritdoc />
        public async IAsyncEnumerable<string> InferAsync(string text, IInferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
        {
-           using var context = _weights.CreateContext(_params);
+           using var context = _weights.CreateContext(_params, _logger);
            Context = context;

            if (!Context.NativeHandle.IsClosed)
                Context.Dispose();
-           Context = _weights.CreateContext(Context.Params);
+           Context = _weights.CreateContext(Context.Params, _logger);

            if (inferenceParams != null)
            {
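
StatelessExecutor now stores the logger and hands it to every context it creates: the throwaway one in the constructor and the per-call one in `InferAsync`. A usage sketch, again with a placeholder model path and logger wiring; `ModelParams` is assumed to satisfy `IContextParams` here, as elsewhere in the library:

using System;
using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
var logger = loggerFactory.CreateLogger<StatelessExecutor>();

var parameters = new ModelParams("path/to/model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);

// No LLamaContext is passed in; the executor creates its own and forwards the logger.
var executor = new StatelessExecutor(weights, parameters, logger);

await foreach (var token in executor.InferAsync("Question: what is 2 + 2? Answer:"))
    Console.Write(token);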


LLama/LLamaWeights.cs  +3 -2

@@ -81,10 +81,11 @@ namespace LLama
         /// Create a llama_context using this model
         /// </summary>
         /// <param name="params"></param>
+        /// <param name="logger"></param>
         /// <returns></returns>
-        public LLamaContext CreateContext(IContextParams @params)
+        public LLamaContext CreateContext(IContextParams @params, ILogger? logger = null)
        {
-           return new LLamaContext(this, @params);
+           return new LLamaContext(this, @params, logger);
        }
    }
}
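
Because the new parameter defaults to null, existing `CreateContext` calls remain source-compatible; passing a logger simply forwards it to the `LLamaContext` constructor. A brief sketch with a placeholder model path:

using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

var parameters = new ModelParams("path/to/model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);

using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());

// Behaves exactly as before this change (logger defaults to null).
using var quietContext = weights.CreateContext(parameters);

// Forwards the logger to the LLamaContext constructor.
using var loggedContext = weights.CreateContext(parameters, loggerFactory.CreateLogger<LLamaContext>());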
