
Replace ILLamaLogger with ILogger

tags/v0.6.0
sa_ddam213 2 years ago
commit 949b0cde16
2 changed files with 17 additions and 17 deletions
  1. LLama/LLamaContext.cs (+7 -6)
  2. LLama/LLamaExecutorBase.cs (+10 -11)

LLama/LLamaContext.cs (+7 -6)

@@ -10,6 +10,7 @@ using LLama.Common;
 using System.Runtime.InteropServices;
 using LLama.Extensions;
 using LLama.Abstractions;
+using Microsoft.Extensions.Logging;

 namespace LLama
 {
@@ -21,7 +22,7 @@ namespace LLama
     public sealed class LLamaContext
         : IDisposable
     {
-        private readonly ILLamaLogger? _logger;
+        private readonly ILogger? _logger;
         private readonly Encoding _encoding;
         private readonly SafeLLamaContextHandle _ctx;

@@ -62,18 +63,18 @@ namespace LLama
         /// <param name="params">Model params.</param>
         /// <param name="logger">The logger.</param>
         [Obsolete("Use the LLamaWeights.CreateContext instead")]
-        public LLamaContext(IModelParams @params, ILLamaLogger? logger = null)
+        public LLamaContext(IModelParams @params, ILogger? logger = null)
         {
             Params = @params;

             _logger = logger;
             _encoding = @params.Encoding;

-            _logger?.Log(nameof(LLamaContext), $"Initializing LLama model with params: {this.Params}", ILLamaLogger.LogLevel.Info);
+            _logger?.LogInformation($"[LLamaContext] Initializing LLama model with params: {this.Params}");
             _ctx = Utils.InitLLamaContextFromModelParams(Params);
         }

-        internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILLamaLogger? logger = null)
+        internal LLamaContext(SafeLLamaContextHandle nativeContext, IModelParams @params, ILogger? logger = null)
         {
             Params = @params;

@@ -89,7 +90,7 @@ namespace LLama
         /// <param name="params"></param>
         /// <param name="logger"></param>
         /// <exception cref="ObjectDisposedException"></exception>
-        public LLamaContext(LLamaWeights model, IModelParams @params, ILLamaLogger? logger = null)
+        public LLamaContext(LLamaWeights model, IModelParams @params, ILogger? logger = null)
         {
             if (model.NativeHandle.IsClosed)
                 throw new ObjectDisposedException("Cannot create context, model weights have been disposed");
@@ -471,7 +472,7 @@ namespace LLama

                 if (!_ctx.Eval(tokens.Slice(i, n_eval), pastTokensCount, Params.Threads))
                 {
-                    _logger?.Log(nameof(LLamaContext), "Failed to eval.", ILLamaLogger.LogLevel.Error);
+                    _logger?.LogError($"[LLamaContext] Failed to eval.");
                     throw new RuntimeError("Failed to eval.");
                 }
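With this commit the context accepts a standard Microsoft.Extensions.Logging ILogger instead of the library-specific ILLamaLogger. A minimal sketch of how a caller might wire one in afterwards; the model path and ModelParams values are placeholders, and LLamaWeights.LoadFromFile plus the console logging provider package are assumed to be available:

using LLama;
using LLama.Common;
using Microsoft.Extensions.Logging;

// Build a standard ILogger (Microsoft.Extensions.Logging.Console assumed to be referenced).
using var loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
ILogger logger = loggerFactory.CreateLogger<LLamaContext>();

// Placeholder parameters; the model path is illustrative, not taken from this commit.
var @params = new ModelParams("path/to/model.gguf");

// The constructor changed in this diff now takes an ILogger directly.
using var weights = LLamaWeights.LoadFromFile(@params);
using var context = new LLamaContext(weights, @params, logger);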




LLama/LLamaExecutorBase.cs (+10 -11)

@@ -2,6 +2,7 @@
 using LLama.Common;
 using LLama.Exceptions;
 using LLama.Native;
+using Microsoft.Extensions.Logging;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -21,7 +22,7 @@ namespace LLama
        /// <summary>
        /// The logger used by this executor.
        /// </summary>
-        protected ILLamaLogger? _logger;
+        protected ILogger? _logger;
        /// <summary>
        /// The tokens that were already processed by the model.
        /// </summary>
@@ -73,7 +74,7 @@ namespace LLama
        /// </summary>
        /// <param name="context"></param>
        /// <param name="logger"></param>
-        protected StatefulExecutorBase(LLamaContext context, ILLamaLogger? logger = null)
+        protected StatefulExecutorBase(LLamaContext context, ILogger? logger = null)
        {
            Context = context;
            _logger = logger;
@@ -99,20 +100,20 @@ namespace LLama
            }
            if (File.Exists(filename))
            {
-                _logger?.Log("LLamaExecutor", $"Attempting to load saved session from {filename}", ILLamaLogger.LogLevel.Info);
+                _logger?.LogInformation($"[LLamaExecutor] Attempting to load saved session from {filename}");
                llama_token[] session_tokens = new llama_token[Context.ContextSize];
                ulong n_token_count_out = 0;
                if (!NativeApi.llama_load_session_file(Context.NativeHandle, _pathSession, session_tokens, (ulong)Context.ContextSize, &n_token_count_out))
                {
-                    _logger?.Log("LLamaExecutor", $"Failed to load session file {filename}", ILLamaLogger.LogLevel.Error);
+                    _logger?.LogError($"[LLamaExecutor] Failed to load session file {filename}");
                    throw new RuntimeError($"Failed to load session file {_pathSession}");
                }
                _session_tokens = session_tokens.Take((int)n_token_count_out).ToList();
-                _logger?.Log("LLamaExecutor", $"Loaded a session with prompt size of {session_tokens.Length} tokens", ILLamaLogger.LogLevel.Info);
+                _logger?.LogInformation($"[LLamaExecutor] Loaded a session with prompt size of {session_tokens.Length} tokens");
            }
            else
            {
-                _logger?.Log("LLamaExecutor", $"Session file does not exist, will create", ILLamaLogger.LogLevel.Warning);
+                _logger?.LogWarning($"[LLamaExecutor] Session file does not exist, will create");
            }

            _n_matching_session_tokens = 0;
@@ -128,17 +129,15 @@ namespace LLama
            }
            if (_n_matching_session_tokens >= _embed_inps.Count)
            {
-                _logger?.Log("LLamaExecutor", $"Session file has exact match for prompt!", ILLamaLogger.LogLevel.Info);
+                _logger?.LogInformation("[LLamaExecutor] Session file has exact match for prompt!");
            }
            else if (_n_matching_session_tokens < _embed_inps.Count / 2)
            {
-                _logger?.Log("LLamaExecutor", $"session file has low similarity to prompt ({_n_matching_session_tokens}" +
-                    $" / {_embed_inps.Count} tokens); will mostly be reevaluated", ILLamaLogger.LogLevel.Warning);
+                _logger?.LogWarning($"[LLamaExecutor] Session file has low similarity to prompt ({_n_matching_session_tokens} / {_embed_inps.Count} tokens) will mostly be reevaluated");
            }
            else
            {
-                _logger?.Log("LLamaExecutor", $"Session file matches {_n_matching_session_tokens} / " +
-                    $"{_embed_inps.Count} tokens of prompt", ILLamaLogger.LogLevel.Info);
+                _logger?.LogInformation($"[LLamaExecutor] Session file matches {_n_matching_session_tokens} / {_embed_inps.Count} tokens of prompt");
            }
        }
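The old ILLamaLogger.Log(source, message, level) calls map onto the standard ILogger extension methods, with the source string folded into the message as a "[LLamaExecutor]" prefix. A small sketch of the equivalent calls from consumer code; the class and method names are illustrative, and the message-template form shown last is an alternative rather than what this diff uses:

using Microsoft.Extensions.Logging;

internal static class ExecutorLoggingSample
{
    // As in this commit: the severity picks the extension method (LogInformation/LogWarning/LogError),
    // and the old "LLamaExecutor" source argument becomes a prefix inside the message itself.
    public static void Log(ILogger? logger, string filename)
    {
        logger?.LogInformation($"[LLamaExecutor] Attempting to load saved session from {filename}");
        logger?.LogWarning("[LLamaExecutor] Session file does not exist, will create");
        logger?.LogError($"[LLamaExecutor] Failed to load session file {filename}");

        // Alternative not used by the diff: a message template keeps the file name as a
        // structured {Filename} property instead of interpolating it into the string.
        logger?.LogInformation("[LLamaExecutor] Attempting to load saved session from {Filename}", filename);
    }
}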



