
- Added `Obsolete` attributes to the entire `OldVersion` namespace, so it can be removed in the future

- Minor changes to clean up some of the compiler warnings
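A minimal sketch of the pattern applied across the OldVersion files below (the `LegacyWidget` type is hypothetical, used only to illustrate the shape of the change): each public type gets the `[Obsolete]` attribute, and a `#pragma warning disable` at the top of the file keeps the legacy code itself building without warnings.

```csharp
using System;

// Suppress warnings inside the legacy file itself, so the deprecated code
// still builds cleanly while external callers get the obsolescence warning.
#pragma warning disable

namespace LLama.OldVersion
{
    // Hypothetical example type; the commit adds this attribute to every
    // public class, struct, interface and record in the namespace.
    [Obsolete("The entire LLama.OldVersion namespace will be removed")]
    public class LegacyWidget
    {
        public string Name { get; set; } = string.Empty;
    }
}
```

Code outside the namespace that still uses these types now compiles with a CS0618 warning, which is the signal that lets the whole namespace be deleted in a later release.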
tags/v0.5.1
Martin Evans 2 years ago
commit 829f32b27d
18 changed files with 72 additions and 18 deletions
  1. +2 -2  LLama.Examples/NewVersion/LoadAndSaveState.cs
  2. +1 -0  LLama.Examples/OldVersion/ChatSession.cs
  3. +1 -0  LLama.Examples/OldVersion/ChatWithLLamaModel.cs
  4. +1 -0  LLama.Examples/OldVersion/GetEmbeddings.cs
  5. +1 -0  LLama.Examples/OldVersion/InstructMode.cs
  6. +1 -0  LLama.Examples/OldVersion/SaveAndLoadState.cs
  7. +1 -1  LLama/LLamaExecutorBase.cs
  8. +1 -1  LLama/LLamaInstructExecutor.cs
  9. +4 -2  LLama/LLamaInteractExecutor.cs
  10. +1 -1  LLama/Native/NativeApi.cs
  11. +3 -1  LLama/OldVersion/ChatSession.cs
  12. +3 -1  LLama/OldVersion/IChatModel.cs
  13. +5 -3  LLama/OldVersion/LLamaEmbedder.cs
  14. +6 -1  LLama/OldVersion/LLamaModel.cs
  15. +4 -0  LLama/OldVersion/LLamaParams.cs
  16. +18 -1  LLama/OldVersion/LLamaTypes.cs
  17. +3 -1  LLama/OldVersion/Utils.cs
  18. +16 -3  LLama/Utils.cs

+2 -2  LLama.Examples/NewVersion/LoadAndSaveState.cs

@@ -34,11 +34,11 @@ namespace LLama.Examples.NewVersion
if (prompt == "save")
{
Console.Write("Your path to save model state: ");
string modelStatePath = Console.ReadLine();
var modelStatePath = Console.ReadLine();
ex.Context.SaveState(modelStatePath);

Console.Write("Your path to save executor state: ");
string executorStatePath = Console.ReadLine();
var executorStatePath = Console.ReadLine();
ex.SaveState(executorStatePath);

Console.ForegroundColor = ConsoleColor.Yellow;


+1 -0  LLama.Examples/OldVersion/ChatSession.cs

@@ -7,6 +7,7 @@ using LLama.OldVersion;

namespace LLama.Examples.Old
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class ChatSession
{
LLama.OldVersion.ChatSession<LLama.OldVersion.LLamaModel> _session;


+1 -0  LLama.Examples/OldVersion/ChatWithLLamaModel.cs

@@ -7,6 +7,7 @@ using LLama.OldVersion;

namespace LLama.Examples.Old
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class ChatWithLLamaModel
{
LLama.OldVersion.LLamaModel _model;


+1 -0  LLama.Examples/OldVersion/GetEmbeddings.cs

@@ -7,6 +7,7 @@ using LLama.OldVersion;

namespace LLama.Examples.Old
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class GetEmbeddings
{
LLama.OldVersion.LLamaEmbedder _embedder;


+1 -0  LLama.Examples/OldVersion/InstructMode.cs

@@ -7,6 +7,7 @@ using LLama.OldVersion;

namespace LLama.Examples.Old
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class InstructMode
{
LLama.OldVersion.LLamaModel _model;


+1 -0  LLama.Examples/OldVersion/SaveAndLoadState.cs

@@ -7,6 +7,7 @@ using LLama.OldVersion;

namespace LLama.Examples.Old
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class SaveAndLoadState: IDisposable
{
LLama.OldVersion.LLamaModel _model;


+1 -1  LLama/LLamaExecutorBase.cs

@@ -367,7 +367,7 @@ namespace LLama
public int MatchingSessionTokensCount { get; set; }

[JsonPropertyName("path_session")]
public string SessionFilePath { get; set; }
public string? SessionFilePath { get; set; }

[JsonPropertyName("embd")]
public List<llama_token> Embeds { get; set; }


+1 -1  LLama/LLamaInstructExecutor.cs

@@ -84,7 +84,7 @@ namespace LLama
/// <inheritdoc />
public override void SaveState(string filename)
{
InstructExecutorState state = GetStateData() as InstructExecutorState;
InstructExecutorState state = (InstructExecutorState)GetStateData();
using (FileStream fs = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.Write))
{
JsonSerializer.Serialize<InstructExecutorState>(fs, state);


+4 -2  LLama/LLamaInteractExecutor.cs

@@ -72,10 +72,10 @@ namespace LLama
/// <inheritdoc />
public override void SaveState(string filename)
{
InteractiveExecutorState state = GetStateData() as InteractiveExecutorState;
InteractiveExecutorState state = (InteractiveExecutorState)GetStateData();
using(FileStream fs = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.Write))
{
JsonSerializer.Serialize<InteractiveExecutorState>(fs, state);
JsonSerializer.Serialize(fs, state);
}
}
/// <inheritdoc />
@@ -121,7 +121,9 @@ namespace LLama
/// <summary>
/// Return whether to break the generation.
/// </summary>
/// <param name="inferenceParams"></param>
/// <param name="args"></param>
/// <param name="extraOutputs"></param>
/// <returns></returns>
protected override bool PostProcess(IInferenceParams inferenceParams, InferStateArgs args, out IEnumerable<string>? extraOutputs)
{


+1 -1  LLama/Native/NativeApi.cs

@@ -64,7 +64,7 @@ namespace LLama.Native
/// Return NULL on failure
/// </summary>
/// <param name="path_model"></param>
/// <param name="params_"></param>
/// <param name="params"></param>
/// <returns></returns>
[DllImport(libraryName, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr llama_load_model_from_file(string path_model, LLamaContextParams @params);


+3 -1  LLama/OldVersion/ChatSession.cs

@@ -1,10 +1,12 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

#pragma warning disable

namespace LLama.OldVersion
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class ChatSession<T> where T : IChatModel
{
IChatModel _model;


+3 -1  LLama/OldVersion/IChatModel.cs

@@ -1,9 +1,11 @@
using System;
using System.Collections.Generic;
using System.Text;

#pragma warning disable

namespace LLama.OldVersion
{
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public interface IChatModel
{
string Name { get; }


+5 -3  LLama/OldVersion/LLamaEmbedder.cs

@@ -1,12 +1,14 @@
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Text;
using LLama.Exceptions;

#pragma warning disable

namespace LLama.OldVersion
{
public class LLamaEmbedder : IDisposable
[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class LLamaEmbedder
: IDisposable
{
SafeLLamaContextHandle _ctx;



+6 -1  LLama/OldVersion/LLamaModel.cs

@@ -9,10 +9,15 @@ using System.Linq;
using System.Text;
using LLama.Common;

#pragma warning disable

namespace LLama.OldVersion
{
using llama_token = Int32;
public class LLamaModel : IChatModel, IDisposable

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public class LLamaModel
: IChatModel, IDisposable
{
LLamaParams _params;
SafeLLamaContextHandle _ctx;


+4 -0  LLama/OldVersion/LLamaParams.cs

@@ -1,9 +1,13 @@
using System;
using System.Collections.Generic;

#pragma warning disable

namespace LLama.OldVersion
{
using llama_token = Int32;

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public struct LLamaParams
{
public int seed; // RNG seed


+18 -1  LLama/OldVersion/LLamaTypes.cs

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Text;

#pragma warning disable

namespace LLama.OldVersion
{
@@ -9,33 +10,49 @@ namespace LLama.OldVersion
Human,
Assistant
}

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record EmbeddingUsage(int PromptTokens, int TotalTokens);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record EmbeddingData(int Index, string Object, float[] Embedding);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record Embedding(string Object, string Model, EmbeddingData[] Data, EmbeddingUsage Usage);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record CompletionLogprobs(int[] TextOffset, float[] TokenLogProbs, string[] Tokens, Dictionary<string, float>[] TopLogprobs);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record CompletionChoice(string Text, int Index, CompletionLogprobs? Logprobs, string? FinishReason);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record CompletionUsage(int PromptTokens, int CompletionTokens, int TotalTokens);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record CompletionChunk(string Id, string Object, int Created, string Model, CompletionChoice[] Choices);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record Completion(string Id, string Object, int Created, string Model, CompletionChoice[] Choices, CompletionUsage Usage);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletionMessage(ChatRole Role, string Content, string? Name = null);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletionChoice(int Index, ChatCompletionMessage Message, string? FinishReason);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletion(string Id, string Object, int Created, string Model, ChatCompletionChoice[] Choices, CompletionUsage Usage);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletionChunkDelta(string? Role, string? Content);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletionChunkChoice(int Index, ChatCompletionChunkDelta Delta, string? FinishReason);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatCompletionChunk(string Id, string Model, string Object, int Created, ChatCompletionChunkChoice[] Choices);

[Obsolete("The entire LLama.OldVersion namespace will be removed")]
public record ChatMessageRecord(ChatCompletionMessage Message, DateTime Time);
}

+3 -1  LLama/OldVersion/Utils.cs

@@ -3,14 +3,16 @@ using System;
using System.Collections.Generic;
using System.Text;
using LLama.Exceptions;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.IO;

#pragma warning disable

namespace LLama.OldVersion
{
using llama_token = Int32;

internal static class Utils
{
public static SafeLLamaContextHandle llama_init_from_gpt_params(ref LLamaParams @params)


+16 -3  LLama/Utils.cs

@@ -9,6 +9,7 @@ using LLama.Extensions;
namespace LLama
{
using llama_token = Int32;

public static class Utils
{
public static SafeLLamaContextHandle InitLLamaContextFromModelParams(IModelParams @params)
@@ -20,13 +21,17 @@ namespace LLama
}

[Obsolete("Use SafeLLamaContextHandle Tokenize method instead")]
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public static IEnumerable<llama_token> Tokenize(SafeLLamaContextHandle ctx, string text, bool add_bos, Encoding encoding)
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
{
return ctx.Tokenize(text, add_bos, encoding);
}

[Obsolete("Use SafeLLamaContextHandle GetLogits method instead")]
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public static Span<float> GetLogits(SafeLLamaContextHandle ctx, int length)
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
{
if (length != ctx.VocabCount)
throw new ArgumentException("length must be the VocabSize");
@@ -35,33 +40,41 @@ namespace LLama
}

[Obsolete("Use SafeLLamaContextHandle Eval method instead")]
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public static int Eval(SafeLLamaContextHandle ctx, llama_token[] tokens, int startIndex, int n_tokens, int n_past, int n_threads)
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
{
var slice = tokens.AsSpan().Slice(startIndex, n_tokens);
return ctx.Eval(slice, n_past, n_threads) ? 0 : 1;
}

[Obsolete("Use SafeLLamaContextHandle TokenToString method instead")]
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public static string TokenToString(llama_token token, SafeLLamaContextHandle ctx, Encoding encoding)
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
{
return ctx.TokenToString(token, encoding);
}

[Obsolete("No longer used internally by LlamaSharp")]
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
public static string PtrToString(IntPtr ptr, Encoding encoding)
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member
{
#if NET6_0_OR_GREATER
// ReSharper disable once PossibleUnintendedReferenceComparison
if(encoding == Encoding.UTF8)
{
return Marshal.PtrToStringUTF8(ptr);
return Marshal.PtrToStringUTF8(ptr)!;
}
// ReSharper disable once PossibleUnintendedReferenceComparison
else if(encoding == Encoding.Unicode)
{
return Marshal.PtrToStringUni(ptr);
return Marshal.PtrToStringUni(ptr)!;
}
else
{
return Marshal.PtrToStringAuto(ptr);
return Marshal.PtrToStringAuto(ptr)!;
}
#else
unsafe


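The obsolete wrappers in the LLama/Utils.cs hunk above each point at an equivalent method on SafeLLamaContextHandle. Below is a minimal migration sketch: the helper class is hypothetical and the context handle is assumed to already exist (e.g. created via Utils.InitLLamaContextFromModelParams); only the Tokenize calls themselves come from the diff.

```csharp
using System.Collections.Generic;
using System.Text;
using LLama;
using LLama.Native;

internal static class OldVersionMigrationExample
{
    // Hypothetical helper (not part of the repository): the caller supplies an
    // already-initialised SafeLLamaContextHandle.
    public static IEnumerable<int> TokenizePrompt(SafeLLamaContextHandle ctx, string text)
    {
        // Before: the static helper, now marked [Obsolete] in LLama/Utils.cs.
        // return Utils.Tokenize(ctx, text, true, Encoding.UTF8);

        // After: call the equivalent SafeLLamaContextHandle method directly,
        // exactly as the obsolete wrapper does internally.
        return ctx.Tokenize(text, true, Encoding.UTF8);
    }
}
```

The same substitution applies to GetLogits, Eval and TokenToString, per their respective [Obsolete] messages.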