Browse Source

Remove embedding for now

tags/v0.5.1
Tim Miller 2 years ago
parent
commit
a81edacbfb
5 changed files with 0 additions and 250 deletions
  1. +0
    -1
      LLama.Examples/LLama.Examples.csproj
  2. +0
    -184
      LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
  3. +0
    -5
      LLama.Examples/NewVersion/TestRunner.cs
  4. +0
    -40
      LLama.Examples/RepoUtils.cs
  5. +0
    -20
      LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs

+ 0
- 1
LLama.Examples/LLama.Examples.csproj View File

@@ -27,7 +27,6 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="7.0.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="0.21.230828.2-preview" />
</ItemGroup>



+ 0
- 184
LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs View File

@@ -1,184 +0,0 @@
using System.Reflection.Metadata;
using System.Security.Cryptography;
using System.Text;
using LLama.Abstractions;
using LLama.Common;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.AI.ChatCompletion;
using Microsoft.SemanticKernel.AI.Embeddings;
using Microsoft.SemanticKernel.AI.TextCompletion;
using Microsoft.SemanticKernel.Connectors.AI.LLama.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AI.LLama.TextCompletion;
using Microsoft.SemanticKernel.Connectors.AI.LLama.TextEmbedding;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Skills.Core;

namespace LLama.Examples.NewVersion
{
/// <summary>
/// Example: Semantic Kernel's TextMemorySkill backed by a local LLamaSharp model.
/// Walks through saving facts to a volatile memory store, retrieving them by key,
/// recalling them by semantic similarity, using recall inside a semantic function,
/// and finally forgetting a memory. Ported from the upstream SK sample
/// Example15_MemorySkill.cs (link printed at startup).
/// </summary>
public class SemanticKernelMemorySkill
{
// All facts in this example live in a single named memory collection.
private const string MemoryCollectionName = "aboutMe";

/// <summary>
/// Runs the interactive example end to end: prompts on the console for a model
/// path, builds a kernel with LLamaSharp text-completion and embedding services,
/// then prints the results of each save / retrieve / recall / forget step.
/// </summary>
public static async Task Run()
{
var loggerFactory = ConsoleLogger.LoggerFactory;
Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs");
Console.Write("Please input your model path: ");
var modelPath = Console.ReadLine();

// Load weights into memory
// NOTE(review): EmbeddingMode is switched on because the same context is handed to
// LLamaEmbedder below — presumably required for embedding output; confirm.
var parameters = new ModelParams(modelPath)
{
Seed = RandomNumberGenerator.GetInt32(int.MaxValue),
EmbeddingMode = true
, GpuLayerCount = 50
};
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);
//var ex = new InteractiveExecutor(context);
var ex = new InstructExecutor(context);
var builder = new KernelBuilder();
builder.WithLoggerFactory(loggerFactory);
var embedding = new LLamaEmbedder(context);

// Register the local model as both the text-completion and the embedding service;
// memories are stored in an in-process VolatileMemoryStore (nothing is persisted).
//builder.WithAIService<IChatCompletion>("local-llama", new LLamaSharpChatCompletion(ex), true);
builder.WithAIService<ITextCompletion>("local-llama-text", new LLamaSharpTextCompletion(ex), true);
builder.WithAIService<ITextEmbeddingGeneration>("local-llama-embed", new LLamaSharpEmbeddingGeneration(embedding), true);
builder.WithMemoryStorage(new VolatileMemoryStore());
var kernel = builder.Build();
// ========= Store memories using the kernel =========

await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea");
await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator");
await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005");
await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015");

// ========= Store memories using semantic function =========

// Add Memory as a skill for other functions
var memorySkill = new TextMemorySkill(kernel.Memory);
kernel.ImportSkill(memorySkill);

// Build a semantic function that saves info to memory
const string SaveFunctionDefinition = "{{save $info}}";
var memorySaver = kernel.CreateSemanticFunction(SaveFunctionDefinition);

// Context variables supply the collection, key, and the text to save.
await kernel.RunAsync(memorySaver, new()
{
[TextMemorySkill.CollectionParam] = MemoryCollectionName,
[TextMemorySkill.KeyParam] = "info5",
["info"] = "My family is from New York"
});

// ========= Test memory remember =========
// Direct key-based lookup of each stored fact.
Console.WriteLine("========= Example: Recalling a Memory =========");

var answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info1", loggerFactory);
Console.WriteLine("Memory associated with 'info1': {0}", answer);

answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info2", loggerFactory);
Console.WriteLine("Memory associated with 'info2': {0}", answer);

answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info3", loggerFactory);
Console.WriteLine("Memory associated with 'info3': {0}", answer);

answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info4", loggerFactory);
Console.WriteLine("Memory associated with 'info4': {0}", answer);

answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info5", loggerFactory);
Console.WriteLine("Memory associated with 'info5': {0}", answer);

// ========= Test memory recall =========
// Similarity search: a free-text question is matched against stored facts
// (relevance: null uses the skill's default threshold; top 2 results).
Console.WriteLine("========= Example: Recalling an Idea =========");

answer = await memorySkill.RecallAsync("where did I grow up?", MemoryCollectionName, relevance: null, limit: 2, null);
Console.WriteLine("Ask: where did I grow up?");
Console.WriteLine("Answer:\n{0}", answer);

answer = await memorySkill.RecallAsync("where do I live?", MemoryCollectionName, relevance: null, limit: 2, null);
Console.WriteLine("Ask: where do I live?");
Console.WriteLine("Answer:\n{0}", answer);

/*
Output:

Ask: where did I grow up?
Answer:
["My family is from New York","I\u0027ve been living in Seattle since 2005"]

Ask: where do I live?
Answer:
["I\u0027ve been living in Seattle since 2005","My family is from New York"]
*/

// ========= Use memory in a semantic function =========
Console.WriteLine("========= Example: Using Recall in a Semantic Function =========");

// Build a semantic function that uses memory to find facts
const string RecallFunctionDefinition = @"
Consider only the facts below when answering questions.

About me: {{recall 'where did I grow up?'}}

About me: {{recall 'where do I live?'}}

Question: {{$input}}

Answer:
";

var aboutMeOracle = kernel.CreateSemanticFunction(RecallFunctionDefinition, maxTokens: 100);

// The {{recall ...}} template calls pull matching memories into the prompt before
// the local model answers the question.
var result = await kernel.RunAsync(aboutMeOracle, new("Do I live in the same town where I grew up?")
{
[TextMemorySkill.CollectionParam] = MemoryCollectionName,
[TextMemorySkill.RelevanceParam] = "0.8"
});

Console.WriteLine("Do I live in the same town where I grew up?\n");
Console.WriteLine(result);

/*
Output:

Do I live in the same town where I grew up?

No, I do not live in the same town where I grew up since my family is from New York and I have been living in Seattle since 2005.
*/

// ========= Remove a memory =========
Console.WriteLine("========= Example: Forgetting a Memory =========");

result = await kernel.RunAsync(aboutMeOracle, new("Tell me a bit about myself")
{
["fact1"] = "What is my name?",
["fact2"] = "What do I do for a living?",
[TextMemorySkill.RelevanceParam] = ".75"
});

Console.WriteLine("Tell me a bit about myself\n");
Console.WriteLine(result);

/*
Approximate Output:
Tell me a bit about myself

My name is Andrea and my family is from New York. I work as a tourist operator.
*/

// Forget "info1" ("My name is Andrea") and ask again — the answer should no
// longer mention the name.
await memorySkill.RemoveAsync(MemoryCollectionName, "info1", loggerFactory);

result = await kernel.RunAsync(aboutMeOracle, new("Tell me a bit about myself"));

Console.WriteLine("Tell me a bit about myself\n");
Console.WriteLine(result);

/*
Approximate Output:
Tell me a bit about myself

I'm from a family originally from New York and I work as a tourist operator. I've been living in Seattle since 2005.
*/
}
}
}

+ 0
- 5
LLama.Examples/NewVersion/TestRunner.cs View File

@@ -20,7 +20,6 @@
Console.WriteLine("10: Constrain response to json format using grammar.");
Console.WriteLine("11: Semantic Kernel Prompt.");
Console.WriteLine("12: Semantic Kernel Chat.");
Console.WriteLine("13: Semantic Kernel Memory Skill.");

while (true)
{
@@ -79,10 +78,6 @@
{
await SemanticKernelChat.Run();
}
else if (choice == 13)
{
await SemanticKernelMemorySkill.Run();
}
else
{
Console.WriteLine("Cannot parse your choice. Please select again.");


+ 0
- 40
LLama.Examples/RepoUtils.cs View File

@@ -1,40 +0,0 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLama.Examples
{
    /// <summary>
    /// Basic logger printing to console.
    /// </summary>
    internal static class ConsoleLogger
    {
        /// <summary>Shared console logger (category: <c>object</c>).</summary>
        internal static ILogger Logger => LoggerFactory.CreateLogger<object>();

        /// <summary>Lazily created factory backing <see cref="Logger"/>.</summary>
        internal static ILoggerFactory LoggerFactory => s_loggerFactory.Value;

        // Lazy so the factory (and its console provider) is only built on first use.
        private static readonly Lazy<ILoggerFactory> s_loggerFactory = new(LogBuilder);

        /// <summary>
        /// Builds a console logger factory with a Warning minimum level for all
        /// categories, including the chatty "Microsoft" and "System" namespaces.
        /// </summary>
        private static ILoggerFactory LogBuilder()
        {
            return Microsoft.Extensions.Logging.LoggerFactory.Create(builder =>
            {
                builder.SetMinimumLevel(LogLevel.Warning);

                // Only one filter rule per provider/category prefix is selected at
                // runtime; the original sample stacked seven overlapping "Microsoft"
                // rules (Trace..Error) of which only the final Warning rule took
                // effect. A single rule per prefix is the equivalent, honest form.
                builder.AddFilter("Microsoft", LogLevel.Warning);
                builder.AddFilter("System", LogLevel.Warning);

                builder.AddConsole();
            });
        }
    }
}

+ 0
- 20
LLama.SemanticKernel/TextEmbedding/LLamaSharpEmbeddingGeneration.cs View File

@@ -1,20 +0,0 @@
using LLama;
using Microsoft.SemanticKernel.AI.Embeddings;

namespace Microsoft.SemanticKernel.Connectors.AI.LLama.TextEmbedding;

/// <summary>
/// Semantic Kernel <see cref="ITextEmbeddingGeneration"/> adapter over a LLamaSharp
/// <see cref="LLamaEmbedder"/>. Embedding generation here is synchronous under the
/// hood, so the result is returned via an already-completed task.
/// </summary>
public sealed class LLamaSharpEmbeddingGeneration : ITextEmbeddingGeneration
{
    // Wrapped embedder; its lifetime is owned by the caller, not this adapter.
    private readonly LLamaEmbedder _embedder;

    /// <summary>Wraps an existing embedder.</summary>
    /// <param name="embedder">Embedder used to produce the vectors.</param>
    /// <exception cref="ArgumentNullException"><paramref name="embedder"/> is null.</exception>
    public LLamaSharpEmbeddingGeneration(LLamaEmbedder embedder)
    {
        _embedder = embedder ?? throw new ArgumentNullException(nameof(embedder));
    }

    /// <inheritdoc/>
    public Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(IList<string> data, CancellationToken cancellationToken = default)
    {
        // The original was marked async with no awaits (compiler warning CS1998);
        // the work is synchronous, so wrap the result in a completed task instead.
        IList<ReadOnlyMemory<float>> embeddings = data
            .Select(text => new ReadOnlyMemory<float>(_embedder.GetEmbeddings(text)))
            .ToList();
        return Task.FromResult(embeddings);
    }
}

Loading…
Cancel
Save