diff --git a/LLama.Examples/LLama.Examples.csproj b/LLama.Examples/LLama.Examples.csproj
index 865a6321..a8abe3ae 100644
--- a/LLama.Examples/LLama.Examples.csproj
+++ b/LLama.Examples/LLama.Examples.csproj
@@ -27,7 +27,6 @@
-
diff --git a/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs b/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
deleted file mode 100644
index ef599cdf..00000000
--- a/LLama.Examples/NewVersion/SemanticKernelMemorySkill.cs
+++ /dev/null
@@ -1,184 +0,0 @@
-using System.Reflection.Metadata;
-using System.Security.Cryptography;
-using System.Text;
-using LLama.Abstractions;
-using LLama.Common;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.AI.ChatCompletion;
-using Microsoft.SemanticKernel.AI.Embeddings;
-using Microsoft.SemanticKernel.AI.TextCompletion;
-using Microsoft.SemanticKernel.Connectors.AI.LLama.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.AI.LLama.TextCompletion;
-using Microsoft.SemanticKernel.Connectors.AI.LLama.TextEmbedding;
-using Microsoft.SemanticKernel.Memory;
-using Microsoft.SemanticKernel.Skills.Core;
-
-namespace LLama.Examples.NewVersion
-{
- public class SemanticKernelMemorySkill
- {
- private const string MemoryCollectionName = "aboutMe";
-
- public static async Task Run()
- {
- var loggerFactory = ConsoleLogger.LoggerFactory;
- Console.WriteLine("Example from: https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example15_MemorySkill.cs");
- Console.Write("Please input your model path: ");
- var modelPath = Console.ReadLine();
-
- // Load weights into memory
- var parameters = new ModelParams(modelPath)
- {
- Seed = RandomNumberGenerator.GetInt32(int.MaxValue),
-            EmbeddingMode = true,
-            GpuLayerCount = 50
- };
- using var model = LLamaWeights.LoadFromFile(parameters);
- using var context = model.CreateContext(parameters);
- //var ex = new InteractiveExecutor(context);
- var ex = new InstructExecutor(context);
- var builder = new KernelBuilder();
- builder.WithLoggerFactory(loggerFactory);
- var embedding = new LLamaEmbedder(context);
-
- //builder.WithAIService("local-llama", new LLamaSharpChatCompletion(ex), true);
- builder.WithAIService("local-llama-text", new LLamaSharpTextCompletion(ex), true);
- builder.WithAIService("local-llama-embed", new LLamaSharpEmbeddingGeneration(embedding), true);
- builder.WithMemoryStorage(new VolatileMemoryStore());
- var kernel = builder.Build();
- // ========= Store memories using the kernel =========
-
- await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea");
- await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator");
- await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005");
- await kernel.Memory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015");
-
- // ========= Store memories using semantic function =========
-
- // Add Memory as a skill for other functions
- var memorySkill = new TextMemorySkill(kernel.Memory);
- kernel.ImportSkill(memorySkill);
-
- // Build a semantic function that saves info to memory
- const string SaveFunctionDefinition = "{{save $info}}";
- var memorySaver = kernel.CreateSemanticFunction(SaveFunctionDefinition);
-
- await kernel.RunAsync(memorySaver, new()
- {
- [TextMemorySkill.CollectionParam] = MemoryCollectionName,
- [TextMemorySkill.KeyParam] = "info5",
- ["info"] = "My family is from New York"
- });
-
- // ========= Test memory remember =========
- Console.WriteLine("========= Example: Recalling a Memory =========");
-
- var answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info1", loggerFactory);
- Console.WriteLine("Memory associated with 'info1': {0}", answer);
-
- answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info2", loggerFactory);
- Console.WriteLine("Memory associated with 'info2': {0}", answer);
-
- answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info3", loggerFactory);
- Console.WriteLine("Memory associated with 'info3': {0}", answer);
-
- answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info4", loggerFactory);
- Console.WriteLine("Memory associated with 'info4': {0}", answer);
-
- answer = await memorySkill.RetrieveAsync(MemoryCollectionName, "info5", loggerFactory);
- Console.WriteLine("Memory associated with 'info5': {0}", answer);
-
- // ========= Test memory recall =========
- Console.WriteLine("========= Example: Recalling an Idea =========");
-
- answer = await memorySkill.RecallAsync("where did I grow up?", MemoryCollectionName, relevance: null, limit: 2, null);
- Console.WriteLine("Ask: where did I grow up?");
- Console.WriteLine("Answer:\n{0}", answer);
-
- answer = await memorySkill.RecallAsync("where do I live?", MemoryCollectionName, relevance: null, limit: 2, null);
- Console.WriteLine("Ask: where do I live?");
- Console.WriteLine("Answer:\n{0}", answer);
-
- /*
- Output:
-
- Ask: where did I grow up?
- Answer:
- ["My family is from New York","I\u0027ve been living in Seattle since 2005"]
-
- Ask: where do I live?
- Answer:
- ["I\u0027ve been living in Seattle since 2005","My family is from New York"]
- */
-
- // ========= Use memory in a semantic function =========
- Console.WriteLine("========= Example: Using Recall in a Semantic Function =========");
-
- // Build a semantic function that uses memory to find facts
- const string RecallFunctionDefinition = @"
-Consider only the facts below when answering questions.
-
-About me: {{recall 'where did I grow up?'}}
-About me: {{recall 'where do I live?'}}
-
-Question: {{$input}}
-
-Answer:
-";
-
- var aboutMeOracle = kernel.CreateSemanticFunction(RecallFunctionDefinition, maxTokens: 100);
-
- var result = await kernel.RunAsync(aboutMeOracle, new("Do I live in the same town where I grew up?")
- {
- [TextMemorySkill.CollectionParam] = MemoryCollectionName,
- [TextMemorySkill.RelevanceParam] = "0.8"
- });
-
- Console.WriteLine("Do I live in the same town where I grew up?\n");
- Console.WriteLine(result);
-
- /*
- Output:
-
- Do I live in the same town where I grew up?
-
- No, I do not live in the same town where I grew up since my family is from New York and I have been living in Seattle since 2005.
- */
-
- // ========= Remove a memory =========
- Console.WriteLine("========= Example: Forgetting a Memory =========");
-
- result = await kernel.RunAsync(aboutMeOracle, new("Tell me a bit about myself")
- {
- ["fact1"] = "What is my name?",
- ["fact2"] = "What do I do for a living?",
- [TextMemorySkill.RelevanceParam] = ".75"
- });
-
- Console.WriteLine("Tell me a bit about myself\n");
- Console.WriteLine(result);
-
- /*
- Approximate Output:
- Tell me a bit about myself
-
- My name is Andrea and my family is from New York. I work as a tourist operator.
- */
-
- await memorySkill.RemoveAsync(MemoryCollectionName, "info1", loggerFactory);
-
- result = await kernel.RunAsync(aboutMeOracle, new("Tell me a bit about myself"));
-
- Console.WriteLine("Tell me a bit about myself\n");
- Console.WriteLine(result);
-
- /*
- Approximate Output:
- Tell me a bit about myself
-
- I'm from a family originally from New York and I work as a tourist operator. I've been living in Seattle since 2005.
- */
- }
- }
-}
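The deleted example above wired LLamaSharp into Semantic Kernel's memory pipeline. For readers who still need that pattern, the following is a minimal sketch of the core setup it exercised. It reuses only the pre-1.0 Semantic Kernel APIs and LLamaSharp connector types that appear in the deleted file; the model path is a placeholder, and the SearchAsync call is the standard ISemanticTextMemory query method from that SK version, not something shown in the diff itself.

using LLama;
using LLama.Common;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AI.LLama.TextCompletion;
using Microsoft.SemanticKernel.Connectors.AI.LLama.TextEmbedding;
using Microsoft.SemanticKernel.Memory;

// Load the model once with embedding support; the text-completion and
// embedding services share the same context.
var parameters = new ModelParams("<model path>") { EmbeddingMode = true }; // placeholder path
using var model = LLamaWeights.LoadFromFile(parameters);
using var context = model.CreateContext(parameters);

var ex = new InstructExecutor(context);
var embedding = new LLamaEmbedder(context);

var builder = new KernelBuilder();
builder.WithAIService("local-llama-text", new LLamaSharpTextCompletion(ex), true);
builder.WithAIService("local-llama-embed", new LLamaSharpEmbeddingGeneration(embedding), true);
builder.WithMemoryStorage(new VolatileMemoryStore());
var kernel = builder.Build();

// Save a fact, then query it back by semantic similarity.
await kernel.Memory.SaveInformationAsync("aboutMe", id: "info1", text: "My name is Andrea");
await foreach (var hit in kernel.Memory.SearchAsync("aboutMe", "what is my name?", limit: 1))
    Console.WriteLine(hit.Metadata.Text);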
diff --git a/LLama.Examples/NewVersion/TestRunner.cs b/LLama.Examples/NewVersion/TestRunner.cs
index c8a7bd31..83316510 100644
--- a/LLama.Examples/NewVersion/TestRunner.cs
+++ b/LLama.Examples/NewVersion/TestRunner.cs
@@ -20,7 +20,6 @@
Console.WriteLine("10: Constrain response to json format using grammar.");
Console.WriteLine("11: Semantic Kernel Prompt.");
Console.WriteLine("12: Semantic Kernel Chat.");
- Console.WriteLine("13: Semantic Kernel Memory Skill.");
while (true)
{
@@ -79,10 +78,6 @@
{
await SemanticKernelChat.Run();
}
- else if (choice == 13)
- {
- await SemanticKernelMemorySkill.Run();
- }
else
{
Console.WriteLine("Cannot parse your choice. Please select again.");
diff --git a/LLama.Examples/RepoUtils.cs b/LLama.Examples/RepoUtils.cs
deleted file mode 100644
index 8e728339..00000000
--- a/LLama.Examples/RepoUtils.cs
+++ /dev/null
@@ -1,40 +0,0 @@
-using Microsoft.Extensions.Logging;
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
-namespace LLama.Examples
-{
-    /// <summary>
-    /// Basic logger printing to console
-    /// </summary>
- internal static class ConsoleLogger
- {
-        internal static ILogger Logger => LoggerFactory.CreateLogger<object>();
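RepoUtils.cs only supplied this console logger, so its deletion leaves callers to build an ILoggerFactory themselves. A minimal sketch of an equivalent, assuming the Microsoft.Extensions.Logging.Console package is referenced:

using Microsoft.Extensions.Logging;

// Equivalent of the deleted ConsoleLogger: a process-wide console logger factory.
using var loggerFactory = LoggerFactory.Create(builder =>
    builder.SetMinimumLevel(LogLevel.Information)
           .AddConsole());

ILogger logger = loggerFactory.CreateLogger<object>();
logger.LogInformation("Console logging without RepoUtils.ConsoleLogger");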