
LLamaSharpChatCompletionTests.cs

📝 Update LLamaSharpChatCompletion and LLama.Unittest

- Updated the LLamaSharpChatCompletion class in LLama.SemanticKernel/ChatCompletion/LLamaSharpChatCompletion.cs
  - Changed the type of the "_model" field from "StatelessExecutor" to "ILLamaExecutor"
  - Updated the constructor to accept an "ILLamaExecutor" parameter instead of a "StatelessExecutor" parameter
- Updated the project file LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj
- Updated the LLama.Unittest project in LLama.Unittest/LLama.Unittest.csproj
  - Added a "PackageReference" for "Moq" version 4.20.70
- Added the ExtensionMethodsTests class in LLama.Unittest/SemanticKernel/ExtensionMethodsTests.cs
  - Added tests for the "ToLLamaSharpChatHistory" and "ToLLamaSharpInferenceParams" extension methods (a sketch of such a test appears after the file below)
- Added the LLamaSharpChatCompletionTests class in LLama.Unittest/SemanticKernel/LLamaSharpChatCompletionTests.cs
  - Added tests for the LLamaSharpChatCompletion class

ℹ️ The LLamaSharpChatCompletion class in the LLama.SemanticKernel project now uses the ILLamaExecutor interface instead of the StatelessExecutor class. This allows for better abstraction and flexibility in how the class, which provides chat completion functionality in LLamaSharp, is backed by an executor. The LLama.Unittest project has been updated with tests for LLamaSharpChatCompletion and for the extension methods the class relies on.
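Because the constructor now takes the ILLamaExecutor interface, the chat completion service can be backed by any executor implementation, or by a test double as in the test file below. The following is only a minimal sketch of the production-side wiring, not the library's documented example: the model path is a placeholder, and the three optional constructor arguments are passed as null to mirror the test fixture rather than the library defaults.

using LLama;
using LLama.Abstractions;
using LLama.Common;
using LLamaSharp.SemanticKernel.ChatCompletion;

// Sketch: constructing LLamaSharpChatCompletion from any ILLamaExecutor.
// "model.gguf" is a placeholder path; values here are illustrative only.
var parameters = new ModelParams("model.gguf");
using var weights = LLamaWeights.LoadFromFile(parameters);

// Previously only a StatelessExecutor was accepted; after this change any
// ILLamaExecutor implementation (or a mock, as in the tests) can be used.
ILLamaExecutor executor = new StatelessExecutor(weights, parameters);
var chatCompletion = new LLamaSharpChatCompletion(executor, null, null, null);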
using Xunit;
using Moq;
using LLama;
using LLama.Abstractions;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Services;
using System;
using System.IO;
using System.Linq; // for ToAsyncEnumerable (System.Linq.Async)
using System.Runtime.CompilerServices;
using System.Text;
using static LLama.LLamaTransforms;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading;

namespace LLamaSharp.SemanticKernel.ChatCompletion.Tests
{
    public class LLamaSharpChatCompletionTests
    {
        // Mocked ILLamaExecutor that backs the service under test.
        private Mock<ILLamaExecutor> mockStatelessExecutor;

        public LLamaSharpChatCompletionTests()
        {
            this.mockStatelessExecutor = new Mock<ILLamaExecutor>();
        }

        private LLamaSharpChatCompletion CreateLLamaSharpChatCompletion()
        {
            return new LLamaSharpChatCompletion(
                this.mockStatelessExecutor.Object,
                null,
                null,
                null);
        }

        [Fact]
        public async Task GetChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange
            var unitUnderTest = this.CreateLLamaSharpChatCompletion();
            ChatHistory chatHistory = new ChatHistory();
            PromptExecutionSettings? executionSettings = null;
            Kernel? kernel = null;
            CancellationToken cancellationToken = default;

            // Stub InferAsync to stream a single "test" token.
            mockStatelessExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());

            // Act
            var result = await unitUnderTest.GetChatMessageContentsAsync(
                chatHistory,
                executionSettings,
                kernel,
                cancellationToken);

            // Assert
            Assert.True(result.Count > 0);
        }

        [Fact]
        public async Task GetStreamingChatMessageContentsAsync_StateUnderTest_ExpectedBehavior()
        {
            // Arrange
            var unitUnderTest = this.CreateLLamaSharpChatCompletion();
            ChatHistory chatHistory = new ChatHistory();
            PromptExecutionSettings? executionSettings = null;
            Kernel? kernel = null;
            CancellationToken cancellationToken = default;

            // Stub InferAsync to stream a single "test" token.
            mockStatelessExecutor.Setup(e => e.InferAsync(It.IsAny<string>(), It.IsAny<IInferenceParams>(), It.IsAny<CancellationToken>()))
                .Returns(new List<string> { "test" }.ToAsyncEnumerable());

            // Act
            await foreach (var result in unitUnderTest.GetStreamingChatMessageContentsAsync(
                chatHistory,
                executionSettings,
                kernel,
                cancellationToken))
            {
                // Assert
                Assert.NotNull(result);
            }
        }
    }
}
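The commit also adds ExtensionMethodsTests for the ToLLamaSharpChatHistory and ToLLamaSharpInferenceParams helpers; that file is not reproduced above. Below is a minimal sketch of what one such test might look like, assuming ToLLamaSharpChatHistory converts a Semantic Kernel ChatHistory into LLamaSharp's chat history type and exposes the converted entries through a Messages collection; the namespace and exact signature are assumptions and should be checked against ExtensionMethods in LLamaSharp.SemanticKernel.

using LLamaSharp.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Xunit;

namespace LLamaSharp.SemanticKernel.Tests
{
    public class ExtensionMethodsTests
    {
        [Fact]
        public void ToLLamaSharpChatHistory_CopiesUserMessage()
        {
            // Arrange: a Semantic Kernel chat history with one user message.
            var chatHistory = new ChatHistory();
            chatHistory.AddUserMessage("Hello");

            // Act: convert to LLamaSharp's chat history type (assumed extension method).
            var llamaHistory = chatHistory.ToLLamaSharpChatHistory();

            // Assert: the single message survives the conversion.
            Assert.Single(llamaHistory.Messages);
        }
    }
}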