@@ -6,7 +6,7 @@ int choice = 3;
 
 if(choice == 0)
 {
-    ChatSession chat = new(@"D:\development\llama\weights\LLaMA\7B\ggml-model-q4_0.bin", @"D:\development\llama\llama.cpp\prompts\chat-with-bob.txt", new string[] { "User:" });
+    ChatSession chat = new(@"C:\Users\haipi\Source\repos\ggml-model-q4_0.bin", @"C:\Users\haipi\Source\repos\SciSharp\LLamaSharp\LLama.Examples\Assets\chat-with-bob.txt", new string[] { "User:" });
     chat.Run();
 }
 else if(choice == 1)
@@ -0,0 +1,28 @@
+using LLama.WebAPI.Models;
+using LLama.WebAPI.Services;
+using Microsoft.AspNetCore.Mvc;
+using System;
+
+namespace LLama.WebAPI.Controllers
+{
+    [ApiController]
+    [Route("[controller]")]
+    public class ChatController : ControllerBase
+    {
+        private readonly ChatService _service;
+        private readonly ILogger<ChatController> _logger;
+
+        public ChatController(ILogger<ChatController> logger,
+            ChatService service)
+        {
+            _logger = logger;
+            _service = service;
+        }
+
+        [HttpPost("Send")]
+        public string SendMessage([FromBody] SendMessageInput input)
+        {
+            return _service.Send(input);
+        }
+    }
+}
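For reference, a call to the controller above looks roughly like the following from a .NET client. This is a sketch only: the base address and port are assumptions (they would come from launchSettings.json, which is not part of this diff); the route itself follows from [Route("[controller]")] plus [HttpPost("Send")], i.e. POST /Chat/Send.

using System;
using System.Net.Http;
using System.Net.Http.Json;

// Hypothetical client for the ChatController above; the port is an assumption.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };

// The anonymous object serializes to {"text":"..."}, which binds to SendMessageInput.Text.
var response = await client.PostAsJsonAsync("/Chat/Send", new { Text = "Hello, Bob." });
response.EnsureSuccessStatusCode();

// The endpoint returns the model's reply as plain text.
Console.WriteLine(await response.Content.ReadAsStringAsync());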
@@ -0,0 +1,17 @@
+<Project Sdk="Microsoft.NET.Sdk.Web">
+
+  <PropertyGroup>
+    <TargetFramework>net6.0</TargetFramework>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="Swashbuckle.AspNetCore" Version="6.2.3" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
+  </ItemGroup>
+
+</Project>
@@ -0,0 +1,6 @@
+namespace LLama.WebAPI.Models;
+
+public class SendMessageInput
+{
+    public string Text { get; set; }
+}
@@ -0,0 +1,27 @@
+using LLama.WebAPI.Services;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Add services to the container.
+
+builder.Services.AddControllers();
+// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
+builder.Services.AddEndpointsApiExplorer();
+builder.Services.AddSwaggerGen();
+
+builder.Services.AddSingleton<ChatService>();
+
+var app = builder.Build();
+
+// Configure the HTTP request pipeline.
+if (app.Environment.IsDevelopment())
+{
+    app.UseSwagger();
+    app.UseSwaggerUI();
+}
+
+app.UseAuthorization();
+
+app.MapControllers();
+
+app.Run();
@@ -0,0 +1,33 @@
+using LLama.WebAPI.Models;
+
+namespace LLama.WebAPI.Services;
+
+public class ChatService
+{
+    private readonly ChatSession<LLamaModel> _session;
+
+    public ChatService()
+    {
+        LLamaModel model = new(new LLamaParams(model: @"ggml-model-q4_0.bin", n_ctx: 512, interactive: true, repeat_penalty: 1.0f, verbose_prompt: false));
+        _session = new ChatSession<LLamaModel>(model)
+            .WithPromptFile(@"Assets\chat-with-bob.txt")
+            .WithAntiprompt(new string[] { "User:" });
+    }
+
+    public string Send(SendMessageInput input)
+    {
+        Console.ForegroundColor = ConsoleColor.Green;
+        Console.WriteLine(input.Text);
+        Console.ForegroundColor = ConsoleColor.White;
+
+        var outputs = _session.Chat(input.Text);
+        var result = "";
+        foreach (var output in outputs)
+        {
+            Console.Write(output);
+            result += output;
+        }
+
+        return result;
+    }
+}
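One design note on the service above: Program.cs registers ChatService as a singleton, and the single ChatSession it holds is mutated on every call to Send, so concurrent requests to /Chat/Send share that conversation state. A minimal sketch of one way to serialize access follows; the SynchronizedChatService wrapper and its names are illustrative only and not part of this change.

using System.Threading;
using LLama.WebAPI.Models;
using LLama.WebAPI.Services;

// Sketch only: a thin wrapper that lets one request talk to the shared
// chat session at a time.
public class SynchronizedChatService
{
    private readonly ChatService _inner;
    private readonly SemaphoreSlim _lock = new(1, 1);

    public SynchronizedChatService(ChatService inner)
    {
        _inner = inner;
    }

    public string Send(SendMessageInput input)
    {
        _lock.Wait();      // serialize access to the underlying ChatSession
        try
        {
            return _inner.Send(input);
        }
        finally
        {
            _lock.Release();
        }
    }
}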
@@ -0,0 +1,8 @@
+{
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft.AspNetCore": "Warning"
+    }
+  }
+}
@@ -0,0 +1,9 @@
+{
+  "Logging": {
+    "LogLevel": {
+      "Default": "Information",
+      "Microsoft.AspNetCore": "Warning"
+    }
+  },
+  "AllowedHosts": "*"
+}
@@ -8,7 +8,7 @@
     <Platforms>AnyCPU;x64</Platforms>
     <AllowUnsafeBlocks>True</AllowUnsafeBlocks>
 
-    <Version>0.1.1</Version>
+    <Version>0.2.0</Version>
     <Authors>Yaohui Liu, Haiping Chen</Authors>
     <Company>SciSharp STACK</Company>
     <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
@@ -26,7 +26,7 @@
     <PackageLicenseExpression>MIT</PackageLicenseExpression>
     <PackageOutputPath>packages</PackageOutputPath>
     <Platforms>AnyCPU;x64</Platforms>
-    <PackageId>LLama</PackageId>
+    <PackageId>LLamaSharp</PackageId>
     <Configurations>Debug;Release;GPU</Configurations>
   </PropertyGroup>
 
@@ -11,6 +11,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLamaSharp", "LLama\LLamaSh
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WPFDemo", "WPFDemo\WPFDemo.csproj", "{1E952A70-B720-4F76-9856-EC3B4259A80B}"
 EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LLama.WebAPI", "LLama.WebAPI\LLama.WebAPI.csproj", "{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -69,6 +71,18 @@ Global
 		{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|Any CPU.Build.0 = Release|Any CPU
 		{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|x64.ActiveCfg = Release|Any CPU
 		{1E952A70-B720-4F76-9856-EC3B4259A80B}.Release|x64.Build.0 = Release|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|x64.ActiveCfg = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Debug|x64.Build.0 = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.GPU|Any CPU.ActiveCfg = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.GPU|Any CPU.Build.0 = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.GPU|x64.ActiveCfg = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.GPU|x64.Build.0 = Debug|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Release|Any CPU.Build.0 = Release|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Release|x64.ActiveCfg = Release|Any CPU
+		{D3CEC57A-9027-4DA4-AAAC-612A1EB50ADF}.Release|x64.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE