| @@ -0,0 +1,9 @@ | |||
| namespace LLama.Web.Common | |||
| { | |||
| public enum LLamaExecutorType | |||
| { | |||
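| // Keep these values in sync with the client-side Enums.LLamaExecutorType | |||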
| Interactive = 0, | |||
| Instruct = 1, | |||
| Stateless = 2 | |||
| } | |||
| } | |||
| @@ -0,0 +1,41 @@ | |||
| namespace LLama.Web.Common | |||
| { | |||
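| /// <summary> | |||
| /// Service result carrying a value on success or an error message on failure (see HasError). | |||
| /// </summary> | |||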
| public class ServiceResult<T> : ServiceResult, IServiceResult<T> | |||
| { | |||
| public T Value { get; set; } | |||
| } | |||
| public class ServiceResult | |||
| { | |||
| public string Error { get; set; } | |||
| public bool HasError | |||
| { | |||
| get { return !string.IsNullOrEmpty(Error); } | |||
| } | |||
| public static IServiceResult<T> FromValue<T>(T value) | |||
| { | |||
| return new ServiceResult<T> | |||
| { | |||
| Value = value, | |||
| }; | |||
| } | |||
| public static IServiceResult<T> FromError<T>(string error) | |||
| { | |||
| return new ServiceResult<T> | |||
| { | |||
| Error = error, | |||
| }; | |||
| } | |||
| } | |||
| public interface IServiceResult<T> | |||
| { | |||
| T Value { get; set; } | |||
| string Error { get; set; } | |||
| bool HasError { get; } | |||
| } | |||
| } | |||
| @@ -2,60 +2,58 @@ | |||
| using LLama.Web.Models; | |||
| using LLama.Web.Services; | |||
| using Microsoft.AspNetCore.SignalR; | |||
| using Microsoft.Extensions.Options; | |||
| using System.Diagnostics; | |||
| namespace LLama.Web.Hubs | |||
| { | |||
| public class InteractiveHub : Hub<ISessionClient> | |||
| public class SessionConnectionHub : Hub<ISessionClient> | |||
| { | |||
| private readonly LLamaOptions _options; | |||
| private readonly ILogger<InteractiveHub> _logger; | |||
| private readonly IModelSessionService _modelSessionService; | |||
| private readonly ILogger<SessionConnectionHub> _logger; | |||
| private readonly ConnectionSessionService _modelSessionService; | |||
| public InteractiveHub(ILogger<InteractiveHub> logger, IOptions<LLamaOptions> options, IModelSessionService modelSessionService) | |||
| public SessionConnectionHub(ILogger<SessionConnectionHub> logger, ConnectionSessionService modelSessionService) | |||
| { | |||
| _logger = logger; | |||
| _options = options.Value; | |||
| _modelSessionService = modelSessionService; | |||
| } | |||
| public override async Task OnConnectedAsync() | |||
| { | |||
| _logger.Log(LogLevel.Information, "OnConnectedAsync, Id: {0}", Context.ConnectionId); | |||
| await base.OnConnectedAsync(); | |||
| _logger.Log(LogLevel.Information, "[OnConnectedAsync], Id: {0}", Context.ConnectionId); | |||
| // Notify client of successful connection | |||
| await Clients.Caller.OnStatus(Context.ConnectionId, SessionConnectionStatus.Connected); | |||
| await base.OnConnectedAsync(); | |||
| } | |||
| public override async Task OnDisconnectedAsync(Exception? exception) | |||
| { | |||
| _logger.Log(LogLevel.Information, "[OnDisconnectedAsync], Id: {0}", Context.ConnectionId); | |||
| await _modelSessionService.RemoveAsync(Context.ConnectionId); | |||
| // Remove the connection's session on disconnect | |||
| await _modelSessionService.RemoveAsync(Context.ConnectionId); | |||
| await base.OnDisconnectedAsync(exception); | |||
| } | |||
| [HubMethodName("LoadModel")] | |||
| public async Task OnLoadModel(string modelName, string promptName, string parameterName) | |||
| public async Task OnLoadModel(LLamaExecutorType executorType, string modelName, string promptName, string parameterName) | |||
| { | |||
| _logger.Log(LogLevel.Information, "[OnLoadModel] - Load new model, Connection: {0}, Model: {1}, Prompt: {2}, Parameter: {3}", Context.ConnectionId, modelName, promptName, parameterName); | |||
| // Remove any existing session for this connection | |||
| await _modelSessionService.RemoveAsync(Context.ConnectionId); | |||
| var modelOption = _options.Models.First(x => x.Name == modelName); | |||
| var promptOption = _options.Prompts.First(x => x.Name == promptName); | |||
| var parameterOption = _options.Parameters.First(x => x.Name == parameterName); | |||
| var interactiveExecutor = new InteractiveExecutor(new LLamaModel(modelOption)); | |||
| var modelSession = await _modelSessionService.CreateAsync(Context.ConnectionId, interactiveExecutor, modelOption, promptOption, parameterOption); | |||
| if (modelSession is null) | |||
| // Create model session | |||
| var modelSessionResult = await _modelSessionService.CreateAsync(executorType, Context.ConnectionId, modelName, promptName, parameterName); | |||
| if (modelSessionResult.HasError) | |||
| { | |||
| _logger.Log(LogLevel.Error, "[OnLoadModel] - Failed to add new model session, Connection: {0}", Context.ConnectionId); | |||
| await Clients.Caller.OnError("No model has been loaded"); | |||
| await Clients.Caller.OnError(modelSessionResult.Error); | |||
| return; | |||
| } | |||
| _logger.Log(LogLevel.Information, "[OnLoadModel] - New model session added, Connection: {0}", Context.ConnectionId); | |||
| // Notify client | |||
| await Clients.Caller.OnStatus(Context.ConnectionId, SessionConnectionStatus.Loaded); | |||
| } | |||
| @@ -63,16 +61,17 @@ namespace LLama.Web.Hubs | |||
| [HubMethodName("SendPrompt")] | |||
| public async Task OnSendPrompt(string prompt) | |||
| { | |||
| var stopwatch = Stopwatch.GetTimestamp(); | |||
| _logger.Log(LogLevel.Information, "[OnSendPrompt] - New prompt received, Connection: {0}", Context.ConnectionId); | |||
| // Get this connection's session | |||
| var modelSession = await _modelSessionService.GetAsync(Context.ConnectionId); | |||
| if (modelSession is null) | |||
| { | |||
| _logger.Log(LogLevel.Warning, "[OnSendPrompt] - No model has been loaded for this connection, Connection: {0}", Context.ConnectionId); | |||
| await Clients.Caller.OnError("No model has been loaded"); | |||
| return; | |||
| } | |||
| // Create unique response id | |||
| var responseId = Guid.NewGuid().ToString(); | |||
| @@ -80,6 +79,7 @@ namespace LLama.Web.Hubs | |||
| await Clients.Caller.OnResponse(new ResponseFragment(responseId, isFirst: true)); | |||
| // Send content of response | |||
| var stopwatch = Stopwatch.GetTimestamp(); | |||
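| // Start timing here so the reported elapsed time covers inference only | |||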
| await foreach (var fragment in modelSession.InferAsync(prompt, CancellationTokenSource.CreateLinkedTokenSource(Context.ConnectionAborted))) | |||
| { | |||
| await Clients.Caller.OnResponse(new ResponseFragment(responseId, fragment)); | |||
| @@ -93,6 +93,6 @@ namespace LLama.Web.Hubs | |||
| await Clients.Caller.OnResponse(new ResponseFragment(responseId, signature, isLast: true)); | |||
| _logger.Log(LogLevel.Information, "[OnSendPrompt] - Inference complete, Connection: {0}, Elapsed: {1}, Canceled: {2}", Context.ConnectionId, elapsedTime, modelSession.IsInferCanceled()); | |||
| } | |||
| } | |||
| } | |||
| @@ -25,6 +25,11 @@ namespace LLama.Web.Models | |||
| _outputTransform = new LLamaTransforms.KeywordTextOutputStreamTransform(_promptOptions.OutputFilter, redundancyLength: 5); | |||
| } | |||
| public string ModelName | |||
| { | |||
| get { return _modelOptions.Name; } | |||
| } | |||
| public IAsyncEnumerable<string> InferAsync(string message, CancellationTokenSource cancellationTokenSource) | |||
| { | |||
| _cancellationTokenSource = cancellationTokenSource; | |||
| @@ -0,0 +1,96 @@ | |||
| @page | |||
| @model InstructModel | |||
| @{ | |||
| } | |||
| @Html.AntiForgeryToken() | |||
| <div class="d-flex flex-row h-100 pt-1 pb-1"> | |||
| <div class="d-flex flex-column h-100 border me-1 w-25 overflow-auto"> | |||
| <div class="d-flex flex-row justify-content-between border-bottom p-1 align-items-center"> | |||
| <h4>Instruct</h4> | |||
| <div> | |||
| <span>Hub: <b id="socket">Disconnected</b></span> | |||
| </div> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Model</small> | |||
| <select id="Model" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var modelOption in Model.Options.Models) | |||
| { | |||
| <option value="@modelOption.Name">@modelOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Parameters</small> | |||
| <select id="Parameter" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var parameterOption in Model.Options.Parameters) | |||
| { | |||
| <option value="@parameterOption.Name">@parameterOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Prompt</small> | |||
| <select id="Prompt" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var promptOption in Model.Options.Prompts) | |||
| { | |||
| <option value="@promptOption.Name" data-prompt="@promptOption.Prompt">@promptOption.Name</option> | |||
| } | |||
| </select> | |||
| <textarea id="PromptText" class="form-control mt-1" rows="12" disabled="disabled" style="font-size:13px;resize:none"></textarea> | |||
| </div> | |||
| <div class="d-flex flex-grow-1"></div> | |||
| <div id="session-details" class="m-1"></div> | |||
| <div class="m-1"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="load">Create Session</button> | |||
| </div> | |||
| </div> | |||
| <div class="d-flex flex-column h-100 w-75"> | |||
| <div class="section-head"> | |||
| </div> | |||
| <div id="scroll-container" class="section-content border"> | |||
| <div id="output-container" class="d-flex flex-column gap-1 p-1"> | |||
| </div> | |||
| </div> | |||
| <div class="section-foot"> | |||
| <div class="input-group mt-2"> | |||
| <textarea id="input" type="text" class="form-control" value="what is a tree?" style="resize:none" rows="4">What is an apple?</textarea> | |||
| <div class="d-flex flex-column"> | |||
| <div class="d-flex flex-fill"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="send" disabled="disabled" autocomplete="off">Send Message</button> | |||
| </div> | |||
| <div class="d-flex"> | |||
| <button class="btn btn-outline-secondary w-100" type="button" id="cancel" autocomplete="off"> | |||
| <i class="bi-x-circle"></i> | |||
| </button> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="clear" disabled="disabled" autocomplete="off"> | |||
| <i class="bi-trash3"></i> | |||
| </button> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @{ await Html.RenderPartialAsync("_ChatTemplates"); } | |||
| @section Scripts { | |||
| <script src="~/js/sessionconnectionchat.js"></script> | |||
| <script> | |||
| createConnectionSessionChat(Enums.LLamaExecutorType.Instruct); | |||
| </script> | |||
| } | |||
| @@ -0,0 +1,34 @@ | |||
| using LLama.Web.Common; | |||
| using LLama.Web.Models; | |||
| using LLama.Web.Services; | |||
| using Microsoft.AspNetCore.Mvc; | |||
| using Microsoft.AspNetCore.Mvc.RazorPages; | |||
| using Microsoft.Extensions.Options; | |||
| namespace LLama.Web.Pages | |||
| { | |||
| public class InstructModel : PageModel | |||
| { | |||
| private readonly ILogger<InstructModel> _logger; | |||
| private readonly ConnectionSessionService _modelSessionService; | |||
| public InstructModel(ILogger<InstructModel> logger, IOptions<LLamaOptions> options, ConnectionSessionService modelSessionService) | |||
| { | |||
| _logger = logger; | |||
| Options = options.Value; | |||
| _modelSessionService = modelSessionService; | |||
| } | |||
| public LLamaOptions Options { get; set; } | |||
| public void OnGet() | |||
| { | |||
| } | |||
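| // Called by the page script via ajaxPostJsonAsync('?handler=Cancel', ...) to cancel an in-flight inference | |||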
| public async Task<IActionResult> OnPostCancel(CancelModel model) | |||
| { | |||
| await _modelSessionService.CancelAsync(model.ConnectionId); | |||
| return new JsonResult(default); | |||
| } | |||
| } | |||
| } | |||
| @@ -0,0 +1,96 @@ | |||
| @page | |||
| @model InteractiveModel | |||
| @{ | |||
| } | |||
| @Html.AntiForgeryToken() | |||
| <div class="d-flex flex-row h-100 pt-1 pb-1"> | |||
| <div class="d-flex flex-column h-100 border me-1 w-25 overflow-auto"> | |||
| <div class="d-flex flex-row justify-content-between border-bottom p-1 align-items-center"> | |||
| <h4>Interactive</h4> | |||
| <div> | |||
| <span>Hub: <b id="socket">Disconnected</b></span> | |||
| </div> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Model</small> | |||
| <select id="Model" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var modelOption in Model.Options.Models) | |||
| { | |||
| <option value="@modelOption.Name">@modelOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Parameters</small> | |||
| <select id="Parameter" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var parameterOption in Model.Options.Parameters) | |||
| { | |||
| <option value="@parameterOption.Name">@parameterOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Prompt</small> | |||
| <select id="Prompt" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var promptOption in Model.Options.Prompts) | |||
| { | |||
| <option value="@promptOption.Name" data-prompt="@promptOption.Prompt">@promptOption.Name</option> | |||
| } | |||
| </select> | |||
| <textarea id="PromptText" class="form-control mt-1" rows="12" disabled="disabled" style="font-size:13px;resize:none"></textarea> | |||
| </div> | |||
| <div class="d-flex flex-grow-1"></div> | |||
| <div id="session-details" class="m-1"></div> | |||
| <div class="m-1"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="load">Create Session</button> | |||
| </div> | |||
| </div> | |||
| <div class="d-flex flex-column h-100 w-75"> | |||
| <div class="section-head"> | |||
| </div> | |||
| <div id="scroll-container" class="section-content border"> | |||
| <div id="output-container" class="d-flex flex-column gap-1 p-1"> | |||
| </div> | |||
| </div> | |||
| <div class="section-foot"> | |||
| <div class="input-group mt-2"> | |||
| <textarea id="input" type="text" class="form-control" value="what is a tree?" style="resize:none" rows="4">What is an apple?</textarea> | |||
| <div class="d-flex flex-column"> | |||
| <div class="d-flex flex-fill"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="send" disabled="disabled" autocomplete="off">Send Message</button> | |||
| </div> | |||
| <div class="d-flex"> | |||
| <button class="btn btn-outline-secondary w-100" type="button" id="cancel" autocomplete="off"> | |||
| <i class="bi-x-circle"></i> | |||
| </button> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="clear" disabled="disabled" autocomplete="off"> | |||
| <i class="bi-trash3"></i> | |||
| </button> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @{ await Html.RenderPartialAsync("_ChatTemplates");} | |||
| @section Scripts { | |||
| <script src="~/js/sessionconnectionchat.js"></script> | |||
| <script> | |||
| createConnectionSessionChat(Enums.LLamaExecutorType.Interactive); | |||
| </script> | |||
| } | |||
| @@ -10,9 +10,9 @@ namespace LLama.Web.Pages | |||
| public class InteractiveModel : PageModel | |||
| { | |||
| private readonly ILogger<InteractiveModel> _logger; | |||
| private readonly IModelSessionService _modelSessionService; | |||
| private readonly ConnectionSessionService _modelSessionService; | |||
| public InteractiveModel(ILogger<InteractiveModel> logger, IOptions<LLamaOptions> options, IModelSessionService modelSessionService) | |||
| public InteractiveModel(ILogger<InteractiveModel> logger, IOptions<LLamaOptions> options, ConnectionSessionService modelSessionService) | |||
| { | |||
| _logger = logger; | |||
| Options = options.Value; | |||
| @@ -0,0 +1,4 @@ | |||
| .section-content { | |||
| flex: 1; | |||
| overflow-y: scroll; | |||
| } | |||
| @@ -0,0 +1,97 @@ | |||
| @page | |||
| @model StatelessModel | |||
| @{ | |||
| } | |||
| @Html.AntiForgeryToken() | |||
| <div class="d-flex flex-row h-100 pt-1 pb-1"> | |||
| <div class="d-flex flex-column h-100 border me-1 w-25 overflow-auto"> | |||
| <div class="d-flex flex-row justify-content-between border-bottom p-1 align-items-center"> | |||
| <h4>Stateless</h4> | |||
| <div> | |||
| <span>Hub: <b id="socket">Disconnected</b></span> | |||
| </div> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Model</small> | |||
| <select id="Model" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var modelOption in Model.Options.Models) | |||
| { | |||
| <option value="@modelOption.Name">@modelOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Parameters</small> | |||
| <select id="Parameter" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var parameterOption in Model.Options.Parameters) | |||
| { | |||
| <option value="@parameterOption.Name">@parameterOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Prompt</small> | |||
| <select id="Prompt" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var promptOption in Model.Options.Prompts) | |||
| { | |||
| <option value="@promptOption.Name" data-prompt="@promptOption.Prompt">@promptOption.Name</option> | |||
| } | |||
| </select> | |||
| <textarea id="PromptText" class="form-control mt-1" rows="12" disabled="disabled" style="font-size:13px;resize:none"></textarea> | |||
| </div> | |||
| <div class="d-flex flex-grow-1"></div> | |||
| <div id="session-details" class="m-1"></div> | |||
| <div class="m-1"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="load">Create Session</button> | |||
| </div> | |||
| </div> | |||
| <div class="d-flex flex-column h-100 w-75"> | |||
| <div class="section-head"> | |||
| </div> | |||
| <div id="scroll-container" class="section-content border"> | |||
| <div id="output-container" class="d-flex flex-column gap-1 p-1"> | |||
| </div> | |||
| </div> | |||
| <div class="section-foot"> | |||
| <div class="input-group mt-2"> | |||
| <textarea id="input" type="text" class="form-control" value="what is a tree?" style="resize:none" rows="4">What is an apple?</textarea> | |||
| <div class="d-flex flex-column"> | |||
| <div class="d-flex flex-fill"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="send" disabled="disabled" autocomplete="off">Send Message</button> | |||
| </div> | |||
| <div class="d-flex"> | |||
| <button class="btn btn-outline-secondary w-100" type="button" id="cancel" autocomplete="off"> | |||
| <i class="bi-x-circle"></i> | |||
| </button> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="clear" disabled="disabled" autocomplete="off"> | |||
| <i class="bi-trash3"></i> | |||
| </button> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @{ await Html.RenderPartialAsync("_ChatTemplates"); } | |||
| @section Scripts { | |||
| <script src="~/js/sessionconnectionchat.js"></script> | |||
| <script> | |||
| createConnectionSessionChat(Enums.LLamaExecutorType.Stateless); | |||
| </script> | |||
| } | |||
| @@ -0,0 +1,34 @@ | |||
| using LLama.Web.Common; | |||
| using LLama.Web.Models; | |||
| using LLama.Web.Services; | |||
| using Microsoft.AspNetCore.Mvc; | |||
| using Microsoft.AspNetCore.Mvc.RazorPages; | |||
| using Microsoft.Extensions.Options; | |||
| namespace LLama.Web.Pages | |||
| { | |||
| public class StatelessModel : PageModel | |||
| { | |||
| private readonly ILogger<StatelessModel> _logger; | |||
| private readonly ConnectionSessionService _modelSessionService; | |||
| public StatelessModel(ILogger<StatelessModel> logger, IOptions<LLamaOptions> options, ConnectionSessionService modelSessionService) | |||
| { | |||
| _logger = logger; | |||
| Options = options.Value; | |||
| _modelSessionService = modelSessionService; | |||
| } | |||
| public LLamaOptions Options { get; set; } | |||
| public void OnGet() | |||
| { | |||
| } | |||
| public async Task<IActionResult> OnPostCancel(CancelModel model) | |||
| { | |||
| await _modelSessionService.CancelAsync(model.ConnectionId); | |||
| return new JsonResult(default); | |||
| } | |||
| } | |||
| } | |||
| @@ -0,0 +1,4 @@ | |||
| .section-content { | |||
| flex: 1; | |||
| overflow-y: scroll; | |||
| } | |||
| @@ -1,338 +0,0 @@ | |||
| @page | |||
| @model InteractiveModel | |||
| @{ | |||
| } | |||
| @Html.AntiForgeryToken() | |||
| <div class="d-flex flex-row h-100 pt-1 pb-1"> | |||
| <div class="d-flex flex-column h-100 border me-1 w-25 overflow-auto"> | |||
| <div class="d-flex flex-row justify-content-between border-bottom p-1 align-items-center"> | |||
| <h4>Interactive</h4> | |||
| <div> | |||
| <span>Hub: <b id="socket">Disconnected</b></span> | |||
| </div> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Model</small> | |||
| <select id="Model" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var modelOption in Model.Options.Models) | |||
| { | |||
| <option value="@modelOption.Name">@modelOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Parameters</small> | |||
| <select id="Parameter" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var parameterOption in Model.Options.Parameters) | |||
| { | |||
| <option value="@parameterOption.Name">@parameterOption.Name</option> | |||
| } | |||
| </select> | |||
| </div> | |||
| <div class="m-1"> | |||
| <small>Prompt</small> | |||
| <select id="Prompt" class="form-control form-select input-control" required="required" autocomplete="off"> | |||
| <option value="" disabled selected hidden>Please Select</option> | |||
| @foreach (var promptOption in Model.Options.Prompts) | |||
| { | |||
| <option value="@promptOption.Name" data-prompt="@promptOption.Prompt">@promptOption.Name</option> | |||
| } | |||
| </select> | |||
| <textarea id="PromptText" class="form-control mt-1" rows="12" disabled="disabled" style="font-size:13px;resize:none"></textarea> | |||
| </div> | |||
| <div class="d-flex flex-grow-1"></div> | |||
| <div id="session-details" class="m-1"> | |||
| </div> | |||
| <div class="m-1"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="load">Create Session</button> | |||
| </div> | |||
| </div> | |||
| <div class="d-flex flex-column h-100 w-75"> | |||
| <div class="section-head"> | |||
| </div> | |||
| <div id="scroll-container" class="section-content border"> | |||
| <div id="output-container" class="d-flex flex-column gap-1 p-1"> | |||
| </div> | |||
| </div> | |||
| <div class="section-foot"> | |||
| <div class="input-group mt-2"> | |||
| <textarea id="input" type="text" class="form-control" value="what is a tree?" style="resize:none" rows="4">What is an apple?</textarea> | |||
| <div class="d-flex flex-column"> | |||
| <div class="d-flex flex-fill"> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="send" disabled="disabled" autocomplete="off">Send Message</button> | |||
| </div> | |||
| <div class="d-flex"> | |||
| <button class="btn btn-outline-secondary w-100" type="button" id="cancel" autocomplete="off"> | |||
| <i class="bi-x-circle"></i> | |||
| </button> | |||
| <button class="btn btn-outline-secondary input-control w-100" type="button" id="clear" disabled="disabled" autocomplete="off"> | |||
| <i class="bi-trash3"></i> | |||
| </button> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| <script id="outputErrorTemplate" type="text/html"> | |||
| <i class="form-control text-danger">{{text}}</i> | |||
| </script> | |||
| <script id="outputInfoTemplate" type="text/html"> | |||
| <i class="form-control text-success">{{text}}</i> | |||
| </script> | |||
| <script id="outputUserTemplate" type="text/html"> | |||
| <div class="d-flex flex-row form-control bg-light"> | |||
| <div class="m-2 me-4"> | |||
| <img src="~/image/human.png" width="60"/> | |||
| </div> | |||
| <div class="d-flex flex-column flex-fill justify-content-between"> | |||
| <span class="w-100" style="resize:none" >{{text}}</span> | |||
| <div class="d-flex justify-content-end"> | |||
| <i>{{date}}</i> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </script> | |||
| <script id="outputBotTemplate" type="text/html"> | |||
| <div class="d-flex flex-row form-control"> | |||
| <div class="m-2 me-4"> | |||
| <img src="~/image/robot.png" width="60"/> | |||
| </div> | |||
| <div id="{{id}}" class="d-flex flex-column flex-fill justify-content-between"> | |||
| <span class="content"> | |||
| <img src="~/image/loading.gif" width="30" /> | |||
| </span> | |||
| <div class="d-flex justify-content-end"> | |||
| <div class="d-flex flex-column align-items-end"> | |||
| <i class="date"></i> | |||
| <i> | |||
| <small class="signature"></small> | |||
| </i> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </script> | |||
| <script id="sessionDetailsTemplate" type="text/html"> | |||
| <div> | |||
| <small>Session Details </small> | |||
| </div> | |||
| <div> | |||
| <i>Model: </i> | |||
| <span>{{model}}</span> | |||
| </div> | |||
| <div> | |||
| <i>Prompt: </i> | |||
| <span>{{prompt}}</span> | |||
| </div> | |||
| <div> | |||
| <i>Parameters: </i> | |||
| <span>{{parameter}}</span> | |||
| </div> | |||
| </script> | |||
| @section Scripts { | |||
| <script> | |||
| const outputErrorTemplate = $("#outputErrorTemplate").html(); | |||
| const outputInfoTemplate = $("#outputInfoTemplate").html(); | |||
| const outputUserTemplate = $("#outputUserTemplate").html(); | |||
| const outputBotTemplate = $("#outputBotTemplate").html(); | |||
| const sessionDetailsTemplate = $("#sessionDetailsTemplate").html(); | |||
| let connectionId; | |||
| const connection = new signalR.HubConnectionBuilder().withUrl("/InteractiveHub").build(); | |||
| const scrollContainer = $("#scroll-container"); | |||
| const outputContainer = $("#output-container"); | |||
| const chatInput = $("#input"); | |||
| const onStatus = (connection, status) => { | |||
| connectionId = connection; | |||
| if (status == Enums.SessionConnectionStatus.Connected) { | |||
| $("#socket").text("Connected").addClass("text-success"); | |||
| } | |||
| else if (status == Enums.SessionConnectionStatus.Loaded) { | |||
| enableControls(); | |||
| $("#session-details").html(Mustache.render(sessionDetailsTemplate, { model: getSelectedModel(), prompt: getSelectedPrompt(), parameter: getSelectedParameter() })); | |||
| onInfo(`New model session successfully started`) | |||
| } | |||
| } | |||
| const onError = (error) => { | |||
| enableControls(); | |||
| outputContainer.append(Mustache.render(outputErrorTemplate, { text: error, date: getDateTime() })); | |||
| } | |||
| const onInfo = (message) => { | |||
| outputContainer.append(Mustache.render(outputInfoTemplate, { text: message, date: getDateTime() })); | |||
| } | |||
| let responseContent; | |||
| let responseContainer; | |||
| let responseFirstFragment; | |||
| const onResponse = (response) => { | |||
| if (!response) | |||
| return; | |||
| if (response.isFirst) { | |||
| outputContainer.append(Mustache.render(outputBotTemplate, response)); | |||
| responseContainer = $(`#${response.id}`); | |||
| responseContent = responseContainer.find(".content"); | |||
| responseFirstFragment = true; | |||
| scrollToBottom(true); | |||
| return; | |||
| } | |||
| if (response.isLast) { | |||
| enableControls(); | |||
| responseContainer.find(".signature").append(response.content); | |||
| scrollToBottom(); | |||
| } | |||
| else { | |||
| if (responseFirstFragment) { | |||
| responseContent.empty(); | |||
| responseFirstFragment = false; | |||
| responseContainer.find(".date").append(getDateTime()); | |||
| } | |||
| responseContent.append(response.content); | |||
| scrollToBottom(); | |||
| } | |||
| } | |||
| const sendPrompt = async () => { | |||
| const text = chatInput.val(); | |||
| if (text) { | |||
| disableControls(); | |||
| outputContainer.append(Mustache.render(outputUserTemplate, { text: text, date: getDateTime() })); | |||
| await connection.invoke('SendPrompt', text); | |||
| chatInput.val(null); | |||
| scrollToBottom(true); | |||
| } | |||
| } | |||
| const cancelPrompt = async () => { | |||
| await ajaxPostJsonAsync('?handler=Cancel', { connectionId: connectionId }); | |||
| } | |||
| const loadModel = async () => { | |||
| const modelName = getSelectedModel(); | |||
| const promptName = getSelectedPrompt(); | |||
| const parameterName = getSelectedParameter(); | |||
| if (!modelName || !promptName || !parameterName) { | |||
| onError("Please select a valid Model, Parameter and Prompt"); | |||
| return; | |||
| } | |||
| disableControls(); | |||
| await connection.invoke('LoadModel', modelName, promptName, parameterName); | |||
| } | |||
| const enableControls = () => { | |||
| $(".input-control").removeAttr("disabled"); | |||
| } | |||
| const disableControls = () => { | |||
| $(".input-control").attr("disabled", "disabled"); | |||
| } | |||
| const clearOutput = () => { | |||
| outputContainer.empty(); | |||
| } | |||
| const updatePrompt = () => { | |||
| const customPrompt = $("#PromptText"); | |||
| const selection = $("option:selected", "#Prompt"); | |||
| const selectedValue = selection.data("prompt"); | |||
| customPrompt.text(selectedValue); | |||
| } | |||
| const getSelectedModel = () => { | |||
| return $("option:selected", "#Model").val(); | |||
| } | |||
| const getSelectedParameter = () => { | |||
| return $("option:selected", "#Parameter").val(); | |||
| } | |||
| const getSelectedPrompt = () => { | |||
| return $("option:selected", "#Prompt").val(); | |||
| } | |||
| const getDateTime = () => { | |||
| const dateTime = new Date(); | |||
| return dateTime.toLocaleString(); | |||
| } | |||
| const scrollToBottom = (force) => { | |||
| const scrollTop = scrollContainer.scrollTop(); | |||
| const scrollHeight = scrollContainer[0].scrollHeight; | |||
| if(force){ | |||
| scrollContainer.scrollTop(scrollContainer[0].scrollHeight); | |||
| return; | |||
| } | |||
| if (scrollTop + 70 >= scrollHeight - scrollContainer.innerHeight()) { | |||
| scrollContainer.scrollTop(scrollContainer[0].scrollHeight) | |||
| } | |||
| } | |||
| // Map UI functions | |||
| $("#load").on("click", loadModel); | |||
| $("#send").on("click", sendPrompt); | |||
| $("#clear").on("click", clearOutput); | |||
| $("#cancel").on("click", cancelPrompt); | |||
| $("#Prompt").on("change", updatePrompt); | |||
| chatInput.on('keydown', function (event) { | |||
| if (event.key === 'Enter' && !event.shiftKey) { | |||
| event.preventDefault(); | |||
| sendPrompt(); | |||
| } | |||
| }); | |||
| // Map signalr functions | |||
| connection.on("OnStatus", onStatus); | |||
| connection.on("OnError", onError); | |||
| connection.on("OnResponse", onResponse); | |||
| connection.start(); | |||
| </script> | |||
| } | |||
| @@ -0,0 +1,60 @@ | |||
| <script id="outputErrorTemplate" type="text/html"> | |||
| <i class="form-control text-danger">{{text}}</i> | |||
| </script> | |||
| <script id="outputInfoTemplate" type="text/html"> | |||
| <i class="form-control text-success">{{text}}</i> | |||
| </script> | |||
| <script id="outputUserTemplate" type="text/html"> | |||
| <div class="d-flex flex-row form-control bg-light"> | |||
| <div class="m-2 me-4"> | |||
| <img src="~/image/human.png" width="60"/> | |||
| </div> | |||
| <div class="d-flex flex-column flex-fill justify-content-between"> | |||
| <span class="w-100" style="resize:none" >{{text}}</span> | |||
| <div class="d-flex justify-content-end"> | |||
| <i>{{date}}</i> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </script> | |||
| <script id="outputBotTemplate" type="text/html"> | |||
| <div class="d-flex flex-row form-control"> | |||
| <div class="m-2 me-4"> | |||
| <img src="~/image/robot.png" width="60"/> | |||
| </div> | |||
| <div id="{{id}}" class="d-flex flex-column flex-fill justify-content-between"> | |||
| <span class="content"> | |||
| <img src="~/image/loading.gif" width="30" /> | |||
| </span> | |||
| <div class="d-flex justify-content-end"> | |||
| <div class="d-flex flex-column align-items-end"> | |||
| <i class="date"></i> | |||
| <i> | |||
| <small class="signature"></small> | |||
| </i> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </script> | |||
| <script id="sessionDetailsTemplate" type="text/html"> | |||
| <div> | |||
| <small>Session Details </small> | |||
| </div> | |||
| <div> | |||
| <i>Model: </i> | |||
| <span>{{model}}</span> | |||
| </div> | |||
| <div> | |||
| <i>Prompt: </i> | |||
| <span>{{prompt}}</span> | |||
| </div> | |||
| <div> | |||
| <i>Parameters: </i> | |||
| <span>{{parameter}}</span> | |||
| </div> | |||
| </script> | |||
| @@ -24,7 +24,13 @@ | |||
| <a class="nav-link text-dark" asp-area="" asp-page="/Index">Home</a> | |||
| </li> | |||
| <li class="nav-item"> | |||
| <a class="nav-link text-dark" asp-area="" asp-page="/Interactive">Interactive</a> | |||
| <a class="nav-link text-dark" asp-area="" asp-page="/Executor/Interactive">Interactive</a> | |||
| </li> | |||
| <li class="nav-item"> | |||
| <a class="nav-link text-dark" asp-area="" asp-page="/Executor/Instruct">Instruct</a> | |||
| </li> | |||
| <li class="nav-item"> | |||
| <a class="nav-link text-dark" asp-area="" asp-page="/Executor/Stateless">Stateless</a> | |||
| </li> | |||
| </ul> | |||
| </div> | |||
| @@ -20,7 +20,7 @@ namespace LLama.Web | |||
| .BindConfiguration(nameof(LLamaOptions)); | |||
| // Services DI | |||
| builder.Services.AddSingleton<IModelSessionService, ModelSessionService>(); | |||
| builder.Services.AddSingleton<ConnectionSessionService>(); | |||
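| // ConnectionSessionService is injected by concrete type into SessionConnectionHub and the executor page models | |||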
| var app = builder.Build(); | |||
| @@ -41,7 +41,7 @@ namespace LLama.Web | |||
| app.MapRazorPages(); | |||
| app.MapHub<InteractiveHub>(nameof(InteractiveHub)); | |||
| app.MapHub<SessionConnectionHub>(nameof(SessionConnectionHub)); | |||
| app.Run(); | |||
| } | |||
| @@ -0,0 +1,94 @@ | |||
| using LLama.Abstractions; | |||
| using LLama.Web.Common; | |||
| using LLama.Web.Models; | |||
| using Microsoft.Extensions.Options; | |||
| using System.Collections.Concurrent; | |||
| namespace LLama.Web.Services | |||
| { | |||
| /// <summary> | |||
| /// Example service for handling a model session over the lifetime of a websocket connection. | |||
| /// Each websocket connection creates its own unique session and context, allowing you to use multiple tabs to compare prompts, etc. | |||
| /// </summary> | |||
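| /// <remarks>Registered as a singleton in Program.cs and consumed by SessionConnectionHub and the executor page models.</remarks> | |||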
| public class ConnectionSessionService : IModelSessionService | |||
| { | |||
| private readonly LLamaOptions _options; | |||
| private readonly ILogger<ConnectionSessionService> _logger; | |||
| private readonly ConcurrentDictionary<string, ModelSession> _modelSessions; | |||
| public ConnectionSessionService(ILogger<ConnectionSessionService> logger, IOptions<LLamaOptions> options) | |||
| { | |||
| _logger = logger; | |||
| _options = options.Value; | |||
| _modelSessions = new ConcurrentDictionary<string, ModelSession>(); | |||
| } | |||
| public Task<ModelSession> GetAsync(string connectionId) | |||
| { | |||
| _modelSessions.TryGetValue(connectionId, out var modelSession); | |||
| return Task.FromResult(modelSession); | |||
| } | |||
| public Task<IServiceResult<ModelSession>> CreateAsync(LLamaExecutorType executorType, string connectionId, string modelName, string promptName, string parameterName) | |||
| { | |||
| var modelOption = _options.Models.FirstOrDefault(x => x.Name == modelName); | |||
| if (modelOption is null) | |||
| return Task.FromResult(ServiceResult.FromError<ModelSession>($"Model option '{modelName}' not found")); | |||
| var promptOption = _options.Prompts.FirstOrDefault(x => x.Name == promptName); | |||
| if (promptOption is null) | |||
| return Task.FromResult(ServiceResult.FromError<ModelSession>($"Prompt option '{promptName}' not found")); | |||
| var parameterOption = _options.Parameters.FirstOrDefault(x => x.Name == parameterName); | |||
| if (parameterOption is null) | |||
| return Task.FromResult(ServiceResult.FromError<ModelSession>($"Parameter option '{parameterName}' not found")); | |||
| // Enforce the per-model maximum instance limit | |||
| var currentInstances = _modelSessions.Count(x => x.Value.ModelName == modelOption.Name); | |||
| if (modelOption.MaxInstances > -1 && currentInstances >= modelOption.MaxInstances) | |||
| return Task.FromResult(ServiceResult.FromError<ModelSession>("Maximum model instances reached")); | |||
| // Create model | |||
| var llamaModel = new LLamaModel(modelOption); | |||
| // Create executor | |||
| ILLamaExecutor executor = executorType switch | |||
| { | |||
| LLamaExecutorType.Interactive => new InteractiveExecutor(llamaModel), | |||
| LLamaExecutorType.Instruct => new InstructExecutor(llamaModel), | |||
| LLamaExecutorType.Stateless => new StatelessExecutor(llamaModel), | |||
| _ => default | |||
| }; | |||
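| // Note: an unrecognized executor type falls through to default (null), which only surfaces as an error when inference is attempted | |||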
| // Create session | |||
| var modelSession = new ModelSession(executor, modelOption, promptOption, parameterOption); | |||
| if (!_modelSessions.TryAdd(connectionId, modelSession)) | |||
| return Task.FromResult(ServiceResult.FromError<ModelSession>("Failed to create model session")); | |||
| return Task.FromResult(ServiceResult.FromValue(modelSession)); | |||
| } | |||
| public Task<bool> RemoveAsync(string connectionId) | |||
| { | |||
| if (_modelSessions.TryRemove(connectionId, out var modelSession)) | |||
| { | |||
| modelSession.CancelInfer(); | |||
| modelSession.Dispose(); | |||
| return Task.FromResult(true); | |||
| } | |||
| return Task.FromResult(false); | |||
| } | |||
| public Task<bool> CancelAsync(string connectionId) | |||
| { | |||
| if (_modelSessions.TryGetValue(connectionId, out var modelSession)) | |||
| { | |||
| modelSession.CancelInfer(); | |||
| return Task.FromResult(true); | |||
| } | |||
| return Task.FromResult(false); | |||
| } | |||
| } | |||
| } | |||
| @@ -6,10 +6,10 @@ namespace LLama.Web.Services | |||
| { | |||
| public interface IModelSessionService | |||
| { | |||
| Task<ModelSession> GetAsync(string connectionId); | |||
| Task<ModelSession> CreateAsync(string connectionId, ILLamaExecutor executor, ModelOptions modelOption, PromptOptions promptOption, ParameterOptions parameterOption); | |||
| Task RemoveAsync(string connectionId); | |||
| Task CancelAsync(string connectionId); | |||
| Task<ModelSession> GetAsync(string sessionId); | |||
| Task<IServiceResult<ModelSession>> CreateAsync(LLamaExecutorType executorType, string sessionId, string modelName, string promptName, string parameterName); | |||
| Task<bool> RemoveAsync(string sessionId); | |||
| Task<bool> CancelAsync(string sessionId); | |||
| } | |||
| @@ -1,58 +0,0 @@ | |||
| using LLama.Abstractions; | |||
| using LLama.Web.Common; | |||
| using LLama.Web.Models; | |||
| using System.Collections.Concurrent; | |||
| namespace LLama.Web.Services | |||
| { | |||
| public class ModelSessionService : IModelSessionService | |||
| { | |||
| private readonly ILogger<ModelSessionService> _logger; | |||
| private readonly ConcurrentDictionary<string, ModelSession> _modelSessions; | |||
| public ModelSessionService(ILogger<ModelSessionService> logger) | |||
| { | |||
| _logger = logger; | |||
| _modelSessions = new ConcurrentDictionary<string, ModelSession>(); | |||
| } | |||
| public Task<ModelSession> GetAsync(string connectionId) | |||
| { | |||
| _modelSessions.TryGetValue(connectionId, out var modelSession); | |||
| return Task.FromResult(modelSession); | |||
| } | |||
| public Task<ModelSession> CreateAsync(string connectionId, ILLamaExecutor executor, ModelOptions modelOption, PromptOptions promptOption, ParameterOptions parameterOption) | |||
| { | |||
| //TODO: Max instance etc | |||
| var modelSession = new ModelSession(executor, modelOption, promptOption, parameterOption); | |||
| if (!_modelSessions.TryAdd(connectionId, modelSession)) | |||
| { | |||
| _logger.Log(LogLevel.Error, "[CreateAsync] - Failed to create model session, Connection: {0}", connectionId); | |||
| return Task.FromResult<ModelSession>(default); | |||
| } | |||
| return Task.FromResult(modelSession); | |||
| } | |||
| public Task RemoveAsync(string connectionId) | |||
| { | |||
| if (_modelSessions.TryRemove(connectionId, out var modelSession)) | |||
| { | |||
| _logger.Log(LogLevel.Information, "[RemoveAsync] - Removed model session, Connection: {0}", connectionId); | |||
| modelSession.Dispose(); | |||
| } | |||
| return Task.CompletedTask; | |||
| } | |||
| public Task CancelAsync(string connectionId) | |||
| { | |||
| if (_modelSessions.TryGetValue(connectionId, out var modelSession)) | |||
| { | |||
| _logger.Log(LogLevel.Information, "[CancelAsync] - Canceled model session, Connection: {0}", connectionId); | |||
| modelSession.CancelInfer(); | |||
| } | |||
| return Task.CompletedTask; | |||
| } | |||
| } | |||
| } | |||
| @@ -10,9 +10,9 @@ | |||
| "Models": [ | |||
| { | |||
| "Name": "WizardLM-7B", | |||
| "MaxInstances": 2, | |||
| "ModelPath": "D:\\Repositories\\AI\\Models\\wizardLM-7B.ggmlv3.q4_0.bin", | |||
| "ContextSize": 2048, | |||
| "MaxInstances": 4 | |||
| "ContextSize": 2048 | |||
| } | |||
| ], | |||
| "Parameters": [ | |||
| @@ -22,6 +22,10 @@ | |||
| } | |||
| ], | |||
| "Prompts": [ | |||
| { | |||
| "Name": "None", | |||
| "Prompt": "" | |||
| }, | |||
| { | |||
| "Name": "Alpaca", | |||
| "Path": "D:\\Repositories\\AI\\Prompts\\alpaca.txt", | |||
| @@ -0,0 +1,176 @@ | |||
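| // Shared chat wiring for the Interactive, Instruct and Stateless pages; LLamaExecutorType selects the server-side executor | |||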
| const createConnectionSessionChat = (LLamaExecutorType) => { | |||
| const outputErrorTemplate = $("#outputErrorTemplate").html(); | |||
| const outputInfoTemplate = $("#outputInfoTemplate").html(); | |||
| const outputUserTemplate = $("#outputUserTemplate").html(); | |||
| const outputBotTemplate = $("#outputBotTemplate").html(); | |||
| const sessionDetailsTemplate = $("#sessionDetailsTemplate").html(); | |||
| let connectionId; | |||
| const connection = new signalR.HubConnectionBuilder().withUrl("/SessionConnectionHub").build(); | |||
| const scrollContainer = $("#scroll-container"); | |||
| const outputContainer = $("#output-container"); | |||
| const chatInput = $("#input"); | |||
| const onStatus = (connection, status) => { | |||
| connectionId = connection; | |||
| if (status == Enums.SessionConnectionStatus.Connected) { | |||
| $("#socket").text("Connected").addClass("text-success"); | |||
| } | |||
| else if (status == Enums.SessionConnectionStatus.Loaded) { | |||
| enableControls(); | |||
| $("#session-details").html(Mustache.render(sessionDetailsTemplate, { model: getSelectedModel(), prompt: getSelectedPrompt(), parameter: getSelectedParameter() })); | |||
| onInfo(`New model session started successfully`); | |||
| } | |||
| } | |||
| const onError = (error) => { | |||
| enableControls(); | |||
| outputContainer.append(Mustache.render(outputErrorTemplate, { text: error, date: getDateTime() })); | |||
| } | |||
| const onInfo = (message) => { | |||
| outputContainer.append(Mustache.render(outputInfoTemplate, { text: message, date: getDateTime() })); | |||
| } | |||
| let responseContent; | |||
| let responseContainer; | |||
| let responseFirstFragment; | |||
| const onResponse = (response) => { | |||
| if (!response) | |||
| return; | |||
| if (response.isFirst) { | |||
| outputContainer.append(Mustache.render(outputBotTemplate, response)); | |||
| responseContainer = $(`#${response.id}`); | |||
| responseContent = responseContainer.find(".content"); | |||
| responseFirstFragment = true; | |||
| scrollToBottom(true); | |||
| return; | |||
| } | |||
| if (response.isLast) { | |||
| enableControls(); | |||
| responseContainer.find(".signature").append(response.content); | |||
| scrollToBottom(); | |||
| } | |||
| else { | |||
| if (responseFirstFragment) { | |||
| responseContent.empty(); | |||
| responseFirstFragment = false; | |||
| responseContainer.find(".date").append(getDateTime()); | |||
| } | |||
| responseContent.append(response.content); | |||
| scrollToBottom(); | |||
| } | |||
| } | |||
| const sendPrompt = async () => { | |||
| const text = chatInput.val(); | |||
| if (text) { | |||
| disableControls(); | |||
| outputContainer.append(Mustache.render(outputUserTemplate, { text: text, date: getDateTime() })); | |||
| await connection.invoke('SendPrompt', text); | |||
| chatInput.val(null); | |||
| scrollToBottom(true); | |||
| } | |||
| } | |||
| const cancelPrompt = async () => { | |||
| await ajaxPostJsonAsync('?handler=Cancel', { connectionId: connectionId }); | |||
| } | |||
| const loadModel = async () => { | |||
| const modelName = getSelectedModel(); | |||
| const promptName = getSelectedPrompt(); | |||
| const parameterName = getSelectedParameter(); | |||
| if (!modelName || !promptName || !parameterName) { | |||
| onError("Please select a valid Model, Parameter and Prompt"); | |||
| return; | |||
| } | |||
| disableControls(); | |||
| await connection.invoke('LoadModel', LLamaExecutorType, modelName, promptName, parameterName); | |||
| } | |||
| const enableControls = () => { | |||
| $(".input-control").removeAttr("disabled"); | |||
| } | |||
| const disableControls = () => { | |||
| $(".input-control").attr("disabled", "disabled"); | |||
| } | |||
| const clearOutput = () => { | |||
| outputContainer.empty(); | |||
| } | |||
| const updatePrompt = () => { | |||
| const customPrompt = $("#PromptText"); | |||
| const selection = $("option:selected", "#Prompt"); | |||
| const selectedValue = selection.data("prompt"); | |||
| customPrompt.text(selectedValue); | |||
| } | |||
| const getSelectedModel = () => { | |||
| return $("option:selected", "#Model").val(); | |||
| } | |||
| const getSelectedParameter = () => { | |||
| return $("option:selected", "#Parameter").val(); | |||
| } | |||
| const getSelectedPrompt = () => { | |||
| return $("option:selected", "#Prompt").val(); | |||
| } | |||
| const getDateTime = () => { | |||
| const dateTime = new Date(); | |||
| return dateTime.toLocaleString(); | |||
| } | |||
| const scrollToBottom = (force) => { | |||
| const scrollTop = scrollContainer.scrollTop(); | |||
| const scrollHeight = scrollContainer[0].scrollHeight; | |||
| if (force) { | |||
| scrollContainer.scrollTop(scrollContainer[0].scrollHeight); | |||
| return; | |||
| } | |||
| if (scrollTop + 70 >= scrollHeight - scrollContainer.innerHeight()) { | |||
| scrollContainer.scrollTop(scrollContainer[0].scrollHeight) | |||
| } | |||
| } | |||
| // Map UI functions | |||
| $("#load").on("click", loadModel); | |||
| $("#send").on("click", sendPrompt); | |||
| $("#clear").on("click", clearOutput); | |||
| $("#cancel").on("click", cancelPrompt); | |||
| $("#Prompt").on("change", updatePrompt); | |||
| chatInput.on('keydown', function (event) { | |||
| if (event.key === 'Enter' && !event.shiftKey) { | |||
| event.preventDefault(); | |||
| sendPrompt(); | |||
| } | |||
| }); | |||
| // Map signalr functions | |||
| connection.on("OnStatus", onStatus); | |||
| connection.on("OnError", onError); | |||
| connection.on("OnResponse", onResponse); | |||
| connection.start(); | |||
| } | |||
| @@ -40,7 +40,11 @@ const Enums = { | |||
| Loaded: 4, | |||
| Connected: 10 | |||
| }), | |||
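| // Mirrors LLama.Web.Common.LLamaExecutorType on the server | |||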
| LLamaExecutorType: Object.freeze({ | |||
| Interactive: 0, | |||
| Instruct: 1, | |||
| Stateless: 2 | |||
| }), | |||
| GetName: (enumType, enumKey) => { | |||
| return Object.keys(enumType)[enumKey] | |||
| }, | |||
| @@ -123,10 +123,12 @@ namespace LLama | |||
| } | |||
| /// <inheritdoc /> | |||
| public async IAsyncEnumerable<string> InferAsync(string text, InferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken token = default) | |||
| public async IAsyncEnumerable<string> InferAsync(string text, InferenceParams? inferenceParams = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) | |||
| { | |||
| yield return ""; | |||
| throw new NotImplementedException(); | |||
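| // Stream results by iterating the synchronous Infer implementation | |||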
| foreach (var result in Infer(text, inferenceParams, cancellationToken)) | |||
| { | |||
| yield return result; | |||
| } | |||
| } | |||
| } | |||
| } | |||