using LLama.Types;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace LLama
{
    public class ChatSession<T> where T : IChatModel
    {
        IChatModel _model;

        // Element type assumed: the generic argument was lost in extraction.
        List<ChatMessageRecord> History { get; } = new List<ChatMessageRecord>();

        public ChatSession(T model)
        {
            _model = model;
        }

        /// <summary>
        /// Send a message to the model and stream back its reply.
        /// </summary>
        public IEnumerable<string> Chat(string text, string? prompt = null)
        {
            return _model.Chat(text, prompt);
        }

        /// <summary>
        /// Initialize the chat session with a prompt.
        /// </summary>
        public ChatSession<T> WithPrompt(string prompt)
        {
            _model.InitChatPrompt(prompt);
            return this;
        }

        /// <summary>
        /// Initialize the chat session with a prompt read from a file.
        /// </summary>
        public ChatSession<T> WithPromptFile(string promptFilename)
        {
            return WithPrompt(File.ReadAllText(promptFilename));
        }

        /// <summary>
        /// Set the keywords used to split the return value of the chat AI.
        /// </summary>
        /// <param name="antiprompt">The antiprompt (stop) strings.</param>
        /// <returns>The current session, for fluent chaining.</returns>
        public ChatSession<T> WithAntiprompt(string[] antiprompt)
        {
            _model.InitChatAntiprompt(antiprompt);
            return this;
        }
    }
}
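// Usage sketch (not part of the class above): a minimal example of the fluent
// configuration chain this session exposes. "LLamaModel", its constructor
// arguments, and the prompt file name below are assumptions standing in for
// whichever IChatModel implementation and prompt file a project actually uses.
//
//     var session = new ChatSession<LLamaModel>(new LLamaModel(/* model parameters */))
//         .WithPromptFile("chat-with-bob.txt")
//         .WithAntiprompt(new[] { "User:" });
//
//     foreach (var output in session.Chat("Hello, Bob."))
//     {
//         Console.Write(output);
//     }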