Namespace: LLama
The main chat session class.
public class ChatSession
Inheritance Object → ChatSession
The output transform used in this session.
public ITextStreamTransform OutputTransform;
The executor for this session.
public ILLamaExecutor Executor { get; }
The chat history for this session.
public ChatHistory History { get; }
The history transform used in this session.
public IHistoryTransform HistoryTransform { get; set; }
The input transform pipeline used in this session.
public List<ITextTransform> InputTransformPipeline { get; set; }
public ChatSession(ILLamaExecutor executor)
executor ILLamaExecutor
The executor for this session.
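The sketch below shows how a session is typically constructed. The model path is a placeholder, and the LLamaModel, ModelParams and InteractiveExecutor types are assumed to be available in the same library version as ChatSession; adjust them to the API you are actually using.

```csharp
using LLama;
using LLama.Common;

// Placeholder model path; LLamaModel, ModelParams and InteractiveExecutor are
// assumed to come from the same library version as ChatSession.
var model = new LLamaModel(new ModelParams("path/to/your/model.bin"));
ILLamaExecutor executor = new InteractiveExecutor(model);
var session = new ChatSession(executor);
```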
Use a custom history transform.
public ChatSession WithHistoryTransform(IHistoryTransform transform)
transform IHistoryTransform
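A minimal sketch of the fluent usage; MyHistoryTransform is a hypothetical stand-in for any IHistoryTransform implementation of your own.

```csharp
// MyHistoryTransform is a hypothetical IHistoryTransform implementation.
// WithHistoryTransform returns the session, so calls can be chained.
var session = new ChatSession(executor)
    .WithHistoryTransform(new MyHistoryTransform());
```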
Add a text transform to the input transform pipeline.
public ChatSession AddInputTransform(ITextTransform transform)
transform ITextTransform
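Input transforms are applied to the user's text before it reaches the executor. The sketch below uses a hypothetical TrimTextTransform as a stand-in for any ITextTransform.

```csharp
// TrimTextTransform is a hypothetical ITextTransform implementation.
// Transforms run in the order they are added to the pipeline.
session.AddInputTransform(new TrimTextTransform());
```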
Use a custom output transform.
public ChatSession WithOutputTransform(ITextStreamTransform transform)
transform ITextStreamTransform
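Output transforms post-process the streamed model output. The sketch below uses a hypothetical StopWordOutputTransform as a stand-in for any ITextStreamTransform.

```csharp
// StopWordOutputTransform is a hypothetical ITextStreamTransform implementation,
// e.g. one that cuts the output stream at a keyword such as "User:".
session.WithOutputTransform(new StopWordOutputTransform("User:"));
```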
public void SaveSession(string path)
path String
The directory to save the session to. If the directory does not exist, it will be created.
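A minimal usage sketch; the directory name is just an example.

```csharp
// Persists the session state and chat history to the given directory,
// creating it if it does not exist.
session.SaveSession("./chat-session-backup");
```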
public void LoadSession(string path)
path String
The directory to load the session from.
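Loading restores a previously saved session; the path below matches the save example above.

```csharp
// Restores the session that was saved earlier.
session.LoadSession("./chat-session-backup");
```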
Get the response from the LLama model with chat histories.
public IEnumerable<string> Chat(ChatHistory history, InferenceParams inferenceParams, CancellationToken cancellationToken)
history ChatHistory
inferenceParams InferenceParams
cancellationToken CancellationToken
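A sketch of chatting with an explicit ChatHistory, continuing from the session created above. AuthorRole, ChatHistory.AddMessage and InferenceParams.AntiPrompts are assumed to behave as in LLama.Common; adjust to your version.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using LLama.Common;

var history = new ChatHistory();
// AuthorRole and AddMessage are assumed from LLama.Common.
history.AddMessage(AuthorRole.User, "Hello, who are you?");

var inferenceParams = new InferenceParams
{
    // AntiPrompts is assumed to stop generation when the model emits "User:".
    AntiPrompts = new List<string> { "User:" }
};

// The response is streamed piece by piece.
foreach (var text in session.Chat(history, inferenceParams, CancellationToken.None))
{
    Console.Write(text);
}
```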
Get the response from the LLama model. Note that the prompt is not limited to preset words; it can also be the question you want to ask.
public IEnumerable<string> Chat(string prompt, InferenceParams inferenceParams, CancellationToken cancellationToken)
prompt String
inferenceParams InferenceParams
cancellationToken CancellationToken
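The same call with a plain prompt string; as noted above, the prompt can simply be the question you want to ask.

```csharp
foreach (var text in session.Chat("What is the capital of France?",
                                  new InferenceParams(), CancellationToken.None))
{
    Console.Write(text);
}
```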
Get the response from the LLama model with chat histories asynchronously.
public IAsyncEnumerable<string> ChatAsync(ChatHistory history, InferenceParams inferenceParams, CancellationToken cancellationToken)
history ChatHistory
inferenceParams InferenceParams
cancellationToken CancellationToken
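The asynchronous variant streams the response as an IAsyncEnumerable, so it can be consumed with await foreach (C# 8 or later), reusing the history and inference parameters from the synchronous example above.

```csharp
await foreach (var text in session.ChatAsync(history, inferenceParams, CancellationToken.None))
{
    Console.Write(text);
}
```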
Get the response from the LLama model with a prompt asynchronously.
public IAsyncEnumerable<string> ChatAsync(string prompt, InferenceParams inferenceParams, CancellationToken cancellationToken)
prompt String
inferenceParams InferenceParams
cancellationToken CancellationToken
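A sketch of the prompt-based asynchronous overload, with a cancellation token used to bound the generation time.

```csharp
// Cancel the generation if it runs longer than 30 seconds.
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await foreach (var text in session.ChatAsync("Tell me a joke.", new InferenceParams(), cts.Token))
{
    Console.Write(text);
}
```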