using LLama.Common;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace LLamaSharp.KernelMemory
{
/// <summary>
/// Represents the configuration for LLamaSharp. Available properties are `ModelPath`, `ContextSize`, `Seed`, `GpuLayerCount`.
/// </summary>
public class LLamaSharpConfig
{
/// <summary>
/// Initializes a new instance of the <see cref="LLamaSharpConfig"/> class.
/// </summary>
/// <param name="modelPath">The path to the model file.</param>
public LLamaSharpConfig(string modelPath)
{
ModelPath = modelPath;
}
/// <summary>
/// Gets or sets the path to the model file.
/// </summary>
public string ModelPath { get; set; }
/// <summary>
/// Gets or sets the size of the context.
/// </summary>
public uint? ContextSize { get; set; }
/// <summary>
/// Gets or sets the seed value.
/// </summary>
public uint? Seed { get; set; }
/// <summary>
/// Gets or sets the number of GPU layers.
/// </summary>
public int? GpuLayerCount { get; set; }
/// <summary>
/// main_gpu interpretation depends on split_mode:
/// <list type="bullet">
///     <item>
///         <term><see cref="GPUSplitMode.None"/></term>
///         <description>The GPU that is used for the entire model.</description>
///     </item>
///     <item>
///         <term><see cref="GPUSplitMode.Row"/></term>
///         <description>The GPU that is used for small tensors and intermediate results.</description>
///     </item>
///     <item>
///         <term><see cref="GPUSplitMode.Layer"/></term>
///         <description>Ignored.</description>
///     </item>
/// </list>
/// </summary>
public int MainGpu { get; set; } = 0;
/// <summary>
/// How to split the model across multiple GPUs
/// </summary>
/// <seealso cref="GPUSplitMode"/>
public GPUSplitMode SplitMode { get; set; } = GPUSplitMode.None;
/// <summary>
/// Set the default inference parameters.
/// </summary>
public InferenceParams? DefaultInferenceParams { get; set; }
}
}