
Removed one more unused parameter

tags/v0.6.0
Martin Evans 2 years ago
commit b47977300a
2 changed files with 1 addition and 13 deletions
  1. +0 -5   LLama/Abstractions/IModelParams.cs
  2. +1 -8   LLama/Common/ModelParams.cs

+0 -5   LLama/Abstractions/IModelParams.cs

@@ -77,11 +77,6 @@ namespace LLama.Abstractions
         /// </summary>
         int BatchSize { get; set; }
 
-        /// <summary>
-        /// Whether to convert eos to newline during the inference.
-        /// </summary>
-        bool ConvertEosToNewLine { get; set; }
-
         /// <summary>
         /// Whether to use embedding mode. (embedding) Note that if this is set to true,
         /// The LLamaModel won't produce text response anymore.
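For code outside this repository that implements IModelParams directly, the effect of this hunk is that ConvertEosToNewLine no longer needs to be (and can no longer be) declared to satisfy the interface. A minimal sketch, assuming a hypothetical custom implementation that only surfaces the members visible in this diff (the class name and the omitted members are illustrative, not from the repository):

// Hypothetical implementer of IModelParams (other required members omitted for brevity).
public class MyModelParams // : IModelParams
{
    public int BatchSize { get; set; } = 512;

    // Before this commit the interface also required:
    //     bool ConvertEosToNewLine { get; set; }
    // After this commit that member is gone, so a declaration kept only to
    // satisfy the interface can simply be deleted.

    public bool EmbeddingMode { get; set; } = false;
}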


+1 -8   LLama/Common/ModelParams.cs

@@ -69,11 +69,6 @@ namespace LLama.Common
         /// </summary>
         public int BatchSize { get; set; } = 512;
 
-        /// <summary>
-        /// Whether to convert eos to newline during the inference.
-        /// </summary>
-        public bool ConvertEosToNewLine { get; set; } = false;
-
         /// <summary>
         /// Whether to use embedding mode. (embedding) Note that if this is set to true,
         /// The LLamaModel won't produce text response anymore.
@@ -137,7 +132,6 @@ namespace LLama.Common
         /// <param name="loraBase">Base model path for the lora adapter (lora_base)</param>
         /// <param name="threads">Number of threads (-1 = autodetect) (n_threads)</param>
         /// <param name="batchSize">Batch size for prompt processing (must be >=32 to use BLAS) (n_batch)</param>
-        /// <param name="convertEosToNewLine">Whether to convert eos to newline during the inference.</param>
         /// <param name="embeddingMode">Whether to use embedding mode. (embedding) Note that if this is set to true, The LLamaModel won't produce text response anymore.</param>
         /// <param name="ropeFrequencyBase">RoPE base frequency.</param>
         /// <param name="ropeFrequencyScale">RoPE frequency scaling factor</param>
@@ -148,7 +142,7 @@ namespace LLama.Common
             int seed = 1337, bool useFp16Memory = true,
             bool useMemorymap = true, bool useMemoryLock = false, bool perplexity = false,
             string loraAdapter = "", string loraBase = "", int threads = -1, int batchSize = 512,
-            bool convertEosToNewLine = false, bool embeddingMode = false,
+            bool embeddingMode = false,
             float ropeFrequencyBase = 10000.0f, float ropeFrequencyScale = 1f, bool mulMatQ = false,
             string encoding = "UTF-8")
         {
@@ -164,7 +158,6 @@ namespace LLama.Common
             LoraBase = loraBase;
             Threads = threads == -1 ? Math.Max(Environment.ProcessorCount / 2, 1) : threads;
             BatchSize = batchSize;
-            ConvertEosToNewLine = convertEosToNewLine;
             EmbeddingMode = embeddingMode;
             RopeFrequencyBase = ropeFrequencyBase;
             RopeFrequencyScale = ropeFrequencyScale;
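For callers, the visible change is the ModelParams constructor: the convertEosToNewLine argument is removed, so call sites that passed it by name must drop it. A rough before/after sketch, assuming a call site that only sets parameters shown in this diff (the model-path argument and its value are placeholders, not taken from this commit):

// Before this commit (no longer compiles against v0.6.0):
// var parameters = new ModelParams("path/to/model", batchSize: 512, convertEosToNewLine: false);

// After this commit: remove the argument; everything else stays the same.
var parameters = new ModelParams("path/to/model", batchSize: 512, embeddingMode: false);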

