fix: typo in FixedSizeQueue
@@ -10,14 +10,14 @@ namespace LLama.Common
     /// A queue with fixed storage size.
     /// Currently it's only a naive implementation and needs to be further optimized in the future.
     /// </summary>
-    public class FixedSizeQuene<T>: IEnumerable<T>
+    public class FixedSizeQueue<T>: IEnumerable<T>
     {
         int _maxSize;
         List<T> _storage;

         public int Count => _storage.Count;
         public int Capacity => _maxSize;
-        public FixedSizeQuene(int size)
+        public FixedSizeQueue(int size)
         {
             _maxSize = size;
             _storage = new();
@@ -28,7 +28,7 @@ namespace LLama.Common
         /// </summary>
         /// <param name="size"></param>
         /// <param name="data"></param>
-        public FixedSizeQuene(int size, IEnumerable<T> data)
+        public FixedSizeQueue(int size, IEnumerable<T> data)
         {
             _maxSize = size;
             if(data.Count() > size)
@@ -38,7 +38,7 @@ namespace LLama.Common
             _storage = new(data);
         }

-        public FixedSizeQuene<T> FillWith(T value)
+        public FixedSizeQueue<T> FillWith(T value)
        {
             for(int i = 0; i < Count; i++)
             {
@@ -24,7 +24,7 @@ namespace LLama
         protected List<llama_token> _embeds = new(); // embd
         protected List<llama_token> _embed_inps = new();
         protected List<llama_token> _session_tokens = new();
-        protected FixedSizeQuene<llama_token> _last_n_tokens;
+        protected FixedSizeQueue<llama_token> _last_n_tokens;
         public LLamaModel Model => _model;
         protected StatefulExecutorBase(LLamaModel model, ILLamaLogger? logger = null)
         {
@@ -35,7 +35,7 @@ namespace LLama
             _n_session_consumed = 0;
             _embeds = new();
             _embed_inps = new();
-            _last_n_tokens = new FixedSizeQuene<llama_token>(_model.ContextSize).FillWith(0);
+            _last_n_tokens = new FixedSizeQueue<llama_token>(_model.ContextSize).FillWith(0);
         }

         public unsafe StatefulExecutorBase WithSessionFile(string filename)
@@ -56,7 +56,7 @@ namespace LLama
             _is_prompt_run = state.IsPromptRun;
             _consumedTokensCount = state.ConsumedTokensCount;
             _embeds = state.Embeds;
-            _last_n_tokens = new FixedSizeQuene<llama_token>(state.LastTokensCapacity, state.LastTokens);
+            _last_n_tokens = new FixedSizeQueue<llama_token>(state.LastTokensCapacity, state.LastTokens);
             _inp_pfx = state.InputPrefixTokens;
             _inp_sfx = state.InputSuffixTokens;
             _n_matching_session_tokens = state.MatchingSessionTokensCount;
@@ -55,7 +55,7 @@ namespace LLama
             _is_prompt_run = state.IsPromptRun;
             _consumedTokensCount = state.ConsumedTokensCount;
             _embeds = state.Embeds;
-            _last_n_tokens = new FixedSizeQuene<llama_token>(state.LastTokensCapacity, state.LastTokens);
+            _last_n_tokens = new FixedSizeQueue<llama_token>(state.LastTokensCapacity, state.LastTokens);
             _llama_token_newline = state.LLamaNewlineTokens;
             _n_matching_session_tokens = state.MatchingSessionTokensCount;
             _pastTokensCount = state.PastTokensCount;
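Both restore hunks rebuild _last_n_tokens through the two-argument constructor from a saved capacity and token history. The matching save path is not part of this diff, but since FixedSizeQueue<T> implements IEnumerable<T>, a snapshot only needs Capacity plus the enumerated contents. A hedged sketch of that round trip; the SaveSnapshot/RestoreSnapshot helper names are hypothetical and not from the codebase (llama_token is just an integer token id in LLamaSharp, so plain int is used here):

    using System.Linq;
    using LLama.Common;

    static class QueueSnapshots
    {
        // Hypothetical helper: capture everything needed to rebuild the queue.
        public static (int Capacity, int[] Tokens) SaveSnapshot(FixedSizeQueue<int> queue)
            => (queue.Capacity, queue.ToArray()); // ToArray works via IEnumerable<T>

        // Mirrors the restore pattern used in both hunks above.
        public static FixedSizeQueue<int> RestoreSnapshot((int Capacity, int[] Tokens) snapshot)
            => new FixedSizeQueue<int>(snapshot.Capacity, snapshot.Tokens);
    }

Because a saved history can never exceed the capacity it was saved with, the size check in the two-argument constructor holds on restore.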