diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 00000000..b00368fb
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,55 @@
+name: CI
+on:
+ push:
+ branches: [master]
+ pull_request:
+ branches: [master]
+
+jobs:
+ build:
+ name: Test
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ build: [linux-debug, linux-release, macos-debug, macos-release, windows-debug, windows-release]
+ include:
+ - build: linux-debug
+ os: ubuntu-latest
+ config: debug
+ - build: linux-release
+ os: ubuntu-latest
+ config: release
+ - build: macos-debug
+ os: macos-latest
+ config: debug
+ - build: macos-release
+ os: macos-latest
+ config: release
+ - build: windows-debug
+ os: windows-2019
+ config: debug
+ - build: windows-release
+ os: windows-2019
+ config: release
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-dotnet@v3
+ with:
+ dotnet-version: |
+ 6.0.x
+ 7.0.x
+ - name: Cache unit test models
+ uses: actions/cache@v3
+ with:
+ key: "unit_test_models"
+ path: LLama.Unittest/Models
+ # workaround for actions/setup-dotnet#155
+ - name: Clear package cache
+ run: dotnet clean LLamaSharp.sln && dotnet nuget locals all --clear
+ - name: Restore packages
+ run: dotnet restore LLamaSharp.sln
+ - name: Build
+ run: dotnet build LLamaSharp.sln -c ${{ matrix.config }} --no-restore
+ - name: Test
+ run: dotnet test LLamaSharp.sln -c ${{ matrix.config }}
diff --git a/.gitignore b/.gitignore
index 2c7d5041..e7c87968 100644
--- a/.gitignore
+++ b/.gitignore
@@ -342,4 +342,6 @@ test/TensorFlowNET.Examples/mnist
# docs
site/
-*.bin
+
+/LLama.Unittest/Models/*.bin
+
diff --git a/LLama.Unittest/LLama.Unittest.csproj b/LLama.Unittest/LLama.Unittest.csproj
index 845b9acf..81e71a88 100644
--- a/LLama.Unittest/LLama.Unittest.csproj
+++ b/LLama.Unittest/LLama.Unittest.csproj
@@ -23,15 +23,14 @@
-
-
-
-
-
+
+
+
+
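The new `.gitignore` rule keeps downloaded `*.bin` test models in `LLama.Unittest/Models` out of source control, and the CI cache step (key `unit_test_models`) restores that same directory between runs so the models do not have to be re-downloaded on every build. Below is a minimal sketch of a test-side guard that relies on this layout; the class and the model file name are hypothetical, for illustration only, and are not part of the LLamaSharp test suite.

```csharp
using System.IO;
using Xunit;

namespace LLama.Unittest
{
    // Illustrative only: checks that a cached model file is where the CI
    // cache step (path LLama.Unittest/Models) restores it to.
    public class ModelAvailabilityTests
    {
        // Hypothetical file name; the real tests use whichever *.bin model
        // the build places in the Models directory.
        private const string ModelPath = "Models/example-model.bin";

        [Fact]
        public void ModelFileIsPresent()
        {
            Assert.True(File.Exists(ModelPath),
                $"Expected a test model at '{ModelPath}'; run a build first so it is downloaded.");
        }
    }
}
```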
diff --git a/LLama/Common/FixedSizeQueue.cs b/LLama/Common/FixedSizeQueue.cs
index ff18da17..4b082feb 100644
--- a/LLama/Common/FixedSizeQueue.cs
+++ b/LLama/Common/FixedSizeQueue.cs
@@ -30,12 +30,12 @@ namespace LLama.Common
///
public FixedSizeQueue(int size, IEnumerable<T> data)
{
-
#if NETCOREAPP3_0_OR_GREATER
// Try an early check on the amount of data supplied (if possible)
if (data.TryGetNonEnumeratedCount(out var count) && count > size)
throw new ArgumentException($"The max size set for the quene is {size}, but got {count} initial values.");
#endif
+
// Size of "data" is unknown, copy it all into a list
_maxSize = size;
_storage = new List<T>(data);
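For context on the guard kept just above: `TryGetNonEnumeratedCount` (available on .NET 6 and newer, matching the 6.0.x/7.0.x SDKs the workflow installs) only succeeds when the source already knows its length, such as an array, `List<T>`, or another `ICollection<T>`, so an oversized initial collection can be rejected before anything is copied; lazy sequences skip the check and are simply copied. A minimal, self-contained sketch of that behaviour follows; it is not the library class itself, and `CopyAtMost` plus the sample data are illustrative.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class EarlySizeCheckDemo
{
    // Mirrors the early check above: reject oversized input up front when the
    // count is available without enumerating, otherwise just copy the data.
    public static List<T> CopyAtMost<T>(int maxSize, IEnumerable<T> data)
    {
        if (data.TryGetNonEnumeratedCount(out var count) && count > maxSize)
            throw new ArgumentException($"Got {count} initial values but the max size is {maxSize}.");

        return new List<T>(data);
    }

    public static void Main()
    {
        // Array: count is known without enumeration (3 <= 4), so the data is copied.
        var fromArray = CopyAtMost(4, new[] { 1, 2, 3 });

        // Lazy LINQ query: no cheap count, so the early check is skipped and
        // the sequence is copied regardless of its eventual length.
        var fromQuery = CopyAtMost(2, Enumerable.Range(0, 10).Where(i => i % 2 == 0));

        Console.WriteLine($"{fromArray.Count} items, {fromQuery.Count} items");
    }
}
```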