Browse Source

Merge branch 'master' into master

tags/v0.4.2-preview
Martin Evans GitHub 2 years ago
parent
commit
fe3bd11dfa
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 63 additions and 7 deletions
  1. +55
    -0
      .github/workflows/main.yml
  2. +3
    -1
      .gitignore
  3. +4
    -5
      LLama.Unittest/LLama.Unittest.csproj
  4. +1
    -1
      LLama/Common/FixedSizeQueue.cs

+ 55
- 0
.github/workflows/main.yml View File

@@ -0,0 +1,55 @@
name: CI
on:
push:
branches: [master]
pull_request:
branches: [master]

jobs:
build:
name: Test
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
build: [linux-debug, linux-release, macos-debug, macos-release, windows-debug, windows-release]
include:
- build: linux-debug
os: ubuntu-latest
config: debug
- build: linux-release
os: ubuntu-latest
config: release
- build: macos-debug
os: macos-latest
config: debug
- build: macos-release
os: macos-latest
config: release
- build: windows-debug
os: windows-2019
config: debug
- build: windows-release
os: windows-2019
config: release
steps:
- uses: actions/checkout@v2
- uses: actions/setup-dotnet@v1
with:
dotnet-version: |
6.0.x
7.0.x
- name: Cache Gradle packages
uses: actions/cache@v3
with:
key: "unit_test_models"
path: LLama.Unittest/Models
# workaround for actions/setup-dotnet#155
- name: Clear package cache
run: dotnet clean LLamaSharp.sln && dotnet nuget locals all --clear
- name: Restore packages
run: dotnet restore LLamaSharp.sln
- name: Build
run: dotnet build LLamaSharp.sln -c ${{ matrix.config }} --no-restore
- name: Test
run: dotnet test LLamaSharp.sln -c ${{ matrix.config }}

+ 3
- 1
.gitignore View File

@@ -342,4 +342,6 @@ test/TensorFlowNET.Examples/mnist

# docs
site/
*.bin

/LLama.Unittest/Models/*.bin


+ 4
- 5
LLama.Unittest/LLama.Unittest.csproj View File

@@ -23,15 +23,14 @@
</PackageReference>
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>

<Target Name="DownloadContentFiles" BeforeTargets="Build">
<Target Name="DownloadContentFiles" BeforeTargets="Build">
<DownloadFile SourceUrl="https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/resolve/main/llama-2-7b-chat.ggmlv3.q3_K_S.bin" DestinationFolder="Models" DestinationFileName="llama-2-7b-chat.ggmlv3.q3_K_S.bin" SkipUnchangedFiles="true">
</DownloadFile>
</Target>

<ItemGroup>
<ProjectReference Include="..\LLama\LLamaSharp.csproj" />
</ItemGroup>

<ItemGroup>
<Folder Include="Models\" />


+ 1
- 1
LLama/Common/FixedSizeQueue.cs View File

@@ -30,12 +30,12 @@ namespace LLama.Common
/// <param name="data"></param>
public FixedSizeQueue(int size, IEnumerable<T> data)
{
#if NETCOREAPP3_0_OR_GREATER
    // Try an early check on the amount of data supplied (if possible).
    // TryGetNonEnumeratedCount avoids enumerating "data" just to count it;
    // when the count is cheaply available and exceeds the capacity, fail
    // fast instead of silently over-filling the queue.
    if (data.TryGetNonEnumeratedCount(out var count) && count > size)
        throw new ArgumentException($"The max size set for the queue is {size}, but got {count} initial values.");
#endif

    // Size of "data" is unknown, copy it all into a list
    _maxSize = size;
    _storage = new List<T>(data);
}


Loading…
Cancel
Save