
build: add package for kernel-memory integration.

commit 6a7e74e71b · Yaohui Liu · 2 years ago · tags/v0.8.0
3 changed files with 33 additions and 5 deletions
  1. LLama.KernelMemory/LLamaSharp.KernelMemory.csproj (+22 -1)
  2. LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj (+3 -3)
  3. README.md (+8 -1)

LLama.KernelMemory/LLamaSharp.KernelMemory.csproj (+22 -1)

@@ -1,9 +1,30 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
   <PropertyGroup>
-    <TargetFramework>net6.0</TargetFramework>
+    <TargetFrameworks>netstandard2.0;net6.0;net7.0</TargetFrameworks>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
+
+    <Version>0.7.1</Version>
+    <Authors>Xbotter</Authors>
+    <Company>SciSharp STACK</Company>
+    <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
+    <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
+    <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
+    <RepositoryType>git</RepositoryType>
+    <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
+    <PackageTags>LLama, LLM, GPT, ChatGPT, kernel-memory, vector search, SciSharp</PackageTags>
+    <Description>
+      The integration of LLamaSharp and Microsoft kernel-memory. It could make it easy to support document search for LLamaSharp model inference.
+    </Description>
+    <PackageReleaseNotes>
+      Support integration with kernel-memory
+    </PackageReleaseNotes>
+    <PackageLicenseExpression>MIT</PackageLicenseExpression>
+    <PackageOutputPath>packages</PackageOutputPath>
+    <Platforms>AnyCPU;x64;Arm64</Platforms>
+    <PackageId>LLamaSharp.kernel-memory</PackageId>
+    <Configurations>Debug;Release;GPU</Configurations>
   </PropertyGroup>
 
   <ItemGroup>

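The project file above turns LLama.KernelMemory into the LLamaSharp.kernel-memory NuGet package, targeting netstandard2.0/net6.0/net7.0. For orientation, a minimal wiring sketch follows; the names `KernelMemoryBuilder`, `MemoryServerless`, `WithLLamaSharpDefaults`, and `LLamaSharpConfig` are assumptions based on the kernel-memory and LLamaSharp.kernel-memory APIs of this era rather than anything shown in this diff, and the model path is invented.

```
// Sketch only (not part of this commit): wiring LLamaSharp into kernel-memory
// through the LLamaSharp.kernel-memory package. Names below are assumed from
// the 0.7.x-era APIs and may differ between versions.
using LLamaSharp.KernelMemory;   // assumed namespace of this package
using Microsoft.KernelMemory;    // kernel-memory builder and client types

// Hypothetical path to local GGUF weights.
var config = new LLamaSharpConfig(@"models/llama-2-7b.Q4_K_M.gguf");

var memory = new KernelMemoryBuilder()
    .WithLLamaSharpDefaults(config)   // assumed extension: LLamaSharp text generation + embeddings
    .Build<MemoryServerless>();       // in-process (serverless) kernel-memory instance
```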

LLama.SemanticKernel/LLamaSharp.SemanticKernel.csproj (+3 -3)

@@ -10,8 +10,8 @@
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
 
-    <Version>0.6.2-beta1</Version>
-    <Authors>Tim Miller</Authors>
+    <Version>0.7.1</Version>
+    <Authors>Tim Miller, Xbotter</Authors>
     <Company>SciSharp STACK</Company>
     <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
     <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
@@ -20,7 +20,7 @@
     <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
     <PackageTags>LLama, LLM, GPT, ChatGPT, semantic-kernel, SciSharp</PackageTags>
     <Description>
-      The integration of LLamaSharp ans semantic-kernel.
+      The integration of LLamaSharp and Microsoft semantic-kernel.
     </Description>
     <PackageReleaseNotes>
       Support integration with semantic-kernel


README.md (+8 -1)

@@ -54,6 +54,13 @@ For [microsoft semantic-kernel](https://github.com/microsoft/semantic-kernel) in
 LLamaSharp.semantic-kernel
 ```
 
+For [microsoft kernel-memory](https://github.com/microsoft/kernel-memory) integration, please search and install the following package:
+
+```
+LLamaSharp.kernel-memory
+```
+
+
 ### Tips for choosing a version
 
 In general, there may be some break changes between two minor releases, for example 0.5.1 and 0.6.0. On the contrary, we don't introduce API break changes in patch release. Therefore it's recommended to keep the highest patch version of a minor release. For example, keep 0.5.6 instead of 0.5.3.
@@ -196,7 +203,7 @@ Another choice is generate gguf format file yourself with a pytorch weight (or a
 
 🔳 Fine-tune
 
-⚠️ Local document search (enabled by kernel-memory now)
+Local document search (enabled by kernel-memory now)
 
 🔳 MAUI Integration
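
The README addition above points users at LLamaSharp.kernel-memory for local document search. Below is a hedged sketch of that flow, assuming a memory instance built as in the earlier snippet and kernel-memory's client API of the time (`IKernelMemory`, `ImportDocumentAsync`, `AskAsync`); the document name and question are invented.

```
// Sketch only: local document search through kernel-memory, continuing the
// builder snippet shown earlier. Interface and method names are assumptions
// for the kernel-memory version of this era and may differ in later releases.
using System.Threading.Tasks;
using Microsoft.KernelMemory;

public static class LocalDocSearchSketch
{
    public static async Task<string?> AskLocalDocsAsync(IKernelMemory memory)
    {
        // Hypothetical document and question, purely for illustration.
        await memory.ImportDocumentAsync("annual-report.pdf", documentId: "doc-001");
        var answer = await memory.AskAsync("What does the report conclude?");
        return answer.Result;   // answer text grounded in the imported document
    }
}
```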


