You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

LLamaSharp.csproj 2.5 kB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162
  1. <Project Sdk="Microsoft.NET.Sdk">
  2. <PropertyGroup>
  3. <TargetFrameworks>netstandard2.0;net6.0</TargetFrameworks>
  4. <RootNamespace>LLama</RootNamespace>
  5. <Nullable>enable</Nullable>
  6. <LangVersion>10</LangVersion>
  7. <Platforms>AnyCPU;x64</Platforms>
  8. <AllowUnsafeBlocks>True</AllowUnsafeBlocks>
  9. <Version>0.1.0</Version>
  10. <Authors>Yaohui Liu, Haiping Chen</Authors>
  11. <Company>SciSharp STACK</Company>
  12. <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
  13. <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
  14. <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
  15. <RepositoryType>git</RepositoryType>
  16. <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
  17. <PackageTags>LLama, LLM, GPT, ChatGPT, NLP, AI, Chat Bot, SciSharp</PackageTags>
  18. <Description>
  19. The .NET binding of LLama.cpp, providing APIs to run the model and deploy it on Web.
  20. </Description>
  21. <PackageReleaseNotes>
  22. LLama 0.1.0 provides basic APIs to load and run models.
  23. </PackageReleaseNotes>
  24. <PackageLicenseExpression>MIT</PackageLicenseExpression>
  25. <PackageOutputPath>packages</PackageOutputPath>
  26. <Platforms>AnyCPU;x64</Platforms>
  27. <PackageId>LLama</PackageId>
  28. <Configurations>Debug;Release;GPU</Configurations>
  29. </PropertyGroup>
  30. <ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
  31. <PackageReference Include="IsExternalInit" Version="1.0.3" PrivateAssets="all" />
  32. </ItemGroup>
  33. <ItemGroup>
  34. <PackageReference Include="Microsoft.Extensions.Logging" Version="7.0.0" />
  35. <PackageReference Include="Serilog" Version="3.0.0-dev-01998" />
  36. <PackageReference Include="Serilog.Extensions.Logging.File" Version="3.0.1-dev-00077" />
  37. <PackageReference Include="Serilog.Sinks.Console" Version="4.1.0" />
  38. </ItemGroup>
  39. <ItemGroup>
  40. <Content Include="./lib/llama.dll">
  41. <CopyToOutputDirectory>Always</CopyToOutputDirectory>
  42. <Pack>true</Pack>
  43. <PackagePath>lib\$(TargetFramework)</PackagePath>
  44. </Content>
  45. <Content Include="./lib/libllama.so">
  46. <CopyToOutputDirectory>Always</CopyToOutputDirectory>
  47. <Pack>true</Pack>
  48. <PackagePath>lib\$(TargetFramework)</PackagePath>
  49. </Content>
  50. <!--<Content Include="llama.so">
  51. <CopyToOutputDirectory>Always</CopyToOutputDirectory>
  52. <PackageCopyToOutput>true</PackageCopyToOutput>
  53. <pack>true</pack>
  54. </Content>-->
  55. </ItemGroup>
  56. </Project>

C#/.NET上易用的LLM高性能推理框架,支持LLaMA和LLaVA系列模型。

Contributors (1)