You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

LLamaSharp.csproj 2.9 kB

2 years ago
  1. <Project Sdk="Microsoft.NET.Sdk">
  2. <PropertyGroup>
  3. <TargetFrameworks>netstandard2.0;net6.0;net7.0</TargetFrameworks>
  4. <RootNamespace>LLama</RootNamespace>
  5. <Nullable>enable</Nullable>
  6. <LangVersion>10</LangVersion>
  7. <Platforms>AnyCPU;x64</Platforms>
  8. <AllowUnsafeBlocks>True</AllowUnsafeBlocks>
  9. <Version>0.2.2</Version>
  10. <Authors>Yaohui Liu, Haiping Chen</Authors>
  11. <Company>SciSharp STACK</Company>
  12. <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
  13. <Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
  14. <RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
  15. <RepositoryType>git</RepositoryType>
  16. <PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
  17. <PackageTags>LLama, LLM, GPT, ChatGPT, NLP, AI, Chat Bot, SciSharp</PackageTags>
  18. <Description>
  19. The .NET binding of LLama.cpp, providing APIs to run the model and deploy it on Web.
  20. </Description>
  21. <PackageReleaseNotes>
  22. LLama 0.1.0 provides basic APIs to load and run models.
  23. </PackageReleaseNotes>
  24. <PackageLicenseExpression>MIT</PackageLicenseExpression>
  25. <PackageOutputPath>packages</PackageOutputPath>
  26. <Platforms>AnyCPU;x64</Platforms>
  27. <PackageId>LLamaSharp</PackageId>
  28. <Configurations>Debug;Release;GPU</Configurations>
  29. </PropertyGroup>
  30. <ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">
  31. <PackageReference Include="IsExternalInit" Version="1.0.3" PrivateAssets="all" />
  32. </ItemGroup>
  33. <ItemGroup>
  34. <PackageReference Include="Microsoft.Extensions.Logging" Version="7.0.0" />
  35. <PackageReference Include="Serilog" Version="3.0.0-dev-01998" />
  36. <PackageReference Include="Serilog.Extensions.Logging.File" Version="3.0.1-dev-00077" />
  37. <PackageReference Include="Serilog.Sinks.Console" Version="4.1.0" />
  38. </ItemGroup>
  39. <ItemGroup>
  40. <None Include="runtimes/libllama.dll">
  41. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  42. <Link>libllama.dll</Link>
  43. </None>
  44. <None Include="runtimes/libllama-cuda11.dll">
  45. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  46. <Link>libllama-cuda11.dll</Link>
  47. </None>
  48. <None Include="runtimes/libllama-cuda12.dll">
  49. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  50. <Link>libllama-cuda12.dll</Link>
  51. </None>
  52. <None Include="runtimes/libllama.so">
  53. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  54. <Link>libllama.so</Link>
  55. </None>
  56. <None Include="runtimes/libllama-cuda11.so">
  57. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  58. <Link>libllama-cuda11.so</Link>
  59. </None>
  60. <None Include="runtimes/libllama-cuda12.so">
  61. <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
  62. <Link>libllama-cuda12.so</Link>
  63. </None>
  64. </ItemGroup>
  65. </Project>

C#/.NET上易用的LLM高性能推理框架,支持LLaMA和LLaVA系列模型。