Skip to content

Commit 06ce132

Browse files
committed
CUDA11
1 parent bbfa090 commit 06ce132

File tree

3 files changed

+72
-26
lines changed

3 files changed

+72
-26
lines changed
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
<?xml version="1.0" encoding="utf-8"?>
2+
<package >
3+
<metadata>
4+
<id>LLamaSharp.Backend.Cuda11.Linux</id>
5+
<version>$version$</version>
6+
<title>LLamaSharp.Backend.Cuda11.Linux</title>
7+
<authors>llama.cpp Authors</authors>
8+
<requireLicenseAcceptance>false</requireLicenseAcceptance>
9+
<license type="expression">MIT</license>
10+
<projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
11+
<description>LLamaSharp.Backend.Cuda11.Linux contains the Linux binaries for LLamaSharp with Cuda11 support.</description>
12+
<releaseNotes></releaseNotes>
13+
<copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
14+
<tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
15+
</metadata>
16+
17+
<files>
18+
<file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
19+
<file src="runtimes/deps/cu11.7.1/libllava_shared.so" target="runtimes/linux-x64/native/cuda11/libllava_shared.so" />
20+
<file src="runtimes/deps/cu11.7.1/libggml.so" target="runtimes/linux-x64/native/cuda11/libggml.so" />
21+
<file src="runtimes/deps/cu11.7.1/libllama.so" target="runtimes/linux-x64/native/cuda11/libllama.so" />
22+
23+
<file src="icon512.png" target="icon512.png" />
24+
</files>
25+
</package>
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
<?xml version="1.0" encoding="utf-8"?>
2+
<package >
3+
<metadata>
4+
<id>LLamaSharp.Backend.Cuda11.Windows</id>
5+
<version>$version$</version>
6+
<title>LLamaSharp.Backend.Cuda11.Windows</title>
7+
<authors>llama.cpp Authors</authors>
8+
<requireLicenseAcceptance>false</requireLicenseAcceptance>
9+
<license type="expression">MIT</license>
10+
<projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
11+
<description>LLamaSharp.Backend.Cuda11.Windows contains the Windows binaries for LLamaSharp with Cuda11 support.</description>
12+
<releaseNotes></releaseNotes>
13+
<copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
14+
<tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
15+
</metadata>
16+
17+
<files>
18+
<file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
19+
<file src="runtimes/deps/cu11.7.1/llava_shared.dll" target="runtimes\win-x64\native\cuda11\llava_shared.dll" />
20+
<file src="runtimes/deps/cu11.7.1/ggml.dll" target="runtimes\win-x64\native\cuda11\ggml.dll" />
21+
<file src="runtimes/deps/cu11.7.1/llama.dll" target="runtimes\win-x64\native\cuda11\llama.dll" />
22+
23+
<file src="icon512.png" target="icon512.png" />
24+
</files>
25+
</package>
Lines changed: 22 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,27 @@
11
<?xml version="1.0" encoding="utf-8"?>
22
<package >
3-
<metadata>
4-
<id>LLamaSharp.Backend.Cuda11</id>
5-
<version>$version$</version>
6-
<title>LLamaSharp.Backend.Cuda11, the backend for LLamaSharp</title>
7-
<authors>llama.cpp Authors</authors>
8-
<requireLicenseAcceptance>false</requireLicenseAcceptance>
9-
<license type="expression">MIT</license>
10-
<icon>icon512.png</icon>
11-
<projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
12-
<description>LLamaSharp.Backend.Cuda11 is a backend for LLamaSharp to use with Cuda11.</description>
13-
<releaseNotes></releaseNotes>
14-
<copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
15-
<tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
16-
</metadata>
3+
<metadata>
4+
<id>LLamaSharp.Backend.Cuda11</id>
5+
<version>$version$</version>
6+
<title>LLamaSharp.Backend.Cuda11, the backend for LLamaSharp</title>
7+
<authors>llama.cpp Authors</authors>
8+
<requireLicenseAcceptance>false</requireLicenseAcceptance>
9+
<license type="expression">MIT</license>
10+
<icon>icon512.png</icon>
11+
<projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
12+
<description>LLamaSharp.Backend.Cuda11 is a backend for LLamaSharp to use with Cuda11.</description>
13+
<releaseNotes></releaseNotes>
14+
<copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
15+
<tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
1716

18-
<files>
19-
<file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
17+
<!-- Dependencies on platform-specific packages -->
18+
<dependencies>
19+
<dependency id="LLamaSharp.Backend.Cuda11.Windows" version="$version$" exclude="Linux" />
20+
<dependency id="LLamaSharp.Backend.Cuda11.Linux" version="$version$" exclude="Win" />
21+
</dependencies>
22+
</metadata>
2023

21-
<file src="runtimes/deps/cu11.7.1/libllava_shared.so" target="runtimes\linux-x64\native\cuda11\libllava_shared.so" />
22-
<file src="runtimes/deps/cu11.7.1/llava_shared.dll" target="runtimes\win-x64\native\cuda11\llava_shared.dll" />
23-
24-
<file src="runtimes/deps/cu11.7.1/ggml.dll" target="runtimes\win-x64\native\cuda11\ggml.dll" />
25-
<file src="runtimes/deps/cu11.7.1/llama.dll" target="runtimes\win-x64\native\cuda11\llama.dll" />
26-
<file src="runtimes/deps/cu11.7.1/libggml.so" target="runtimes\linux-x64\native\cuda11\libggml.so" />
27-
<file src="runtimes/deps/cu11.7.1/libllama.so" target="runtimes\linux-x64\native\cuda11\libllama.so" />
28-
29-
<file src="icon512.png" target="icon512.png" />
30-
</files>
24+
<files>
25+
<file src="icon512.png" target="icon512.png" />
26+
</files>
3127
</package>

0 commit comments

Comments (0)