Add 0.2.23 builds to package index
jllllll committed Dec 15, 2023
1 parent dd21c47 commit 97f2bad
Showing 74 changed files with 599 additions and 4 deletions.
README.md (5 changes: 3 additions & 2 deletions)
@@ -6,8 +6,9 @@ Requirements:
- CUDA 11.6 - 12.2
- CPython 3.8 - 3.11

llama.cpp, and llama-cpp-python by extension, has migrated to using the new GGUF format and has dropped support for GGML.
This applies to version 0.1.79+.
> [!WARNING]
> macOS 11 and Windows ROCm wheels are unavailable for 0.2.22+.
> This is due to build issues with llama.cpp that are not yet resolved.
ROCm builds for AMD GPUs: https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/tag/rocm
Metal builds for macOS 11.0+: https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/tag/metal
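
For reference, every entry added to the index pages below is a direct link to a wheel on this repository's releases. A minimal sketch of installing one of the new 0.2.23 builds straight from such a URL (pick the wheel matching your CUDA version, Python version, and platform; the URL here is one of the cu121 links added in this commit):

```powershell
# Install a specific 0.2.23 build directly from its release URL
# (URL copied from index/AVX/cu121/llama-cpp-python/index.html in this commit)
python -m pip install 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp311-cp311-win_amd64.whl'
```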
generate-html.ps1 (4 changes: 3 additions & 1 deletion)
@@ -3,7 +3,7 @@ Set-Location $PSScriptRoot
$destinationDir = if (Test-Path $(Join-Path $(Resolve-Path '.') 'index')) {Join-Path '.' 'index' -resolve} else {(New-Item 'index' -ItemType 'Directory').fullname}
$avxVersions = "AVX","AVX2","AVX512","basic"
$cudaVersions = "11.6","11.7","11.8","12.0","12.1","12.2","rocm5.4.2","rocm5.5","rocm5.5.1","rocm5.6.1","cpu"
$packageVersions = (@(62)+66..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+14..20).foreach({"$_".Insert(0,'0.2.')})
$packageVersions = (@(62)+66..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+14..20+@(23)).foreach({"$_".Insert(0,'0.2.')})
$pythonVersions = "3.7","3.8","3.9","3.10","3.11"
$supportedSystems = 'linux_x86_64','win_amd64','macosx_11_0_x86_64','macosx_12_0_x86_64','macosx_13_0_x86_64','macosx_14_0_x86_64','macosx_11_0_arm64','macosx_12_0_arm64','macosx_13_0_arm64','macosx_14_0_arm64','macosx_11_0_aarch64','macosx_12_0_aarch64','macosx_13_0_aarch64','macosx_14_0_aarch64'
$wheelSource = 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download'
@@ -41,6 +41,8 @@ Foreach ($avxVersion in $avxVersions)
ForEach ($supportedSystem in $supportedSystems)
{
$doMacos = $avxVersion -eq 'basic' -and $cudaVersion -eq 'cpu' -and $supportedSystem.contains('macosx') -and (($packageVersion -eq '0.1.85' -and !$supportedSystem.contains('macosx_14_0')) -or [version]$packageVersion -gt [version]'0.2.4')
if ([version]$packageVersion -gt '0.2.20' -and $supportedSystem.contains('macosx_11_0')) {$doMacos = $false}
if ($cudaVersion.StartsWith('rocm') -and [version]$packageVersion -gt [version]"0.2.21" -and $supportedSystem -eq 'win_amd64') {continue}
if ($cudaVersion.StartsWith('rocm') -and $cudaVersion.Split('rocm')[-1] -ne '5.5.1' -and $supportedSystem -eq 'win_amd64') {continue}
if ($cudaVersion.StartsWith('rocm') -and $cudaVersion.Split('rocm')[-1] -eq '5.5.1' -and $supportedSystem -eq 'linux_x86_64') {continue}
if ([version]$packageVersion -gt [version]"0.1.85" -and $supportedSystem -eq 'linux_x86_64') {$supportedSystem = 'manylinux_2_31_x86_64'}
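
The headline change here is the `@(23)` appended to the 0.2.x range in `$packageVersions`. A minimal sketch of how that expression expands, evaluated on its own (`$versions` is just a local name for illustration):

```powershell
# The 0.2.x half of $packageVersions: 0..11 and 14..20 plus the newly added 23
$versions = (0..11 + 14..20 + @(23)).foreach({ "$_".Insert(0, '0.2.') })
$versions[-3..-1]   # -> 0.2.19, 0.2.20, 0.2.23
```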
generate-textgen-html.ps1 (3 changes: 2 additions & 1 deletion)
@@ -4,7 +4,7 @@ $destinationDir = if (Test-Path $(Join-Path $(Resolve-Path '.') 'index')) {Join-
$destinationDir = if (Test-Path $(Join-Path $destinationDir 'textgen')) {Join-Path $destinationDir 'textgen'} else {(New-Item $(Join-Path $destinationDir 'textgen') -ItemType 'Directory').fullname}
$avxVersions = "AVX","AVX2","basic"
$cudaVersions = "11.7","11.8","12.0","12.1","12.2","rocm5.4.2","rocm5.5","rocm5.5.1","rocm5.6.1"
$packageVersions = (73..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+14..20).foreach({"$_".Insert(0,'0.2.')})
$packageVersions = (73..74+76..85).foreach({"$_".Insert(0,'0.1.')}) + (0..11+14..20+@(23)).foreach({"$_".Insert(0,'0.2.')})
$pythonVersions = "3.8","3.9","3.10","3.11"
$supportedSystems = 'linux_x86_64','win_amd64'
$wheelSource = 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download'
@@ -37,6 +37,7 @@ Foreach ($avxVersion in $avxVersions)
$pyVer = $pythonVersion.replace('.','')
ForEach ($supportedSystem in $supportedSystems)
{
if ($cudaVersion.StartsWith('rocm') -and [version]$packageVersion -gt [version]"0.2.21" -and $supportedSystem -eq 'win_amd64') {continue}
if ($cudaVersion.StartsWith('rocm') -and $cudaVersion.Split('rocm')[-1] -ne '5.5.1' -and $supportedSystem -eq 'win_amd64') {continue}
if ($cudaVersion.StartsWith('rocm') -and $cudaVersion.Split('rocm')[-1] -eq '5.5.1' -and $supportedSystem -eq 'linux_x86_64') {continue}
if ([version]$packageVersion -gt [version]"0.1.85" -and $supportedSystem -eq 'linux_x86_64') {$supportedSystem = 'manylinux_2_31_x86_64'}
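
The guard added above is what keeps Windows ROCm wheels out of the index for releases after 0.2.21, matching the README warning. A minimal sketch of how it evaluates for this release, using standalone values rather than the script's loop variables:

```powershell
$packageVersion  = '0.2.23'
$cudaVersion     = 'rocm5.5.1'
$supportedSystem = 'win_amd64'

# The new guard: skip Windows ROCm wheels for anything newer than 0.2.21
$cudaVersion.StartsWith('rocm') -and [version]$packageVersion -gt [version]'0.2.21' -and $supportedSystem -eq 'win_amd64'   # True -> continue (wheel skipped)

# The [version] cast compares components numerically, which plain string comparison would not:
[version]'0.2.9' -gt [version]'0.2.21'   # False
'0.2.9' -gt '0.2.21'                     # True (lexicographic), which is why the cast matters
```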
index/AVX/cpu/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -336,5 +336,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.20+cpuavx-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cpuavx-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.20+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.20+cpuavx-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cpuavx-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cpuavx-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cpuavx-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cpuavx-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cpuavx-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cpuavx-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cpuavx-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cpuavx-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.23+cpuavx-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cpuavx-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
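
Every added entry follows the same pattern: an anchor tag whose href is the release download URL and whose text is the wheel filename. A hypothetical reconstruction of how one such line could be built from the `$wheelSource` base defined in generate-html.ps1 (the emitting code itself is not part of this diff):

```powershell
# Hypothetical sketch only; the actual generator code is not shown in this commit
$wheelSource = 'https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download'
$wheel = 'llama_cpp_python-0.2.23+cpuavx-cp311-cp311-win_amd64.whl'
"<a href=`"$wheelSource/cpu/$wheel`">$wheel</a><br/>"   # matches the cpu index entries above
```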
index/AVX/cu116/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -391,5 +391,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu116-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cu116-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu116-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cu116-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu116-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cu116-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu116-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cu116-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu116-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cu116-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu116-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cu116-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu116-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu116-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cu116-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
index/AVX/cu117/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -391,5 +391,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu117-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cu117-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu117-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cu117-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu117-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cu117-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu117-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cu117-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu117-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cu117-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu117-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cu117-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu117-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu117-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cu117-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
index/AVX/cu118/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -391,5 +391,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu118-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cu118-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu118-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cu118-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu118-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cu118-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu118-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cu118-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu118-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cu118-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu118-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cu118-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu118-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu118-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cu118-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
index/AVX/cu120/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -391,5 +391,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu120-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cu120-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu120-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cu120-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu120-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cu120-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu120-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cu120-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu120-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cu120-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu120-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cu120-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu120-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu120-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cu120-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
index/AVX/cu121/llama-cpp-python/index.html (9 changes: 9 additions & 0 deletions)
@@ -391,5 +391,14 @@
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu121-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.20+cu121-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu121-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.20+cu121-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.20+cu121-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.20+cu121-cp311-cp311-win_amd64.whl</a><br/>

<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp38-cp38-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu121-cp38-cp38-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp38-cp38-win_amd64.whl">llama_cpp_python-0.2.23+cu121-cp38-cp38-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp39-cp39-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu121-cp39-cp39-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp39-cp39-win_amd64.whl">llama_cpp_python-0.2.23+cu121-cp39-cp39-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp310-cp310-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu121-cp310-cp310-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp310-cp310-win_amd64.whl">llama_cpp_python-0.2.23+cu121-cp310-cp310-win_amd64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp311-cp311-manylinux_2_31_x86_64.whl">llama_cpp_python-0.2.23+cu121-cp311-cp311-manylinux_2_31_x86_64.whl</a><br/>
<a href="https://github.com/jllllll/llama-cpp-python-cuBLAS-wheels/releases/download/AVX/llama_cpp_python-0.2.23+cu121-cp311-cp311-win_amd64.whl">llama_cpp_python-0.2.23+cu121-cp311-cp311-win_amd64.whl</a><br/>
</body>
</html>
