# Maintainer: envolution
# Contributor: Wu Zhenyu <wuzhenyu@ustc.edu>
# shellcheck shell=bash disable=SC2034,SC2154
_pkgname=llama-cpp-python
pkgname=python-llama-cpp-cuda
pkgver=0.3.7
pkgrel=1
pkgdesc="Python bindings for llama.cpp (built with CUDA support)"
arch=(x86_64)
provides=(python-llama-cpp)
conflicts=(python-llama-cpp)
url=https://github.com/abetlen/llama-cpp-python
license=(MIT)
depends=(
  python-typing_extensions
  python-numpy
  python-diskcache
  cuda
)
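# the optional dependencies below are only needed for the bundled
# OpenAI-compatible server (llama_cpp.server)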
optdepends=(
  uvicorn # python-uvicorn
  python-fastapi
  python-pydantic-settings
  python-sse-starlette
  python-pyaml
)
makedepends=(
  python-scikit-build
  python-installer
  python-build
  python-wheel
  python-scikit-build-core
  gcc13 # host compiler for nvcc (see NVCC_CCBIN in build())
)
checkdepends=(
  python-pytest
  python-huggingface-hub
  python-scipy
  python-httpx
  python-fastapi
  python-sse-starlette
  # python-sse-starlette-context # missing, but the tests don't seem to require it
  python-pydantic-settings
)
source=("https://files.pythonhosted.org/packages/source/${_pkgname:0:1}/$_pkgname/${_pkgname//-/_}-$pkgver.tar.gz")
sha256sums=('0566a0dcc0f38005c4093309a87f67c2452449522e3e17e15cd735a62957894c')
_srcdir="llama_cpp_python-${pkgver}"
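
# build the wheel with the CUDA backend enabled; NVCC_CCBIN points nvcc at
# gcc 13 (a host compiler version nvcc accepts), matching the gcc13 makedepend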
build() {
  cd "$_srcdir"
  PREFIX=/usr \
    CMAKE_ARGS="-DGGML_CUDA=on -DCUDAToolkit_ROOT=/opt/cuda -DCMAKE_CUDA_COMPILER=/opt/cuda/bin/nvcc" \
    NVCC_CCBIN='gcc-13' \
    python -m build --wheel --no-isolation
}

check() {
  cd "$_srcdir"
  python -m pytest
}

package() {
  local _sys_site=$(python -c "import sysconfig; print(sysconfig.get_paths()['purelib'])")
  cd "$_srcdir"
  python -m installer --destdir="$pkgdir" dist/*.whl
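  # the wheel installs stray lib/, bin/ and include/ trees at the
  # site-packages root; drop them so only the llama_cpp module is shipped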
  rm -r "$pkgdir/$_sys_site"/{lib,bin,include}
}
# vim:set ts=2 sw=2 et: