# Maintainer: wuxxin <wuxxin@gmail.com>

# Feature switches: set to 1 to additionally build the CUDA / ROCm flavors.
_ENABLE_CUDA=0
_ENABLE_ROCM=0

# Optional Go build tags, e.g. "tts stablediffusion".
_GO_TAGS=""
# _GO_TAGS="tts stablediffusion"

# Map each enabled tag to the gRPC backend asset it requires.
# (Idiom fix: bash pattern matching instead of spawning echo|grep.)
_OPTIONAL_BACKENDS=""
if [[ $_GO_TAGS == *tts* ]]; then
  _OPTIONAL_BACKENDS="backend-assets/grpc/piper $_OPTIONAL_BACKENDS"
fi
if [[ $_GO_TAGS == *stablediffusion* ]]; then
  _OPTIONAL_BACKENDS="backend-assets/grpc/stablediffusion $_OPTIONAL_BACKENDS"
fi
_GRPC_BACKENDS="backend-assets/grpc/bert-embeddings backend-assets/grpc/llama-cpp backend-assets/grpc/whisper $_OPTIONAL_BACKENDS"
# backend-assets/grpc/rwkv

# AMD GPU targets for the ROCm build.
# BUG FIX: the original copied $GPU_TARGETS into _AMDGPU_TARGETS and then
# unconditionally overwrote it on the next line, so $GPU_TARGETS was always
# ignored.  Resolve precedence in a single nested expansion instead:
# GPU_TARGETS > AMDGPU_TARGETS > built-in default list.
_AMDGPU_TARGETS="${GPU_TARGETS:-${AMDGPU_TARGETS:-gfx900;gfx906;gfx908;gfx90a;gfx1030;gfx1100;gfx1101;gfx1102}}"
# Package identity: a VCS (-git) package that provides and conflicts with
# the plain "localai" package name.
_pkgname="localai"
pkgbase="${_pkgname}-git"
pkgname=("${pkgbase}")
pkgver=v1.40.0.60.gfd1b7b3
pkgrel=3
pkgdesc="The free, Open Source OpenAI alternative. Self-hosted, community-driven and local-first."
url="https://github.com/mudler/LocalAI"
license=('MIT')
arch=('x86_64')
provides=('localai')
conflicts=('localai')

# Dependencies common to every build flavor.
depends=('ffmpeg' 'opencv' 'blas-openblas')
makedepends=('go' 'git' 'cmake')
# Feature-conditional dependencies and sub-packages.
# (Idiom fix: the original spawned echo|grep to test for a substring and
# compared against "tts", which also breaks if a tag occurs twice; bash
# pattern matching is equivalent, faster, and robust.)
if [[ $_GO_TAGS == *tts* ]]; then
  # The piper TTS backend links against onnxruntime and libucd at runtime.
  depends+=('onnxruntime' 'libucd-git')
fi
if [[ $_ENABLE_CUDA = 1 ]]; then
  pkgname+=("${pkgbase}-cuda")
  makedepends+=('cuda' 'cudnn' 'nccl')
fi
if [[ $_ENABLE_ROCM = 1 ]]; then
  pkgname+=("${pkgbase}-rocm")
  makedepends+=('rocm-hip-sdk' 'miopen-hip' 'rccl')
fi
# Upstream git clone plus a local patch that bumps whisper.cpp to 1.5.1.
source=(
"${_pkgname}"::"git+https://github.com/mudler/LocalAI"
"whisper-1.5.1.patch"
)
# NOTE(review): 'SKIP' is conventional for the moving-target git source, but
# the local patch file should carry a real sha256 (run `updpkgsums`) so the
# build fails loudly if the patch is altered — TODO confirm and pin.
sha256sums=(
'SKIP'
'SKIP'
)
# prepare(): patch the checkout, restrict the Makefile to the backends this
# package builds, pre-fetch those backend sources, then clone one working
# tree per build flavor (cpu/cuda/rocm) so build() can run them separately.
prepare() {
cd "${srcdir}/${_pkgname}"
# update whisper and add gpu support
patch -Np1 -i "${srcdir}/whisper-1.5.1.patch"
# move backend_data to /usr/share
sed -ri "s#/tmp/localai/backend_data#/usr/share/${_pkgname}#g" main.go
# remove sources for backends that are not built: go-llama go-llama-ggml go-ggml-transformers gpt4all
_SOURCES="go-piper go-rwkv whisper.cpp go-bert go-stable-diffusion"
# prefix each backend name with "sources/" to form the Makefile target paths
_SOURCES_PATHS="$(echo "$_SOURCES" | tr " " "\n" | sed -r "s#(.+)#sources/\1#" | tr "\n" " ")"
# rewrite the get-sources target so only the kept backends are fetched
sed -ri "s#get-sources: .*#get-sources: backend/cpp/llama/llama.cpp $_SOURCES_PATHS#g" Makefile
# drop go.mod replace rules for the backends removed above
sed -ri 's#.+\-replace github.com/nomic-ai/gpt4all/gpt4all.+##g' Makefile
sed -ri 's#.+\-replace github.com/go-skynet/go-ggml-transformers.cpp.+##g' Makefile
mkdir -p "sources"
# $_SOURCES_PATHS is intentionally unquoted: it is a word list of make targets
make $_SOURCES_PATHS
# clone for different build types
cd "${srcdir}"
for n in "${_pkgname}-cpu" "${_pkgname}-cuda" "${_pkgname}-rocm"; do
if test -d "$n"; then rm -rf "$n"; fi
cp -r "${_pkgname}" "$n"
done
}
# pkgver(): derive the package version from the git checkout's describe
# output, with "-" mapped to "." as pacman versions require.
# NOTE(review): "${srcdir}/.." resolves to $startdir, where makepkg's
# $SRCDEST clone of the repo normally lives — presumably intentional, but
# confirm it matches the maintainer's workflow.
pkgver() {
  cd "${srcdir}/../${_pkgname}" || exit
  # Idiom fix: print the pipeline output directly instead of echo "$(...)";
  # also guard the cd so a bad path cannot report a bogus version.
  git describe --always --tags | tr "-" "."
}
# build(): compile each enabled flavor in its own working tree prepared by
# prepare().  The CPU (openblas) build always runs; ROCm and CUDA builds run
# only when their _ENABLE_* switch is 1.
build() {
  # plain CPU flavor
  cd "${srcdir}/${_pkgname}-cpu"
  make BUILD_TYPE="openblas" GRPC_BACKENDS="$_GRPC_BACKENDS" GO_TAGS="$_GO_TAGS" build
  # ROCm flavor: pass the GPU target list through both env var spellings
  if [ "$_ENABLE_ROCM" = 1 ]; then
    cd "${srcdir}/${_pkgname}-rocm"
    AMDGPU_TARGETS="$_AMDGPU_TARGETS" GPU_TARGETS="$_AMDGPU_TARGETS" \
    make BUILD_TYPE="hipblas" GRPC_BACKENDS="$_GRPC_BACKENDS" GO_TAGS="$_GO_TAGS" build
  fi
  # CUDA flavor
  if [ "$_ENABLE_CUDA" = 1 ]; then
    cd "${srcdir}/${_pkgname}-cuda"
    make BUILD_TYPE="cublas" GRPC_BACKENDS="$_GRPC_BACKENDS" GO_TAGS="$_GO_TAGS" build
  fi
}
# package_localai-git(): install the plain CPU flavor — the binary and docs.
package_localai-git() {
  cd "${srcdir}/${_pkgname}-cpu"
  install -D -m755 "local-ai" "${pkgdir}/usr/bin/local-ai"
  # espeak-ng data would be needed for TTS; left disabled like the tag itself
  # install -D backend-assets/espeak-ng-data -t "${pkgdir}/usr/share/${_pkgname}"
  install -D -m644 README.md -t "${pkgdir}/usr/share/doc/${_pkgname}"
}
# package_localai-git-rocm(): install the ROCm flavor; adjust the split
# package's description and runtime dependencies before installing files.
package_localai-git-rocm() {
  pkgdesc+=' (with ROCM support)'
  depends+=('rocm-hip-runtime')
  cd "${srcdir}/${_pkgname}-rocm"
  install -D -m755 "local-ai" "${pkgdir}/usr/bin/local-ai"
  # espeak-ng data would be needed for TTS; left disabled like the tag itself
  # install -D backend-assets/espeak-ng-data -t "${pkgdir}/usr/share/${_pkgname}"
  install -D -m644 README.md -t "${pkgdir}/usr/share/doc/${_pkgname}"
}
# package_localai-git-cuda(): install the CUDA flavor; adjust the split
# package's description and runtime dependencies before installing files.
package_localai-git-cuda() {
  pkgdesc+=' (with CUDA support)'
  depends+=('cuda')
  cd "${srcdir}/${_pkgname}-cuda"
  install -D -m755 "local-ai" "${pkgdir}/usr/bin/local-ai"
  # espeak-ng data would be needed for TTS; left disabled like the tag itself
  # install -D backend-assets/espeak-ng-data -t "${pkgdir}/usr/share/${_pkgname}"
  install -D -m644 README.md -t "${pkgdir}/usr/share/doc/${_pkgname}"
}