author     Antonio Rojas  2024-01-06 09:19:26 +0100
committer  Antonio Rojas  2024-01-06 09:19:26 +0100
commit     9033a6b5b00afa629191fc52f0a3bbd3c73364a1 (patch)
tree       5678a3242457369940602035a13acece7df4622c
parent     d98c68a3f95103676dda4ec2ee0ed6dbff725914 (diff)
download   aur-9033a6b5b00afa629191fc52f0a3bbd3c73364a1.tar.gz
upgpkg: 0.1.18-1: Update to 0.1.18
-rw-r--r--  .SRCINFO  10
-rw-r--r--  PKGBUILD  23
2 files changed, 13 insertions, 20 deletions
diff --git a/.SRCINFO b/.SRCINFO
index b85d156491af..bef962381b59 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,7 +1,7 @@
pkgbase = ollama-cuda
pkgdesc = Create, run and share large language models (LLMs) with CUDA
- pkgver = 0.1.17
- pkgrel = 2
+ pkgver = 0.1.18
+ pkgrel = 1
url = https://github.com/jmorganca/ollama
arch = x86_64
license = MIT
@@ -11,15 +11,13 @@ pkgbase = ollama-cuda
makedepends = go
provides = ollama
conflicts = ollama
- source = git+https://github.com/jmorganca/ollama#commit=6b5bdfa6c9321405174ad443f21c2e41db36a867
- source = ggml::git+https://github.com/ggerganov/llama.cpp#commit=9e232f0234073358e7031c1b8d7aa45020469a3b
- source = gguf::git+https://github.com/ggerganov/llama.cpp#commit=a7aee47b98e45539d491071b25778b833b77e387
+ source = git+https://github.com/jmorganca/ollama#tag=v0.1.18
+ source = llamacpp::git+https://github.com/ggerganov/llama.cpp#commit=328b83de23b33240e28f4e74900d1d06726f5eb1
source = sysusers.conf
source = tmpfiles.d
source = ollama.service
b2sums = SKIP
b2sums = SKIP
- b2sums = SKIP
b2sums = 3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec
b2sums = c890a741958d31375ebbd60eeeb29eff965a6e1e69f15eb17ea7d15b575a4abee176b7d407b3e1764aa7436862a764a05ad04bb9901a739ffd81968c09046bb6
b2sums = a773bbf16cf5ccc2ee505ad77c3f9275346ddf412be283cfeaee7c2e4c41b8637a31aaff8766ed769524ebddc0c03cf924724452639b62208e578d98b9176124
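
Note: the .SRCINFO half of this diff is generated output rather than a hand edit. Once pkgver, pkgrel and the source array are updated in the PKGBUILD below, the file is normally regenerated from it; a typical invocation (standard AUR practice, the exact command used for this commit is not recorded) is:

    makepkg --printsrcinfo > .SRCINFO
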
diff --git a/PKGBUILD b/PKGBUILD
index 8cceaec43960..8bac672234df 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -3,28 +3,24 @@
pkgname=ollama-cuda
pkgdesc='Create, run and share large language models (LLMs) with CUDA'
-pkgver=0.1.17
-pkgrel=2
+pkgver=0.1.18
+pkgrel=1
arch=(x86_64)
url='https://github.com/jmorganca/ollama'
license=(MIT)
-_ollamacommit=6b5bdfa6c9321405174ad443f21c2e41db36a867 # tag: v0.1.17
# The git submodule commit hashes are here:
-# https://github.com/jmorganca/ollama/tree/v0.1.17/llm/llama.cpp
-_ggmlcommit=9e232f0234073358e7031c1b8d7aa45020469a3b
-_ggufcommit=a7aee47b98e45539d491071b25778b833b77e387
+# https://github.com/jmorganca/ollama/tree/v$pkgver
+_llamacppcommit=328b83de23b33240e28f4e74900d1d06726f5eb1
makedepends=(cmake cuda git go)
provides=(ollama)
conflicts=(ollama)
-source=(git+$url#commit=$_ollamacommit
- ggml::git+https://github.com/ggerganov/llama.cpp#commit=$_ggmlcommit
- gguf::git+https://github.com/ggerganov/llama.cpp#commit=$_ggufcommit
+source=(git+$url#tag=v$pkgver
+ llamacpp::git+https://github.com/ggerganov/llama.cpp#commit=$_llamacppcommit
sysusers.conf
tmpfiles.d
ollama.service)
b2sums=('SKIP'
'SKIP'
- 'SKIP'
'3aabf135c4f18e1ad745ae8800db782b25b15305dfeaaa031b4501408ab7e7d01f66e8ebb5be59fc813cfbff6788d08d2e48dcf24ecc480a40ec9db8dbce9fec'
'c890a741958d31375ebbd60eeeb29eff965a6e1e69f15eb17ea7d15b575a4abee176b7d407b3e1764aa7436862a764a05ad04bb9901a739ffd81968c09046bb6'
'a773bbf16cf5ccc2ee505ad77c3f9275346ddf412be283cfeaee7c2e4c41b8637a31aaff8766ed769524ebddc0c03cf924724452639b62208e578d98b9176124')
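
The 'SKIP' entries correspond to the two git sources, for which checksum verification does not apply; the three long b2sums cover sysusers.conf, tmpfiles.d and ollama.service and are unchanged by this update. As a general note on the workflow (not something this commit needed to do), if those local files change the array can be refreshed with makepkg's checksum generator, or rewritten in place with updpkgsums from pacman-contrib:

    makepkg --geninteg    # print fresh checksum arrays for all sources
    updpkgsums            # update the checksums inside the PKGBUILD in place
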
@@ -32,14 +28,13 @@ b2sums=('SKIP'
prepare() {
cd ${pkgname/-cuda}
- rm -frv llm/llama.cpp/gg{ml,uf}
+ rm -frv llm/llama.cpp
# Copy git submodule files instead of symlinking because the build process is sensitive to symlinks.
- cp -r "$srcdir/ggml" llm/llama.cpp/ggml
- cp -r "$srcdir/gguf" llm/llama.cpp/gguf
+ cp -r "$srcdir/llamacpp" llm/llama.cpp
# Do not git clone when "go generate" is being run.
- sed -i 's,git submodule,true,g' llm/llama.cpp/generate_linux.go
+ sed -i 's,git submodule,true,g' llm/generate/gen_common.sh
# Set build mode to release
sed -i '33s/DebugMode/ReleaseMode/;45s/DebugMode/ReleaseMode/' "$srcdir/ollama/server/routes.go"
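
Aside on the last sed above (left untouched by this commit): it flips DebugMode to ReleaseMode at two hard-coded line numbers of server/routes.go, which is fragile across upstream releases. A position-independent variant, assuming the string DebugMode occurs only where the server's run mode is selected, would be the untested sketch:

    sed -i 's/DebugMode/ReleaseMode/g' "$srcdir/ollama/server/routes.go"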