author     Sir-Photch  2023-10-08 18:15:44 +0200
committer  Sir-Photch  2023-10-08 18:15:44 +0200
commit     b7800278fa712a6f510f07beb25d03eb8d4cd00c (patch)
tree       6482da23532dc29dbb8c9fb9783b8df6c2d6f00d
download   aur-b7800278fa712a6f510f07beb25d03eb8d4cd00c.tar.gz
ready, set, go!
-rw-r--r--  .SRCINFO                      22
-rw-r--r--  PKGBUILD                      40
-rw-r--r--  litellm-ollama.env            19
-rw-r--r--  litellm-ollama.install        16
-rw-r--r--  litellm-ollama@.service       37
-rw-r--r--  sysusers-litellm-ollama.conf   1
-rw-r--r--  tmpfiles-litellm-ollama.conf   2
7 files changed, 137 insertions, 0 deletions
diff --git a/.SRCINFO b/.SRCINFO
new file mode 100644
index 000000000000..5e11ed8c6095
--- /dev/null
+++ b/.SRCINFO
@@ -0,0 +1,22 @@
+pkgbase = litellm-ollama
+ pkgdesc = Setup service to run ollama models via litellm
+ pkgver = 0.1.0
+ pkgrel = 1
+ install = litellm-ollama.install
+ arch = any
+ license = MIT
+ depends = litellm
+ depends = ollama
+ backup = etc/litellm-ollama/litellm-ollama.env
+ source = litellm-ollama@.service
+ source = litellm-ollama.install
+ source = sysusers-litellm-ollama.conf
+ source = tmpfiles-litellm-ollama.conf
+ source = litellm-ollama.env
+ sha512sums = 33ebaba40507448641bf42b483a3b15706ded6b111753a40be67f51fef52dd568f8fa50dd6c7e5bffbf53ede28cce648b9b99df993572aeee3b159de897e888e
+ sha512sums = b9493c66cb699af763eb828fe54ed974d8bdc3e1fb5fd5aabb2bc0040f317088f28661b7964d23f3495fee6afbcf093334cb24cfa20d831ebf3bacb72c6e58c3
+ sha512sums = ff9f5761112a6bc9a588588fa13ce552dd0d0f1e36873014a3b6aa07938caf6e61b9052b5aa4ce3f54239d6a53f42e26fb3e980250a08db09063be70d68070cd
+ sha512sums = d4c3034ea25e2776f4b9072f3f55b8b6dad4a8c31a748d4661fc1f1d87b77d416ea52b1bba4690a81c2c16034c87dacd13e49bc7323eba989b67d096fdc473e2
+ sha512sums = 0d4ab88f4faa0283812f3532e95337e4cb12155001c55043fa912d137156c59f9c2ddc5c86b5105398426817bee6d8a283957767d2968dc02663cd1ee8ea3762
+
+pkgname = litellm-ollama
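
The .SRCINFO above is generated metadata mirroring the PKGBUILD that follows; it is not edited by hand. After changing the PKGBUILD it can be regenerated in the usual way, e.g.:

$ makepkg --printsrcinfo > .SRCINFO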
diff --git a/PKGBUILD b/PKGBUILD
new file mode 100644
index 000000000000..8b38f744a8f2
--- /dev/null
+++ b/PKGBUILD
@@ -0,0 +1,40 @@
+# Maintainer: Sir-Photch <sir-photch@posteo.me>
+
+pkgname=litellm-ollama
+pkgver=0.1.0
+pkgrel=1
+pkgdesc='Setup service to run ollama models via litellm'
+arch=(any)
+license=(MIT)
+depends=(
+ litellm
+ ollama
+)
+backup=(etc/litellm-ollama/litellm-ollama.env)
+install=litellm-ollama.install
+source=(
+ 'litellm-ollama@.service'
+ 'litellm-ollama.install'
+ 'sysusers-litellm-ollama.conf'
+ 'tmpfiles-litellm-ollama.conf'
+ 'litellm-ollama.env'
+)
+
+sha512sums=('33ebaba40507448641bf42b483a3b15706ded6b111753a40be67f51fef52dd568f8fa50dd6c7e5bffbf53ede28cce648b9b99df993572aeee3b159de897e888e'
+ 'b9493c66cb699af763eb828fe54ed974d8bdc3e1fb5fd5aabb2bc0040f317088f28661b7964d23f3495fee6afbcf093334cb24cfa20d831ebf3bacb72c6e58c3'
+ 'ff9f5761112a6bc9a588588fa13ce552dd0d0f1e36873014a3b6aa07938caf6e61b9052b5aa4ce3f54239d6a53f42e26fb3e980250a08db09063be70d68070cd'
+ 'd4c3034ea25e2776f4b9072f3f55b8b6dad4a8c31a748d4661fc1f1d87b77d416ea52b1bba4690a81c2c16034c87dacd13e49bc7323eba989b67d096fdc473e2'
+ '0d4ab88f4faa0283812f3532e95337e4cb12155001c55043fa912d137156c59f9c2ddc5c86b5105398426817bee6d8a283957767d2968dc02663cd1ee8ea3762')
+
+package() {
+ cd "$srcdir"
+
+ install -Dm644 "$pkgname.env" "$pkgdir/etc/$pkgname/$pkgname.env"
+
+ mkdir -p "$pkgdir/var/lib/$pkgname/.config/litellm"
+ ln -s "/etc/$pkgname/$pkgname.env" "$pkgdir/var/lib/$pkgname/.config/litellm/.env.litellm"
+
+ install -Dm644 "$pkgname@.service" "$pkgdir/usr/lib/systemd/system/$pkgname@.service"
+ install -Dm644 "sysusers-$pkgname.conf" "$pkgdir/usr/lib/sysusers.d/$pkgname.conf"
+ install -Dm644 "tmpfiles-$pkgname.conf" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
+}
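
The package() step above only installs the bundled unit, sysusers/tmpfiles fragments and env file; litellm and ollama themselves come in as dependencies. Building and installing follows the standard AUR flow; a minimal sketch (the clone URL assumes the usual aur.archlinux.org naming):

$ git clone https://aur.archlinux.org/litellm-ollama.git
$ cd litellm-ollama
$ makepkg -si    # build the package, then install it with pacman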
diff --git a/litellm-ollama.env b/litellm-ollama.env
new file mode 100644
index 000000000000..8873aa2895a4
--- /dev/null
+++ b/litellm-ollama.env
@@ -0,0 +1,19 @@
+### KEYS ###
+# HUGGINGFACE_API_KEY="" # Uncomment to save your Hugging Face API key
+# OPENAI_API_KEY="" # Uncomment to save your OpenAI API Key
+# TOGETHER_API_KEY="" # Uncomment to save your TogetherAI API key
+# NLP_CLOUD_API_KEY="" # Uncomment to save your NLP Cloud API key
+# ANTHROPIC_API_KEY="" # Uncomment to save your Anthropic API key
+
+### MODEL CUSTOM PROMPT TEMPLATE ###
+# MODEL_SYSTEM_MESSAGE_START_TOKEN = "<|prompter|>" # This does not need to be a token, can be any string
+# MODEL_SYSTEM_MESSAGE_END_TOKEN = "<|endoftext|>" # This does not need to be a token, can be any string
+
+# MODEL_USER_MESSAGE_START_TOKEN = "<|prompter|>" # This does not need to be a token, can be any string
+# MODEL_USER_MESSAGE_END_TOKEN = "<|endoftext|>" # Applies only to user messages. Can be any string.
+
+# MODEL_ASSISTANT_MESSAGE_START_TOKEN = "<|prompter|>" # Applies only to assistant messages. Can be any string.
+# MODEL_ASSISTANT_MESSAGE_END_TOKEN = "<|endoftext|>" # Applies only to assistant messages. Can be any string.
+
+# MODEL_PRE_PROMPT = "You are a good bot" # Applied at the start of the prompt
+# MODEL_POST_PROMPT = "Now answer as best as you can" # Applied at the end of the prompt
\ No newline at end of file
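
The env file lands at /etc/litellm-ollama/litellm-ollama.env (listed in backup=, so local edits survive upgrades) and is symlinked into the service user's ~/.config/litellm/ by package() above. A minimal sketch of adding a provider key, assuming a running orca-mini instance and that litellm picks the file up from that config directory on start:

# $EDITOR /etc/litellm-ollama/litellm-ollama.env       # uncomment and fill e.g. OPENAI_API_KEY
# systemctl restart litellm-ollama@orca-mini.service   # pick up the new environment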
diff --git a/litellm-ollama.install b/litellm-ollama.install
new file mode 100644
index 000000000000..79dec32f1199
--- /dev/null
+++ b/litellm-ollama.install
@@ -0,0 +1,16 @@
+post_install() {
+ echo "v v v v v v v v v v
+
+Example of how to get started with orca-mini:
+
+> pull orca-mini
+# sudo -u litellm-ollama -- bash -c 'ollama serve & ollama pull orca-mini; kill -INT \$!'
+
+> enable service for orca-mini
+# systemctl enable --now litellm-ollama@orca-mini.service
+
+> test API
+# litellm --test
+
+^ ^ ^ ^ ^ ^ ^ ^ ^ ^"
+}
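
Beyond litellm --test, the proxy can also be exercised through its OpenAI-compatible chat endpoint. A hedged sketch, assuming the proxy listens on litellm's default port of the time (8000; check litellm --help or the service logs if yours differs):

$ curl -s http://localhost:8000/chat/completions \
    -H 'Content-Type: application/json' \
    -d '{"model": "ollama/orca-mini", "messages": [{"role": "user", "content": "Hello!"}]}'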
diff --git a/litellm-ollama@.service b/litellm-ollama@.service
new file mode 100644
index 000000000000..fd54396dedbe
--- /dev/null
+++ b/litellm-ollama@.service
@@ -0,0 +1,37 @@
+[Unit]
+Description=LiteLLM Ollama model server
+
+[Service]
+Type=simple
+User=litellm-ollama
+Group=litellm-ollama
+
+ExecStart=/usr/bin/litellm --model ollama/%i --api_base http://localhost:11434
+Restart=on-failure
+RestartSec=30s
+
+WorkingDirectory=~
+StateDirectory=litellm-ollama
+
+ProtectSystem=strict
+PrivateTmp=yes
+ProtectHome=yes
+ProtectKernelTunables=yes
+NoNewPrivileges=yes
+SystemCallArchitectures=native
+MemoryDenyWriteExecute=true
+PrivateDevices=yes
+ProtectControlGroups=true
+RestrictSUIDSGID=true
+RestrictRealtime=true
+LockPersonality=true
+ProtectKernelLogs=true
+ProtectHostname=true
+ProtectKernelModules=true
+PrivateUsers=true
+ProtectClock=true
+SystemCallErrorNumber=EPERM
+SystemCallFilter=@system-service
+
+[Install]
+WantedBy=multi-user.target
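
The unit is a template: %i becomes the ollama model name, and --api_base points litellm at the local ollama daemon on port 11434, so ollama itself has to be running (on Arch the ollama package ships its own ollama.service). A minimal sketch of starting an instance and following its logs:

# systemctl enable --now ollama.service
# systemctl enable --now litellm-ollama@orca-mini.service
$ journalctl -fu litellm-ollama@orca-mini.service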
diff --git a/sysusers-litellm-ollama.conf b/sysusers-litellm-ollama.conf
new file mode 100644
index 000000000000..d4496af92eaf
--- /dev/null
+++ b/sysusers-litellm-ollama.conf
@@ -0,0 +1 @@
+u litellm-ollama - "LiteLLM+Ollama service user" /var/lib/litellm-ollama
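
This sysusers fragment creates the locked-down account the unit runs as: u declares a system user, the dash lets systemd pick the UID, the quoted field is the GECOS description, and /var/lib/litellm-ollama becomes its home directory (matching WorkingDirectory=~ and the symlinked config above). pacman applies it through the systemd sysusers hook; it can also be applied by hand:

# systemd-sysusers /usr/lib/sysusers.d/litellm-ollama.conf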
diff --git a/tmpfiles-litellm-ollama.conf b/tmpfiles-litellm-ollama.conf
new file mode 100644
index 000000000000..348caab463a3
--- /dev/null
+++ b/tmpfiles-litellm-ollama.conf
@@ -0,0 +1,2 @@
+d /var/lib/litellm-ollama 0700 litellm-ollama litellm-ollama
+z /etc/litellm-ollama/litellm-ollama.env 0640 root litellm-ollama
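
In the tmpfiles fragment, d creates the state directory with mode 0700 owned by the service user, and z sets the packaged env file to 0640 root:litellm-ollama so only root and the service account can read any API keys stored there. As with the sysusers entry, a pacman hook applies this at install time, or it can be run manually:

# systemd-tmpfiles --create /usr/lib/tmpfiles.d/litellm-ollama.conf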