blob: 33db6b393aa12f212b840b56dfc1090f8a489d85 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
|
# Maintainer: Darius Niminenn <root at dnim dot dev>
# Contributor: Sir-Photch <sir-photch@posteo.me>
# Package identity: built from the PyPI sdist of litellm.
pkgname=litellm
pkgver=1.31.8
pkgrel=1
pkgdesc='Call all LLM APIs using the OpenAI format'
# Pure-Python package, architecture-independent.
arch=(any)
url='https://pypi.org/project/litellm/'
license=(MIT)
# Runtime dependencies (Arch repo package names).
depends=(
'python'
'python-openai'
'python-dotenv'
'python-tiktoken'
# NOTE(review): repo convention is hyphenated names — confirm this should
# not be 'python-argon2-cffi'.
'python-argon2_cffi'
'python-importlib-metadata'
'python-tokenizers'
'python-click'
'python-jinja'
'python-aiohttp'
# NOTE(review): doubled 'python-' prefix looks intentional (PyPI project is
# 'python-multipart'), but verify the exact repo/AUR package name.
'python-python-multipart'
'python-requests'
'python-setuptools'
)
# Build-only dependencies: PEP 517 toolchain (poetry-core backend).
makedepends=(
'python-build'
'python-installer'
'python-poetry-core'
'python-wheel'
)
# Optional features: proxy/server mode and extra integrations.
optdepends=(
'uvicorn: ASGI server for asyncio'
'gunicorn: WSGI HTTP Server'
'python-fastapi: Framework for building APIs'
'python-backoff: Backoff strategies for retrying operations'
'python-yaml: YAML parser and emitter'
'python-rq: Simple job queues for Python'
'python-orjson: Fast JSON parser and serializer'
'python-apscheduler: Task scheduler'
'python-streamlit: App framework for Machine Learning and Data Science'
'ollama: Serve local ollama models'
)
# Source tarball from PyPI; checksum pins the exact sdist.
source=(
"https://files.pythonhosted.org/packages/source/l/$pkgname/$pkgname-$pkgver.tar.gz"
)
sha256sums=('c7a066bf607a9d0da065002d1c97e0acb15b7ac10a70f028f7acabf35fbbc207')
prepare() {
  # Drop any pre-built artifacts shipped in the sdist so build() starts clean.
  cd -- "${pkgname}-${pkgver}"
  rm -rf -- dist
}
build() {
  # PEP 517 wheel build; --no-isolation uses the system-installed makedepends
  # instead of creating a throwaway virtualenv.
  cd -- "${pkgname}-${pkgver}"
  python -m build --wheel --no-isolation
}
package() {
  # Install the wheel built in build() into the staging directory.
  cd -- "${pkgname}-${pkgver}"
  python -m installer --destdir="$pkgdir" dist/*.whl

  # license=(MIT): ship the license text when the sdist includes it.
  # Guarded so a sdist without a LICENSE file does not fail the build.
  if [[ -f LICENSE ]]; then
    install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
  fi

  # Wrapper script that launches the litellm proxy server.
  # Create usr/bin explicitly: if the wheel installs no console scripts,
  # the directory would not exist and the redirection below would fail.
  install -dm755 "$pkgdir/usr/bin"
  # Quoted delimiter: write the Python script literally, with no shell
  # expansion at package-build time.
  cat << 'EOF' > "$pkgdir/usr/bin/$pkgname"
#!/usr/bin/env python3
import litellm
litellm.run_server()
EOF
  chmod 755 "$pkgdir/usr/bin/$pkgname"
}
|