author    | William Tang | 2023-06-24 15:56:14 +0800
committer | William Tang | 2023-06-24 20:36:03 +0800
commit    | 8731eb573a4a10f95a6f14516b3df9a66d17e842 (patch)
tree      | 45119f51d33df85fe02d0a58a12b4593664a0272 /PKGBUILD
parent    | d703d192e7cb723f2c429788ddca88f216ceee4a (diff)
download  | aur-8731eb573a4a10f95a6f14516b3df9a66d17e842.tar.gz
v3.4.1
Diffstat (limited to 'PKGBUILD')
-rw-r--r-- | PKGBUILD | 99
1 file changed, 30 insertions, 69 deletions
@@ -1,88 +1,49 @@
-# Maintainer: Lukas Zimmermann ("lukaszimmermann") <luk [dot] zim91 [at] gmail.com>
+# Maintainer: William Tang <galaxyking0419@gmail.com>
+# Contributor: Lukas Zimmermann ("lukaszimmermann") <luk [dot] zim91 [at] gmail.com>
 # Contributor: François Garillot ("huitseeker") <francois [at] garillot.net>
 # Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
 # Contributor: Emanuel Fontelles ("emanuelfontelles") <emanuelfontelles@hotmail.com>
 
 pkgname=apache-spark
-pkgver=3.3.0
-pkgrel=0
-pkgdesc="Apache Spark is a unified analytics engine for large-scale data processing."
+pkgver=3.4.1
+pkgrel=1
+pkgdesc="A unified analytics engine for large-scale data processing"
 arch=('any')
 url="http://spark.apache.org"
 license=('APACHE')
 
-depends=('java-environment>=6' 'java-environment<=11')
-optdepends=('python2: python2 support for pyspark'
-            'ipython2: ipython2 support for pyspark'
-            'python: python3 support for pyspark'
-            'ipython: ipython3 support for pyspark'
-            'r: support for sparkR'
-            'rsync: support rsync hadoop binaries from master'
-            'hadoop: support for running on YARN')
+depends=('inetutils' 'java-runtime-headless<=17')
 
-install=apache-spark.install
-source=("https://archive.apache.org/dist/spark/spark-${pkgver}/spark-${pkgver}-bin-hadoop3.tgz"
+source=("https://dlcdn.apache.org/spark/spark-$pkgver/spark-$pkgver-bin-hadoop3.tgz"
+        'apache-spark.sh'
         'apache-spark-master.service'
-        'apache-spark-slave@.service'
-        'spark-env.sh'
-        'spark-daemon-run.sh'
-        'run-master.sh'
-        'run-slave.sh')
-
-sha256sums=('4e0846207bf10311de43451bc99309086fce7990aaf54bf3038608b1981afbe7'
-            'e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183'
-            'e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b'
-            '0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb'
-            '6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7'
-            '1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98'
-            '4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29')
+        'apache-spark-worker@.service')
+sha256sums=('de24e511aebd95e7408c636fde12d19391f57a33730fe30735d6742180e338d4'
+            '0cc82baad4d878d4e2bc5864a00b99d38f2906781ea47ee6282546788e797049'
+            'de54c025ca8ce34a7b4fd95ec7b8d5dec44582787a0bd8da09232f26e2182c9a'
+            '47e6c154daecf7631ac9a33fe53a76888070c823c4381fcbde8d98377e586505')
 
-backup=('etc/apache-spark/spark-env.sh')
+install=apache-spark.install
 
-package() {
-  cd "$srcdir/spark-${pkgver}-bin-hadoop3"
+prepare() {
+  cd spark-${pkgver}-bin-hadoop3
 
-  install -d "${pkgdir}/usr/bin" "${pkgdir}/opt" "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
-  chmod 2775 "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
+  # Remove Python and R files
+  rm -rf python R
 
-  cp -r "${srcdir}/spark-${pkgver}-bin-hadoop3" "${pkgdir}/opt/apache-spark/"
+  # Remove windows batch files
+  rm bin/*.cmd
+}
 
-  cd "${pkgdir}/usr/bin"
-  for binary in beeline pyspark sparkR spark-class spark-shell find-spark-home spark-sql spark-submit load-spark-env.sh; do
-    local binpath="/opt/apache-spark/bin/${binary}"
-    ln -s "${binpath}" ${binary}
-    sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "${pkgdir}/${binpath}"
-    sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "${pkgdir}/${binpath}"
-  done
+package() {
+  # Create directory structure
+  mkdir -p $pkgdir/{etc/profile.d,opt,usr/lib/systemd/system}
 
-  mkdir -p ${pkgdir}/etc/profile.d
-  echo '#!/bin/sh' > ${pkgdir}/etc/profile.d/apache-spark.sh
-  echo 'export SPARK_HOME=/opt/apache-spark' >> ${pkgdir}/etc/profile.d/apache-spark.sh
-  chmod 755 ${pkgdir}/etc/profile.d/apache-spark.sh
+  # Install path profile
+  cp $pkgname.sh $pkgdir/etc/profile.d/
 
-  install -Dm644 "${srcdir}/apache-spark-master.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-master.service"
-  install -Dm644 "${srcdir}/apache-spark-slave@.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-slave@.service"
-  install -Dm644 "${srcdir}/spark-env.sh" "${pkgdir}/etc/apache-spark/spark-env.sh"
-  for script in run-master.sh run-slave.sh spark-daemon-run.sh; do
-    install -Dm755 "${srcdir}/${script}" "${pkgdir}/opt/apache-spark/sbin/${script}"
-  done
-  install -Dm644 "${srcdir}/spark-${pkgver}-bin-hadoop3/conf"/* "${pkgdir}/etc/apache-spark"
+  # Install systemd services
+  cp $pkgname-master.service $pkgname-worker@.service $pkgdir/usr/lib/systemd/system/
 
-  cd "${pkgdir}/opt/apache-spark"
-  mv conf conf-templates
-  ln -sf "/etc/apache-spark" conf
-  ln -sf "/var/lib/apache-spark/work" .
+  # Install program files
+  mv spark-${pkgver}-bin-hadoop3 $pkgdir/opt/$pkgname
 }
-
-sha256sums=('a78c30450ac862338dbc77e6e97bae69569e2c30615efa082d28d47c0781afef'
-            'e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183'
-            'e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b'
-            '0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb'
-            '6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7'
-            '1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98'
-            '4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29')
-sha256sums=('306b550f42ce1b06772d6084c545ef8448414f2bf451e0b1175405488f2a322f'
-            'e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183'
-            'e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b'
-            '0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb'
-            '6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7'
-            '1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98'
-            '4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29')
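The new source array pulls in apache-spark.sh, a /etc/profile.d script whose contents are not part of this diff; the old package() generated an equivalent file inline with echo, exporting SPARK_HOME. A minimal sketch of what such a script would contain, assuming it only exports SPARK_HOME and extends PATH (the PATH line is an assumption, since the package no longer symlinks the Spark binaries into /usr/bin):

    #!/bin/sh
    # Hypothetical sketch of apache-spark.sh; the packaged file is a
    # separate source and its actual contents are not shown in this diff.
    export SPARK_HOME=/opt/apache-spark
    export PATH="$PATH:$SPARK_HOME/bin"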
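With the slave units renamed to worker units, bringing up a node changes slightly. Assuming the usual systemd template-unit pattern (the unit files themselves are not shown in this diff, so what the instance argument encodes is an assumption), a single-node setup after building could look like:

    # Build and install the updated package from this PKGBUILD
    makepkg -si
    # Enable the standalone master, then one worker instance; the
    # template argument (here a host name) is an assumption about
    # the unit's design.
    sudo systemctl enable --now apache-spark-master.service
    sudo systemctl enable --now apache-spark-worker@localhost.service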