summary | log | tree | commit | diff | stats
path: root/PKGBUILD
diff options
context:
space:
mode:
author: Lukas Zimmermann <2020-05-02 12:42:05 +0200>
committer: Lukas Zimmermann <2020-05-02 12:42:05 +0200>
commit: 8f2b682295f333b545fa7eca5996c78fd3c25089 (patch)
tree: 89e6fc47428d8ae58bddefbe1e73b729cb08d443 /PKGBUILD
parent: a652aeafc0e431e0a8f4fd94b6684e87b6a563ca (diff)
download: aur-8f2b682295f333b545fa7eca5996c78fd3c25089.tar.gz
Update to 2.4.5
Diffstat (limited to 'PKGBUILD')
-rw-r--r--  PKGBUILD  62
1 file changed, 29 insertions, 33 deletions
diff --git a/PKGBUILD b/PKGBUILD
index 54ec365c45bc..73f3c43d8364 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,9 +1,10 @@
-# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
+# Maintainer: Lukas Zimmermann ("lukaszimmermann") <luk [dot] zim91 [at] gmail.com>
+# Contributor: François Garillot ("huitseeker") <francois [at] garillot.net>
# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
# Contributor: Emanuel Fontelles ("emanuelfontelles") <emanuelfontelles@hotmail.com>
pkgname=apache-spark
-pkgver=2.4.4
+pkgver=2.4.5
pkgrel=1
pkgdesc="fast and general engine for large-scale data processing"
arch=('any')
@@ -26,52 +27,47 @@ source=("https://archive.apache.org/dist/spark/spark-${pkgver}/spark-${pkgver}-b
'spark-daemon-run.sh'
'run-master.sh'
'run-slave.sh')
-sha1sums=('53f99ba8c5a68c941dd17d45393a6040dd0b46c8'
- 'ac71d12070a9a10323e8ec5aed4346b1dd7f21c6'
- 'a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0'
- '3fa39d55075d4728bd447692d648053c9f6b07ec'
- '08557d2d5328d5c99e533e16366fd893fffaad78'
- '323445b8d64aea0534a2213d2600d438f406855b'
- '65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4')
-backup=('etc/apache-spark/spark-env.sh')
-PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
+sha256sums=('020be52524e4df366eb974d41a6e18fcb6efcaba9a51632169e917c74267dd81'
+ 'e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183'
+ 'e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b'
+ '0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb'
+ '6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7'
+ '1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98'
+ '4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29')
-prepare() {
- cd "$srcdir/spark-${pkgver}-bin-hadoop2.7"
-}
+backup=('etc/apache-spark/spark-env.sh')
package() {
cd "$srcdir/spark-${pkgver}-bin-hadoop2.7"
- install -d "$pkgdir/usr/bin" "$pkgdir/opt" "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
- chmod 2775 "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
+ install -d "${pkgdir}/usr/bin" "${pkgdir}/opt" "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
+ chmod 2775 "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
- cp -r "$srcdir/spark-${pkgver}-bin-hadoop2.7" "$pkgdir/opt/apache-spark/"
+ cp -r "${srcdir}/spark-${pkgver}-bin-hadoop2.7" "${pkgdir}/opt/apache-spark/"
- cd "$pkgdir/usr/bin"
+ cd "${pkgdir}/usr/bin"
for binary in beeline pyspark sparkR spark-class spark-shell find-spark-home spark-sql spark-submit load-spark-env.sh; do
- binpath="/opt/apache-spark/bin/$binary"
- ln -s "$binpath" $binary
- sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "$pkgdir/$binpath"
- sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "$pkgdir/$binpath"
+ local binpath="/opt/apache-spark/bin/${binary}"
+ ln -s "${binpath}" ${binary}
+ sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "${pkgdir}/${binpath}"
+ sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "${pkgdir}/${binpath}"
done
- mkdir -p $pkgdir/etc/profile.d
- echo '#!/bin/sh' > $pkgdir/etc/profile.d/apache-spark.sh
- echo 'SPARK_HOME=/opt/apache-spark' >> $pkgdir/etc/profile.d/apache-spark.sh
- echo 'export SPARK_HOME' >> $pkgdir/etc/profile.d/apache-spark.sh
- chmod 755 $pkgdir/etc/profile.d/apache-spark.sh
+ mkdir -p ${pkgdir}/etc/profile.d
+ echo '#!/bin/sh' > ${pkgdir}/etc/profile.d/apache-spark.sh
+ echo 'export SPARK_HOME=/opt/apache-spark' >> ${pkgdir}/etc/profile.d/apache-spark.sh
+ chmod 755 ${pkgdir}/etc/profile.d/apache-spark.sh
- install -Dm644 "$srcdir/apache-spark-master.service" "$pkgdir/usr/lib/systemd/system/apache-spark-master.service"
- install -Dm644 "$srcdir/apache-spark-slave@.service" "$pkgdir/usr/lib/systemd/system/apache-spark-slave@.service"
- install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
+ install -Dm644 "${srcdir}/apache-spark-master.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-master.service"
+ install -Dm644 "${srcdir}/apache-spark-slave@.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-slave@.service"
+ install -Dm644 "${srcdir}/spark-env.sh" "${pkgdir}/etc/apache-spark/spark-env.sh"
for script in run-master.sh run-slave.sh spark-daemon-run.sh; do
- install -Dm755 "$srcdir/$script" "$pkgdir/opt/apache-spark/sbin/$script"
+ install -Dm755 "${srcdir}/${script}" "${pkgdir}/opt/apache-spark/sbin/${script}"
done
- install -Dm644 "$srcdir/spark-${pkgver}-bin-hadoop2.7/conf"/* "$pkgdir/etc/apache-spark"
+ install -Dm644 "${srcdir}/spark-${pkgver}-bin-hadoop2.7/conf"/* "${pkgdir}/etc/apache-spark"
- cd "$pkgdir/opt/apache-spark"
+ cd "${pkgdir}/opt/apache-spark"
mv conf conf-templates
ln -sf "/etc/apache-spark" conf
ln -sf "/var/lib/apache-spark/work" .