 -rw-r--r--  .SRCINFO                    | 18
 -rw-r--r--  PKGBUILD                    | 62
 -rw-r--r--  apache-spark-slave@.service |  2
 -rw-r--r--  other-pkgbuild              | 60
 4 files changed, 39 insertions(+), 103 deletions(-)
diff --git a/.SRCINFO b/.SRCINFO
index 1c7a8f9d8d3e..b71b22b05db9 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,6 +1,6 @@
pkgbase = apache-spark
pkgdesc = fast and general engine for large-scale data processing
- pkgver = 2.4.4
+ pkgver = 2.4.5
pkgrel = 1
url = http://spark.apache.org
install = apache-spark.install
@@ -16,20 +16,20 @@ pkgbase = apache-spark
optdepends = rsync: support rsync hadoop binaries from master
optdepends = hadoop: support for running on YARN
backup = etc/apache-spark/spark-env.sh
- source = https://archive.apache.org/dist/spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz
+ source = https://archive.apache.org/dist/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.7.tgz
source = apache-spark-master.service
source = apache-spark-slave@.service
source = spark-env.sh
source = spark-daemon-run.sh
source = run-master.sh
source = run-slave.sh
- sha1sums = 53f99ba8c5a68c941dd17d45393a6040dd0b46c8
- sha1sums = ac71d12070a9a10323e8ec5aed4346b1dd7f21c6
- sha1sums = a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0
- sha1sums = 3fa39d55075d4728bd447692d648053c9f6b07ec
- sha1sums = 08557d2d5328d5c99e533e16366fd893fffaad78
- sha1sums = 323445b8d64aea0534a2213d2600d438f406855b
- sha1sums = 65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4
+ sha256sums = 020be52524e4df366eb974d41a6e18fcb6efcaba9a51632169e917c74267dd81
+ sha256sums = e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183
+ sha256sums = e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b
+ sha256sums = 0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb
+ sha256sums = 6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7
+ sha256sums = 1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98
+ sha256sums = 4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29
pkgname = apache-spark
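
The hunk above bumps pkgver to 2.4.5 and moves the integrity fields from sha1sums to sha256sums. As a hedged sketch of the usual maintainer workflow behind such a change (assuming pacman-contrib's updpkgsums is available; these commands are illustrative and not part of the commit):

    # refresh the checksum array in the PKGBUILD (re-downloads the sources)
    updpkgsums

    # regenerate .SRCINFO so the AUR metadata matches the edited PKGBUILD
    makepkg --printsrcinfo > .SRCINFO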
diff --git a/PKGBUILD b/PKGBUILD
index 54ec365c45bc..73f3c43d8364 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,9 +1,10 @@
-# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
+# Maintainer: Lukas Zimmermann ("lukaszimmermann") <luk [dot] zim91 [at] gmail.com>
+# Contributor: François Garillot ("huitseeker") <francois [at] garillot.net>
# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
# Contributor: Emanuel Fontelles ("emanuelfontelles") <emanuelfontelles@hotmail.com>
pkgname=apache-spark
-pkgver=2.4.4
+pkgver=2.4.5
pkgrel=1
pkgdesc="fast and general engine for large-scale data processing"
arch=('any')
@@ -26,52 +27,47 @@ source=("https://archive.apache.org/dist/spark/spark-${pkgver}/spark-${pkgver}-b
'spark-daemon-run.sh'
'run-master.sh'
'run-slave.sh')
-sha1sums=('53f99ba8c5a68c941dd17d45393a6040dd0b46c8'
- 'ac71d12070a9a10323e8ec5aed4346b1dd7f21c6'
- 'a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0'
- '3fa39d55075d4728bd447692d648053c9f6b07ec'
- '08557d2d5328d5c99e533e16366fd893fffaad78'
- '323445b8d64aea0534a2213d2600d438f406855b'
- '65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4')
-backup=('etc/apache-spark/spark-env.sh')
-PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
+sha256sums=('020be52524e4df366eb974d41a6e18fcb6efcaba9a51632169e917c74267dd81'
+ 'e4333e4a484543786e82d1f7af144ad99d9cc666d2e2742f30b5120996f32183'
+ 'e656ef5599d095472641e6fd58e9046d7db3f983b334e9a695e278ba799afc5b'
+ '0e9c3721cfac02e3b67248e1dfe2def49a9fc6408092b291b1c67e89a9c130cb'
+ '6ded9f6b31b8be5fa782fc18ec0991eb7cb18fbea65b8e7560587c3fbe7f20f7'
+ '1134342330c7680e7d9847cc4de2c0f97ecd55ee7db1c6068bc45219b5838e98'
+ '4e1159b25d0f6f3dcdf72d1a5f186e5693a5e9d8690ad1600a9e44aa43022e29')
-prepare() {
- cd "$srcdir/spark-${pkgver}-bin-hadoop2.7"
-}
+backup=('etc/apache-spark/spark-env.sh')
package() {
cd "$srcdir/spark-${pkgver}-bin-hadoop2.7"
- install -d "$pkgdir/usr/bin" "$pkgdir/opt" "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
- chmod 2775 "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
+ install -d "${pkgdir}/usr/bin" "${pkgdir}/opt" "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
+ chmod 2775 "${pkgdir}/var/log/apache-spark" "${pkgdir}/var/lib/apache-spark/work"
- cp -r "$srcdir/spark-${pkgver}-bin-hadoop2.7" "$pkgdir/opt/apache-spark/"
+ cp -r "${srcdir}/spark-${pkgver}-bin-hadoop2.7" "${pkgdir}/opt/apache-spark/"
- cd "$pkgdir/usr/bin"
+ cd "${pkgdir}/usr/bin"
for binary in beeline pyspark sparkR spark-class spark-shell find-spark-home spark-sql spark-submit load-spark-env.sh; do
- binpath="/opt/apache-spark/bin/$binary"
- ln -s "$binpath" $binary
- sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "$pkgdir/$binpath"
- sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "$pkgdir/$binpath"
+ local binpath="/opt/apache-spark/bin/${binary}"
+ ln -s "${binpath}" ${binary}
+ sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "${pkgdir}/${binpath}"
+ sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "${pkgdir}/${binpath}"
done
- mkdir -p $pkgdir/etc/profile.d
- echo '#!/bin/sh' > $pkgdir/etc/profile.d/apache-spark.sh
- echo 'SPARK_HOME=/opt/apache-spark' >> $pkgdir/etc/profile.d/apache-spark.sh
- echo 'export SPARK_HOME' >> $pkgdir/etc/profile.d/apache-spark.sh
- chmod 755 $pkgdir/etc/profile.d/apache-spark.sh
+ mkdir -p ${pkgdir}/etc/profile.d
+ echo '#!/bin/sh' > ${pkgdir}/etc/profile.d/apache-spark.sh
+ echo 'export SPARK_HOME=/opt/apache-spark' >> ${pkgdir}/etc/profile.d/apache-spark.sh
+ chmod 755 ${pkgdir}/etc/profile.d/apache-spark.sh
- install -Dm644 "$srcdir/apache-spark-master.service" "$pkgdir/usr/lib/systemd/system/apache-spark-master.service"
- install -Dm644 "$srcdir/apache-spark-slave@.service" "$pkgdir/usr/lib/systemd/system/apache-spark-slave@.service"
- install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
+ install -Dm644 "${srcdir}/apache-spark-master.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-master.service"
+ install -Dm644 "${srcdir}/apache-spark-slave@.service" "${pkgdir}/usr/lib/systemd/system/apache-spark-slave@.service"
+ install -Dm644 "${srcdir}/spark-env.sh" "${pkgdir}/etc/apache-spark/spark-env.sh"
for script in run-master.sh run-slave.sh spark-daemon-run.sh; do
- install -Dm755 "$srcdir/$script" "$pkgdir/opt/apache-spark/sbin/$script"
+ install -Dm755 "${srcdir}/${script}" "${pkgdir}/opt/apache-spark/sbin/${script}"
done
- install -Dm644 "$srcdir/spark-${pkgver}-bin-hadoop2.7/conf"/* "$pkgdir/etc/apache-spark"
+ install -Dm644 "${srcdir}/spark-${pkgver}-bin-hadoop2.7/conf"/* "${pkgdir}/etc/apache-spark"
- cd "$pkgdir/opt/apache-spark"
+ cd "${pkgdir}/opt/apache-spark"
mv conf conf-templates
ln -sf "/etc/apache-spark" conf
ln -sf "/var/lib/apache-spark/work" .
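
The rewritten package() above drops the no-op prepare() and the PKGEXT override, installs Spark under /opt/apache-spark, symlinks the launchers into /usr/bin, and writes a profile.d snippet exporting SPARK_HOME. A minimal sketch of verifying the result after installing the built package (illustrative only; paths follow the PKGBUILD above):

    # build and install from the directory containing this PKGBUILD
    makepkg -si

    # the profile.d snippet written by package() exports SPARK_HOME
    cat /etc/profile.d/apache-spark.sh   # '#!/bin/sh' then 'export SPARK_HOME=/opt/apache-spark'

    # the /usr/bin wrappers are symlinks into /opt/apache-spark/bin
    readlink /usr/bin/spark-shell        # -> /opt/apache-spark/bin/spark-shell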
diff --git a/apache-spark-slave@.service b/apache-spark-slave@.service
index 453b3465ce36..6a19518bdc14 100644
--- a/apache-spark-slave@.service
+++ b/apache-spark-slave@.service
@@ -1,5 +1,5 @@
[Unit]
-Description=Apache Spark Standalone Master
+Description=Apache Spark Standalone Slave
After=network.target
[Service]
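
The change above only corrects the unit's Description; apache-spark-slave@.service remains a template unit instantiated once per worker. A hedged usage sketch (how the instance name is interpreted depends on the unit's ExecStart and run-slave.sh, neither of which appears in this diff):

    # start the standalone master shipped by this package
    systemctl enable --now apache-spark-master.service

    # start one worker instance of the template unit
    systemctl enable --now apache-spark-slave@1.service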
diff --git a/other-pkgbuild b/other-pkgbuild
deleted file mode 100644
index 2e7d2aac24c1..000000000000
--- a/other-pkgbuild
+++ /dev/null
@@ -1,60 +0,0 @@
-# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
-# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
-
-pkgname=apache-spark
-pkgver=1.4.0
-pkgrel=1
-pkgdesc="fast and general engine for large-scale data processing"
-arch=('any')
-url="http://spark.apache.org"
-license=('APACHE')
-depends=('maven' 'java-environment>=6' 'scala' 'python2>=2.7')
-optdepends=('python: PYSPARK_PYTHON=python3 pyspark'
- 'ipython: PYSPARK_DRIVER_PYTHON=ipython pyspark; IPYTHON=1 pyspark')
-install=apache-spark.install
-source=("http://d3kbcqa49mib13.cloudfront.net/spark-$pkgver.tgz"
- 'apache-spark-standalone.service'
- 'spark-env.sh')
-md5sums=('180382ccce97616bcbf5f8278411519f'
- 'bb7d8b85366e6f9cc0b2777eaea161a8'
- '0913001583e607849270090555dbd309')
-backup=('etc/apache-spark/spark-env.sh')
-
-PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
-
-prepare() {
- cd "$srcdir/spark-$pkgver"
-
- sed -i 's|pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid|pid=/var/lib/apache-spark/spark-daemon.pid|' sbin/spark-daemon.sh
-}
-
-build() {
- cd "$srcdir/spark-$pkgver"
-
- export MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
-
- dev/change-version-to-2.11.sh
-
- JAVA_HOME=/usr/lib/jvm/default-runtime ./make-distribution.sh -Dscala-2.11 -Dmaven.repo.local=/tmp
-}
-
-package() {
- cd "$srcdir/spark-$pkgver"
-
- install -d "$pkgdir/usr/bin" "$pkgdir/usr/share"
-
- cp -r "$srcdir/spark-$pkgver/dist" "$pkgdir/usr/share/apache-spark/"
-
- cd "$pkgdir/usr/bin"
- for binary in beeline pyspark sparkR spark-class spark-shell spark-sql spark-submit load-spark-env.sh; do
- binpath="/usr/share/apache-spark/bin/$binary"
- ln -s "$binpath" $binary
- sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/usr/share/apache-spark|' "$pkgdir/$binpath"
- done
-
- install -Dm644 "$srcdir/apache-spark-standalone.service" "$pkgdir/usr/lib/systemd/system/apache-spark-standalone.service"
- install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
-
- cd "$pkgdir/usr/share/apache-spark/conf"
- ln -sf "/etc/apache-spark/spark-env.sh" .
-}