diff options
author | François Garillot | 2017-10-10 12:58:40 -0700 |
---|---|---|
committer | François Garillot | 2017-10-10 12:58:40 -0700 |
commit | 30b4326be1370fcaacc6424a292347f3ee5b66b5 (patch) | |
tree | fb0e1f7af75f60c80ae12adf660b8d280de922a8 | |
parent | 37411ad734785673fda6450262fea27963cf299b (diff) | |
download | aur-30b4326be1370fcaacc6424a292347f3ee5b66b5.tar.gz |
Fix comments by @pdxleif
- symlink management improved
- hadoop not needed
- java 9 explicitly incompatible
-rw-r--r-- | .SRCINFO | 7 | ||||
-rw-r--r-- | PKGBUILD | 9 | ||||
-rw-r--r-- | spark-env.sh | 7 |
3 files changed, 14 insertions, 9 deletions
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,14 +1,15 @@
 # Generated by mksrcinfo v8
-# Tue Aug 8 15:53:19 UTC 2017
+# Tue Oct 10 19:58:35 UTC 2017
 pkgbase = apache-spark
 	pkgdesc = fast and general engine for large-scale data processing
 	pkgver = 2.2.0
-	pkgrel = 3
+	pkgrel = 4
 	url = http://spark.apache.org
 	install = apache-spark.install
 	arch = any
 	license = APACHE
 	depends = java-environment>=6
+	depends = java-environment<9
 	optdepends = python2: python2 support for pyspark
 	optdepends = ipython2: ipython2 support for pyspark
 	optdepends = python: python3 support for pyspark
@@ -27,7 +28,7 @@ pkgbase = apache-spark
 	sha1sums = 15b9577049638fc1afe8d2843ac1ae9dec470962
 	sha1sums = ac71d12070a9a10323e8ec5aed4346b1dd7f21c6
 	sha1sums = a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0
-	sha1sums = e52d327571e84b9b350bc594131fcaf50a3dd0f4
+	sha1sums = 3fa39d55075d4728bd447692d648053c9f6b07ec
 	sha1sums = 08557d2d5328d5c99e533e16366fd893fffaad78
 	sha1sums = 323445b8d64aea0534a2213d2600d438f406855b
 	sha1sums = 65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -3,12 +3,12 @@
 pkgname=apache-spark
 pkgver=2.2.0
-pkgrel=3
+pkgrel=4
 pkgdesc="fast and general engine for large-scale data processing"
 arch=('any')
 url="http://spark.apache.org"
 license=('APACHE')
-depends=('java-environment>=6')
+depends=('java-environment>=6' 'java-environment<9')
 optdepends=('python2: python2 support for pyspark'
             'ipython2: ipython2 support for pyspark'
             'python: python3 support for pyspark'
@@ -27,7 +27,7 @@ source=("http://d3kbcqa49mib13.cloudfront.net/spark-${pkgver}-bin-without-hadoop
 sha1sums=('15b9577049638fc1afe8d2843ac1ae9dec470962'
           'ac71d12070a9a10323e8ec5aed4346b1dd7f21c6'
           'a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0'
-          'e52d327571e84b9b350bc594131fcaf50a3dd0f4'
+          '3fa39d55075d4728bd447692d648053c9f6b07ec'
           '08557d2d5328d5c99e533e16366fd893fffaad78'
           '323445b8d64aea0534a2213d2600d438f406855b'
           '65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4')
@@ -48,10 +48,11 @@ package() {
   cp -r "$srcdir/spark-${pkgver}-bin-without-hadoop" "$pkgdir/opt/apache-spark/"
   cd "$pkgdir/usr/bin"
-  for binary in beeline pyspark sparkR spark-class spark-shell spark-sql spark-submit load-spark-env.sh; do
+  for binary in beeline pyspark sparkR spark-class spark-shell find-spark-home spark-sql spark-submit load-spark-env.sh; do
     binpath="/opt/apache-spark/bin/$binary"
     ln -s "$binpath" $binary
     sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "$pkgdir/$binpath"
+    sed -i -Ee 's/\$\(dirname "\$0"\)/$(dirname "$(readlink -f "$0")")/g' "$pkgdir/$binpath"
   done
 
   mkdir -p $pkgdir/etc/profile.d
diff --git a/spark-env.sh b/spark-env.sh
index 14625399700e..c6ed3dbc450e 100644
--- a/spark-env.sh
+++ b/spark-env.sh
@@ -1,6 +1,9 @@
 #!/usr/bin/env bash
-
 export JAVA_HOME=/usr/lib/jvm/default-runtime
-export SPARK_DIST_CLASSPATH=$(hadoop classpath)
+if (command -v hadoop 2> /dev/null); then
+  export SPARK_DIST_CLASSPATH=$(hadoop classpath)
+else
+  export SPARK_DIST_CLASSPATH=""
+fi
 SPARK_MASTER_IP=localhost
 SPARK_LOCAL_IP=localhost