author      Xiang Gao   2016-07-16 09:43:17 +0800
committer   Xiang Gao   2016-07-16 09:43:17 +0800
commit      9614fbb2f6f61c9d4d619bda98411d610c5bdd08 (patch)
tree        08f9c1cb63122c5bd88079b427fefa6435d1065c
parent      e08f2efaa703478eef3c450f1e53820da46dd5aa (diff)
download    aur-9614fbb2f6f61c9d4d619bda98411d610c5bdd08.tar.gz
new version
-rw-r--r--   .SRCINFO   2
-rw-r--r--   PKGBUILD   6
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/.SRCINFO b/.SRCINFO
index 0733dcb190db..83fe48f5c81d 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,6 +1,6 @@
pkgbase = apache-spark-git
pkgdesc = fast and general engine for large-scale data processing
- pkgver = 2.1.0.SNAPSHOT.20160714.17009
+ pkgver = 2.1.0.SNAPSHOT.20160715.17026
pkgrel = 1
url = http://spark.apache.org
install = apache-spark.install
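Note: the .SRCINFO change above simply mirrors the pkgver bump in the PKGBUILD below; .SRCINFO is normally regenerated from the PKGBUILD rather than edited by hand. A minimal sketch of that step (the exact tool used here is an assumption, not part of this commit):

    # regenerate .SRCINFO after editing PKGBUILD
    makepkg --printsrcinfo > .SRCINFO
    # or, with the pkgbuild-introspection package:
    mksrcinfo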
diff --git a/PKGBUILD b/PKGBUILD
index 736004ac550f..3178f65e9530 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -3,7 +3,7 @@
# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
pkgname=apache-spark-git
-pkgver=2.1.0.SNAPSHOT.20160714.17009
+pkgver=2.1.0.SNAPSHOT.20160715.17026
pkgrel=1
pkgdesc="fast and general engine for large-scale data processing"
arch=('any')
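A version string of the form 2.1.0.SNAPSHOT.<date>.<commit count> is the kind of value a VCS package's pkgver() function derives from the checked-out git tree. A hypothetical sketch consistent with the numbers above (the real pkgver() is outside this hunk and may differ):

    pkgver() {
      cd "$srcdir/spark"
      # date of the latest commit with dashes stripped, plus total commit count,
      # e.g. 20160715 and 17026
      printf '2.1.0.SNAPSHOT.%s.%s' \
        "$(git log -1 --format=%cd --date=short | tr -d -)" \
        "$(git rev-list --count HEAD)"
    }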
@@ -57,7 +57,7 @@ package() {
sparkhome="$pkgdir/opt/apache-spark"
jarpath="assembly/target/scala-2.11/jars"
install -d "$sparkhome"
- cp -r "$srcdir/spark"/{bin,conf,data,docs,examples,licenses,python,R,sbin,CONTRIBUTING.md,LICENSE,NOTICE,README.md} "$pkgdir/opt/apache-spark/"
+ cp -r "$srcdir/spark"/{bin,conf,data,docs,examples,licenses,python,R,sbin,CONTRIBUTING.md,LICENSE,NOTICE,README.md} "$sparkhome"
install -D "$srcdir/spark/$jarpath"/* -t "$sparkhome/$jarpath"
rm -rf "$sparkhome/bin"/*.cmd
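This hunk swaps the literal "$pkgdir/opt/apache-spark/" destination for the $sparkhome variable defined two lines earlier, so the install prefix is spelled once. After the change the copy step reads roughly as follows (reassembled from the hunk, not a verbatim quote of the file):

    sparkhome="$pkgdir/opt/apache-spark"
    jarpath="assembly/target/scala-2.11/jars"
    install -d "$sparkhome"
    cp -r "$srcdir/spark"/{bin,conf,data,docs,examples,licenses,python,R,sbin,CONTRIBUTING.md,LICENSE,NOTICE,README.md} "$sparkhome"
    install -D "$srcdir/spark/$jarpath"/* -t "$sparkhome/$jarpath"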
@@ -65,7 +65,7 @@ package() {
install -d "$pkgdir/usr/bin" "$pkgdir/var/log/apache-spark"
for i in $(ls "$sparkhome/bin");do ln -sf /opt/apache-spark/bin/$i "$pkgdir/usr/bin"; done
install -D "$srcdir"/*.service -t "$pkgdir/usr/lib/systemd/system/"
- install -D "$srcdir"/{run-master.sh,run-slave.sh,spark-daemon-run.sh} "$pkgdir/opt/apache-spark/sbin/"
+ install -D "$srcdir"/{run-master.sh,run-slave.sh,spark-daemon-run.sh} "$sparkhome/sbin/"
install -D "$srcdir/spark/conf"/* "$srcdir/spark-env.sh" -t "$pkgdir/etc/apache-spark"
install -D "$srcdir/apache-spark.sh" "$pkgdir/etc/profile.d/apache-spark.sh"