author      François Garillot    2015-10-04 11:38:11 +0200
committer   François Garillot    2015-10-04 11:38:11 +0200
commit      b1747a8242a338b8c16fec71946a872c89a59816 (patch)
tree        0df9f4162d0d36fff3193522e3009c649580affb
download    aur-b1747a8242a338b8c16fec71946a872c89a59816.tar.gz
Initial conversion from old AUR
-rw-r--r--  .SRCINFO                          24
-rw-r--r--  PKGBUILD                          60
-rw-r--r--  apache-spark-standalone.service   14
-rw-r--r--  apache-spark.install              14
-rw-r--r--  other-pkgbuild                    60
-rw-r--r--  spark-env.sh                       4
6 files changed, 176 insertions, 0 deletions
diff --git a/.SRCINFO b/.SRCINFO
new file mode 100644
index 000000000000..95aed8b65b98
--- /dev/null
+++ b/.SRCINFO
@@ -0,0 +1,24 @@
+pkgbase = apache-spark
+	pkgdesc = fast and general engine for large-scale data processing
+	pkgver = 1.4.1
+	pkgrel = 2
+	url = http://spark.apache.org
+	install = apache-spark.install
+	arch = any
+	license = APACHE
+	depends = maven
+	depends = java-environment>=6
+	depends = scala
+	depends = python2>=2.7
+	optdepends = python: PYSPARK_PYTHON=python3 pyspark
+	optdepends = ipython: PYSPARK_DRIVER_PYTHON=ipython pyspark; IPYTHON=1 pyspark
+	backup = etc/apache-spark/spark-env.sh
+	source = http://d3kbcqa49mib13.cloudfront.net/spark-1.4.1.tgz
+	source = apache-spark-standalone.service
+	source = spark-env.sh
+	md5sums = 10ab725b33bb8c6ecc81c75e5018cefb
+	md5sums = bb7d8b85366e6f9cc0b2777eaea161a8
+	md5sums = 0913001583e607849270090555dbd309
+
+pkgname = apache-spark
+
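The .SRCINFO above mirrors the PKGBUILD's metadata for the AUR web interface; it is normally regenerated from the PKGBUILD rather than edited by hand, along these lines:

    # regenerate .SRCINFO after changing the PKGBUILD
    # (older tooling used mksrcinfo from pkgbuild-introspection)
    makepkg --printsrcinfo > .SRCINFO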
diff --git a/PKGBUILD b/PKGBUILD
new file mode 100644
index 000000000000..314beee6c5b6
--- /dev/null
+++ b/PKGBUILD
@@ -0,0 +1,60 @@
+# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
+# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
+
+pkgname=apache-spark
+pkgver=1.4.1
+pkgrel=2
+pkgdesc="fast and general engine for large-scale data processing"
+arch=('any')
+url="http://spark.apache.org"
+license=('APACHE')
+depends=('maven' 'java-environment>=6' 'scala' 'python2>=2.7')
+optdepends=('python: PYSPARK_PYTHON=python3 pyspark'
+            'ipython: PYSPARK_DRIVER_PYTHON=ipython pyspark; IPYTHON=1 pyspark')
+install=apache-spark.install
+source=("http://d3kbcqa49mib13.cloudfront.net/spark-$pkgver.tgz"
+        'apache-spark-standalone.service'
+        'spark-env.sh')
+md5sums=('10ab725b33bb8c6ecc81c75e5018cefb'
+         'bb7d8b85366e6f9cc0b2777eaea161a8'
+         '0913001583e607849270090555dbd309')
+backup=('etc/apache-spark/spark-env.sh')
+
+PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
+
+prepare() {
+  cd "$srcdir/spark-$pkgver"
+
+  sed -i 's|pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid|pid=/var/lib/apache-spark/spark-daemon.pid|' sbin/spark-daemon.sh
+}
+
+build() {
+  cd "$srcdir/spark-$pkgver"
+
+  export MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
+
+  dev/change-version-to-2.11.sh
+
+  JAVA_HOME=/usr/lib/jvm/default-runtime ./make-distribution.sh -Dscala-2.11 -DskipTests -Dmaven.repo.local=/tmp
+}
+
+package() {
+  cd "$srcdir/spark-$pkgver"
+
+  install -d "$pkgdir/usr/bin" "$pkgdir/usr/share"
+
+  cp -r "$srcdir/spark-$pkgver/dist" "$pkgdir/usr/share/apache-spark/"
+
+  cd "$pkgdir/usr/bin"
+  for binary in beeline pyspark sparkR spark-class spark-shell spark-sql spark-submit load-spark-env.sh; do
+    binpath="/usr/share/apache-spark/bin/$binary"
+    ln -s "$binpath" "$binary"
+    sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/usr/share/apache-spark|' "$pkgdir/$binpath"
+  done
+
+  install -Dm644 "$srcdir/apache-spark-standalone.service" "$pkgdir/usr/lib/systemd/system/apache-spark-standalone.service"
+  install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
+
+  cd "$pkgdir/usr/share/apache-spark/conf"
+  ln -sf "/etc/apache-spark/spark-env.sh" .
+}
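For reference, a minimal sketch of building and installing this package from a checkout of the repository, assuming the build dependencies listed above are available:

    # clone the AUR repository and build; -s pulls in dependencies, -i installs
    git clone https://aur.archlinux.org/apache-spark.git
    cd apache-spark
    makepkg -si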
diff --git a/apache-spark-standalone.service b/apache-spark-standalone.service
new file mode 100644
index 000000000000..3f81c9691450
--- /dev/null
+++ b/apache-spark-standalone.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Apache Spark Standalone Local Cluster
+After=network.target
+
+[Service]
+User=apache-spark
+Group=apache-spark
+Environment=SPARK_LOG_DIR=/var/lib/apache-spark/logs
+PIDFile=/var/lib/apache-spark/spark-daemon.pid
+ExecStart=/usr/share/apache-spark/sbin/start-all.sh
+ExecStop=/usr/share/apache-spark/sbin/stop-all.sh
+
+[Install]
+WantedBy=multi-user.target
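Once the package is installed, the standalone cluster is driven through systemd in the usual way, for example:

    # launch the local master and worker now, and at every boot
    systemctl enable --now apache-spark-standalone.service
    # daemon logs are written under SPARK_LOG_DIR, i.e. /var/lib/apache-spark/logs
    systemctl status apache-spark-standalone.service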
diff --git a/apache-spark.install b/apache-spark.install
new file mode 100644
index 000000000000..9ffbbf29abe8
--- /dev/null
+++ b/apache-spark.install
@@ -0,0 +1,14 @@
+post_install() {
+  groupadd -r -f apache-spark
+  useradd -r -g apache-spark -s /usr/bin/nologin -d /var/lib/apache-spark apache-spark || true
+
+  [[ ! -d /var/lib/apache-spark ]] &&
+    install -d /var/lib/apache-spark
+
+  chown -R apache-spark:apache-spark /var/lib/apache-spark
+}
+
+post_remove() {
+  /usr/sbin/userdel apache-spark
+  /usr/sbin/groupdel apache-spark
+}
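pacman runs post_install after the files are in place and post_remove after they are gone; a quick way to confirm the dedicated service account was created as intended:

    # the account should exist with the nologin shell and the state directory as home
    getent passwd apache-spark
    id apache-spark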
diff --git a/other-pkgbuild b/other-pkgbuild
new file mode 100644
index 000000000000..2e7d2aac24c1
--- /dev/null
+++ b/other-pkgbuild
@@ -0,0 +1,60 @@
+# Maintainer: François Garillot ("huitseeker") <francois [at] garillot.net>
+# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
+
+pkgname=apache-spark
+pkgver=1.4.0
+pkgrel=1
+pkgdesc="fast and general engine for large-scale data processing"
+arch=('any')
+url="http://spark.apache.org"
+license=('APACHE')
+depends=('maven' 'java-environment>=6' 'scala' 'python2>=2.7')
+optdepends=('python: PYSPARK_PYTHON=python3 pyspark'
+            'ipython: PYSPARK_DRIVER_PYTHON=ipython pyspark; IPYTHON=1 pyspark')
+install=apache-spark.install
+source=("http://d3kbcqa49mib13.cloudfront.net/spark-$pkgver.tgz"
+        'apache-spark-standalone.service'
+        'spark-env.sh')
+md5sums=('180382ccce97616bcbf5f8278411519f'
+         'bb7d8b85366e6f9cc0b2777eaea161a8'
+         '0913001583e607849270090555dbd309')
+backup=('etc/apache-spark/spark-env.sh')
+
+PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
+
+prepare() {
+  cd "$srcdir/spark-$pkgver"
+
+  sed -i 's|pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid|pid=/var/lib/apache-spark/spark-daemon.pid|' sbin/spark-daemon.sh
+}
+
+build() {
+  cd "$srcdir/spark-$pkgver"
+
+  export MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
+
+  dev/change-version-to-2.11.sh
+
+  JAVA_HOME=/usr/lib/jvm/default-runtime ./make-distribution.sh -Dscala-2.11 -Dmaven.repo.local=/tmp
+}
+
+package() {
+  cd "$srcdir/spark-$pkgver"
+
+  install -d "$pkgdir/usr/bin" "$pkgdir/usr/share"
+
+  cp -r "$srcdir/spark-$pkgver/dist" "$pkgdir/usr/share/apache-spark/"
+
+  cd "$pkgdir/usr/bin"
+  for binary in beeline pyspark sparkR spark-class spark-shell spark-sql spark-submit load-spark-env.sh; do
+    binpath="/usr/share/apache-spark/bin/$binary"
+    ln -s "$binpath" "$binary"
+    sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/usr/share/apache-spark|' "$pkgdir/$binpath"
+  done
+
+  install -Dm644 "$srcdir/apache-spark-standalone.service" "$pkgdir/usr/lib/systemd/system/apache-spark-standalone.service"
+  install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
+
+  cd "$pkgdir/usr/share/apache-spark/conf"
+  ln -sf "/etc/apache-spark/spark-env.sh" .
+}
diff --git a/spark-env.sh b/spark-env.sh
new file mode 100644
index 000000000000..fe83e9da2e9f
--- /dev/null
+++ b/spark-env.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+SPARK_MASTER_IP=127.0.0.1
+SPARK_LOCAL_IP=127.0.0.1
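Because spark-env.sh is listed in backup=, local edits under /etc/apache-spark survive package upgrades. A hedged example of further standalone-mode settings Spark reads from this file (the values are illustrative, not defaults shipped by the package):

    # cap the resources the local worker hands to executors
    SPARK_WORKER_CORES=2     # number of cores the worker may use
    SPARK_WORKER_MEMORY=2g   # total memory available to executors on this worker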