summary log tree commit diff stats
path: root/PKGBUILD
diff options
context:
space:
mode:
Diffstat (limited to 'PKGBUILD')
-rw-r--r--PKGBUILD201
1 files changed, 62 insertions, 139 deletions
diff --git a/PKGBUILD b/PKGBUILD
index fbcbf7ac60c8..7db4d2acd797 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,153 +1,76 @@
-# Maintainer: Chris Severance aur.severach AatT spamgourmet.com
-# Contributor: David Roheim <david dot roheim at gmail dot com>
-# Contributor: Manuel Hoffmann <manuel@manuel-hoffmann.info>
-# Contributor: Markus Holtermann <aur@markusholtermann.eu>
-# Contributor: Mantas Vidutis <mantas.a.vidutis-at-gmail.com>
-# Contributor: Tianjiao Yin <ytj000@gmail.com>
-
-set -u
-pkgname='hadoop'
-pkgver='3.3.2'
-pkgrel='1'
-pkgdesc='Hadoop - MapReduce implementation and distributed filesystem'
-arch=('i686' 'x86_64')
-url='http://hadoop.apache.org'
+# Maintainer: William Tang <galaxyking0419@gmail.com>
+# Contributor: Chris Severance <aur.severach@spamgourmet.com>
+# Contributor: David Roheim <david.roheim@gmail.com>
+
+pkgname=hadoop
+pkgver=3.3.4
+pkgrel=1
+pkgdesc='An open-source software for reliable, scalable, distributed computing'
+arch=('x86_64')
+url='https://hadoop.apache.org/'
license=('apache')
-depends=('java-environment>=7' 'openssh' 'apache-ant' 'polkit')
-backup=(
- "etc/conf.d/${pkgname}"
- "etc/profile.d/${pkgname}.sh"
- "etc/${pkgname}/capacity-scheduler.xml"
- "etc/${pkgname}/configuration.xsl"
- "etc/${pkgname}/core-site.xml"
- "etc/${pkgname}/fair-scheduler.xml"
- "etc/${pkgname}/hadoop-env.sh"
- "etc/${pkgname}/hadoop-metrics2.properties"
- "etc/${pkgname}/hadoop-policy.xml"
- "etc/${pkgname}/hdfs-site.xml"
- "etc/${pkgname}/log4j.properties"
- "etc/${pkgname}/mapred-queue-acls.xml"
- "etc/${pkgname}/mapred-site.xml"
- "etc/${pkgname}/masters"
- "etc/${pkgname}/slaves"
- "etc/${pkgname}/ssl-client.xml.example"
- "etc/${pkgname}/ssl-server.xml.example"
- "etc/${pkgname}/taskcontroller.cfg"
- "etc/${pkgname}/task-log4j.properties"
-)
-options=('!strip')
-install="${pkgname}.install"
-source=(
- "http://mirror.reverse.net/pub/apache/hadoop/common/hadoop-${pkgver}/hadoop-${pkgver}.tar.gz"
- 'hadoop.profile'
- 'hadoop-conf'
- 'hadoop.sh'
- 'hadoop-namenode.service'
- 'hadoop-datanode.service'
- 'hadoop-secondarynamenode.service'
- 'hadoop-jobtracker.service'
- 'hadoop-tasktracker.service'
-)
-_verwatch=("${source[0]%|*}hadoop/common/" '.*href="hadoop-\([0-9\.]\+\)/.*' 'f')
-md5sums=('887bbfa2bc4bc2fdc8d0056dd7230469'
- '77fad322bff1877b0c5b4e6d693c979a'
- '2b662c5d0548cae29538060bbea9e96b'
- '56a70b8c94de7c1fb236ec24d7c11e05'
- '4e96bfa974fb7701b5636379c02f8470'
- '287feaae2d479042d7210ea5ef079a5e'
- '4dc609ae8d536dbb278a7e89c523384f'
- 'dba52a72c925365bc50a2e443a38f7f4'
- '8da68ae4b6f20a969df19945d359fc32')
-sha256sums=('b341587495b12eec0b244b517f21df88eb46ef634dc7dc3e5969455b80ce2ce5'
- 'b6607cb8531244d9be9241d8d4980d5695869f89fa598c8d24b35ec503df413b'
- 'e584c32246fd23fe5f35b13399372419584c27a234364b12d1479f3c87e70748'
- '93cb40f76f6bb0c1924b7ef083b82d39bf32190f86c28fc6304839703cdda7b1'
- '3fd40045f7657881cde0abee4ac1735232ba3d79121d724f74707252e19088b3'
- '230a58ab4e3462eb63662aee057965c5130247f7d9c98df83495c8da2c409fe5'
- '047d3d6aea9eada82780eaa93a55c6259fb1b63c68bc50cc26323e066c1b7f75'
- '5e9bc41b0086dfa7b237d1a7248a7f113299687f79ba0c58ba01eaeea0e35e79'
- '37d7a252292b365782d9e7a64d6019a78d9c561acf9b5af3c246b602d3e0a8ec')
-PKGEXT='.pkg.tar.gz' # Not worth the extra time to save 10%, not compatible with pacaur
-
-compile() {
- set -u
- cd "${pkgname}-${pkgver}"
- msg 'Cleaning...'
- ant clean
-
- msg 'Patching...'
- sed -i -e "s/${_devver}/${pkgver}/" 'build.xml'
- sed -i -e "s|<ivysettings>|<ivysettings>\n<caches defaultCacheDir=\"${srcdir}/ivy_cache\"/>|" 'ivy/ivysettings.xml'
-
- msg "Building..."
- ant -D'compile.native=true' bin-package
- set +u
+makedepends=('cmake' 'gcc' 'java-environment<=11' 'make' 'maven')
+depends=('inetutils' 'java-runtime-headless-openjdk=11' 'openssh' 'protobuf')
+
+source=("https://github.com/apache/hadoop/archive/refs/tags/rel/release-$pkgver.tar.gz"
+ "${pkgname}" "${pkgname}.sh"
+ hadoop-{datanode,historyserver,namenode,resourcemanager,secondarynamenode}.service)
+sha256sums=('a10fb474084c5d38b93ae901b453794d668ca98aa419a0546807b10cdae17827'
+ '1ec173297234b0d587255c1fac978b3929e967146ac542e2e1b44323f80e0bc5'
+ '3d20dd2ad1b773e7d4cb855c7556613e36ff56081749fe7b01c6e4fd0c743cc5'
+ '876d40b0a2ec9b9cec9b667d7909591ee0ef1acbd5417a0357c33539d8a54e1a'
+ 'f8f3b3a93a9e455da198ee93a873012399878459e78a3a7de0e396d69e81b61f'
+ '3d4aa2a660bd509e658c8109d9e91c00b0f0eee3a2ecab71a4785a76529ea242'
+ '2f6b8893a4b4e2ef120193ecfc2d929a8558d2a1c5b0af12e9224342ca90a158'
+ 'ed1e7f13d2023d49a51dc04c4459d12a53bff258a05b852a3e10a9fd2d18bbb8')
+
+install=$pkgname.install
+
+build() {
+ cd hadoop-rel-release-${pkgver}
+ mvn package -Pdist,native \
+ -Drequire.openssl -Drequire.zstd \
+ -Dmaven.javadoc.skip=true -DskipTests
}
package() {
- set -u
- local _usr_lib="${pkgdir}/usr/lib"
- local _hadoop_real_home="${_usr_lib}/${pkgname}-${pkgver}"
- local _hadoop_link_home="${_usr_lib}/${pkgname}"
-
- install -d "${_usr_lib}" "${pkgdir}/usr/lib/systemd/system"
- cp -pr "${srcdir}/${pkgname}-${pkgver}" "${_usr_lib}"
-
- #
- install -Dpm755 "${srcdir}/hadoop-conf" "${pkgdir}/etc/conf.d/hadoop"
- install -Dpm755 "${srcdir}/hadoop.profile" "${pkgdir}/etc/profile.d/hadoop.sh"
- install -Dpm644 "${srcdir}/"hadoop-*.service -t "${pkgdir}/usr/lib/systemd/system/"
-
- # we do not use soft link because we need put configures in backup array,
- # in order to preserve the conf when upgrade package.
- cp -pr "${_hadoop_real_home}/etc/hadoop" "${pkgdir}/etc"
- mv "${_hadoop_real_home}/etc" "${_hadoop_real_home}/orig_etc"
+ # Copy files to /usr
+ mkdir "$pkgdir"/usr
+ mv hadoop-rel-release-$pkgver/hadoop-dist/target/hadoop-$pkgver/* "$pkgdir"/usr/
- # todo: i need an own file :)
- install -Dm755 <(cat << EOF
-#!/bin/sh
-# Automatically generated by ${pkgname}-${pkgver} PKGBUILD from Arch Linux AUR
-# https://aur.archlinux.org/
-for f in /etc/profile.d/*.sh; do
- . "\${f}"
-done
-/usr/lib/hadoop/bin/hadoop "\$@"
-EOF
- ) "${pkgdir}/usr/bin/hadoop"
+ # Move sbin files to bin
+ cd "$pkgdir"/usr
+ mv sbin/* bin/
+ rmdir sbin
- cd "${_usr_lib}"
- ln -s "${pkgname}-${pkgver}" "${pkgname}"
+ # Move native libraries to /usr/lib
+ mv lib/native/* lib/
+ rmdir lib/native
- ## Disable IPv6 (comment out to disable IPv6 support):
- # sed -i 's|_OPTS="-D|_OPTS="-Djava.net.preferIPv4Stack=true -D|' hadoop-env.sh
+ # Move license and notice files
+ mkdir -p "$pkgdir"/usr/share/licenses/$pkgname/
+ mv licenses-binary/* LICENSE* NOTICE* README.txt "$pkgdir"/usr/share/licenses/$pkgname/
+ rmdir licenses-binary
-if ! :; then
- ## fix native
- if [ "${CARCH}" = 'i686' ]; then
- rm -rf 'lib/native/Linux-amd64-64'
- cd 'lib/native/Linux-i386-32'
- sed -i -e "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/i386/server :" 'libhadoop.la'
- fi
+ # Remove windows batch files
+ rm {etc/hadoop,bin,libexec,share/hadoop/tools/resourceestimator/bin}/*.cmd
- if [ "${CARCH}" = 'x86_64' ]; then
- rm -rf 'lib/native/Linux-i386-32'
- cd 'lib/native/Linux-amd64-64'
- sed -i "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/amd64/server :" 'libhadoop.la'
- fi
+ # Move etc directory
+ mv "$pkgdir"/usr/etc "$pkgdir"/
- ## Create some links, so Hadoop's KFS jar could access KFS libraries properly
- ## (it is still fine if KFS is not installed)
+ # Install profile script
+ cd "$srcdir"
+ mkdir "$pkgdir"/etc/profile.d
+ cp $pkgname.sh "$pkgdir"/etc/profile.d/
- msg 'Creating KFS links...'
+	# Install environment file
+ mkdir "$pkgdir"/etc/conf.d
+ cp $pkgname "$pkgdir"/etc/conf.d/
- for _lib in 'libkfsClient' 'libkfsCommon' 'libkfsEmulator' 'libkfsIO' 'libkfsMeta'; do
- for _ext in 'a' 'so'; do
- ln -sf "/usr/lib/${_lib}.${_ext}"
- done
- done
- ln -sf '/usr/lib/libkfs_access.so'
-fi
- set +u
+ # Install systemd service files
+ mkdir -p "$pkgdir"/usr/lib/systemd/system
+ cp $pkgname-{datanode,historyserver,namenode,resourcemanager,secondarynamenode}.service "$pkgdir"/usr/lib/systemd/system/
+ # Create required directories
+ mkdir -p "$pkgdir"/var/{lib,log}/hadoop
}
-set +u