summarylogtreecommitdiffstats
diff options
context:
space:
mode:
authorWilliam Tang2022-08-19 00:01:42 +0800
committerWilliam Tang2022-08-19 00:07:30 +0800
commite227fa6c494449dcd96599020ae359c2e2ca76ab (patch)
treed974af9fa6e4aa87978cef42e2e20c8935ccd1f8
parentdb5fe900379b5d28168d6ec5eabd0011c56c66d0 (diff)
downloadaur-e227fa6c494449dcd96599020ae359c2e2ca76ab.tar.gz
hadoop 3.3.4
-rw-r--r--.SRCINFO75
-rw-r--r--PKGBUILD201
-rw-r--r--hadoop6
-rw-r--r--hadoop-conf5
-rw-r--r--hadoop-datanode.service14
-rw-r--r--hadoop-historyserver.service13
-rw-r--r--hadoop-jobtracker.service13
-rw-r--r--hadoop-namenode.service11
-rw-r--r--hadoop-resourcemanager.service15
-rw-r--r--hadoop-secondarynamenode.service12
-rw-r--r--hadoop-tasktracker.service13
-rw-r--r--hadoop.install35
-rw-r--r--hadoop.profile4
-rw-r--r--hadoop.sh6
14 files changed, 157 insertions, 266 deletions
diff --git a/.SRCINFO b/.SRCINFO
index 6eca769f2383..9e0e64805bfc 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,62 +1,35 @@
pkgbase = hadoop
- pkgdesc = Hadoop - MapReduce implementation and distributed filesystem
- pkgver = 3.3.2
+ pkgdesc = An open-source software for reliable, scalable, distributed computing
+ pkgver = 3.3.4
pkgrel = 1
- url = http://hadoop.apache.org
+ url = https://hadoop.apache.org/
install = hadoop.install
- arch = i686
arch = x86_64
license = apache
- depends = java-environment>=7
+ makedepends = cmake
+ makedepends = gcc
+ makedepends = java-environment<=11
+ makedepends = make
+ makedepends = maven
+ depends = inetutils
+ depends = java-runtime-headless-openjdk=11
depends = openssh
- depends = apache-ant
- depends = polkit
- options = !strip
- backup = etc/conf.d/hadoop
- backup = etc/profile.d/hadoop.sh
- backup = etc/hadoop/capacity-scheduler.xml
- backup = etc/hadoop/configuration.xsl
- backup = etc/hadoop/core-site.xml
- backup = etc/hadoop/fair-scheduler.xml
- backup = etc/hadoop/hadoop-env.sh
- backup = etc/hadoop/hadoop-metrics2.properties
- backup = etc/hadoop/hadoop-policy.xml
- backup = etc/hadoop/hdfs-site.xml
- backup = etc/hadoop/log4j.properties
- backup = etc/hadoop/mapred-queue-acls.xml
- backup = etc/hadoop/mapred-site.xml
- backup = etc/hadoop/masters
- backup = etc/hadoop/slaves
- backup = etc/hadoop/ssl-client.xml.example
- backup = etc/hadoop/ssl-server.xml.example
- backup = etc/hadoop/taskcontroller.cfg
- backup = etc/hadoop/task-log4j.properties
- source = http://mirror.reverse.net/pub/apache/hadoop/common/hadoop-3.3.2/hadoop-3.3.2.tar.gz
- source = hadoop.profile
- source = hadoop-conf
+ depends = protobuf
+ source = https://github.com/apache/hadoop/archive/refs/tags/rel/release-3.3.4.tar.gz
+ source = hadoop
source = hadoop.sh
- source = hadoop-namenode.service
source = hadoop-datanode.service
+ source = hadoop-historyserver.service
+ source = hadoop-namenode.service
+ source = hadoop-resourcemanager.service
source = hadoop-secondarynamenode.service
- source = hadoop-jobtracker.service
- source = hadoop-tasktracker.service
- md5sums = 887bbfa2bc4bc2fdc8d0056dd7230469
- md5sums = 77fad322bff1877b0c5b4e6d693c979a
- md5sums = 2b662c5d0548cae29538060bbea9e96b
- md5sums = 56a70b8c94de7c1fb236ec24d7c11e05
- md5sums = 4e96bfa974fb7701b5636379c02f8470
- md5sums = 287feaae2d479042d7210ea5ef079a5e
- md5sums = 4dc609ae8d536dbb278a7e89c523384f
- md5sums = dba52a72c925365bc50a2e443a38f7f4
- md5sums = 8da68ae4b6f20a969df19945d359fc32
- sha256sums = b341587495b12eec0b244b517f21df88eb46ef634dc7dc3e5969455b80ce2ce5
- sha256sums = b6607cb8531244d9be9241d8d4980d5695869f89fa598c8d24b35ec503df413b
- sha256sums = e584c32246fd23fe5f35b13399372419584c27a234364b12d1479f3c87e70748
- sha256sums = 93cb40f76f6bb0c1924b7ef083b82d39bf32190f86c28fc6304839703cdda7b1
- sha256sums = 3fd40045f7657881cde0abee4ac1735232ba3d79121d724f74707252e19088b3
- sha256sums = 230a58ab4e3462eb63662aee057965c5130247f7d9c98df83495c8da2c409fe5
- sha256sums = 047d3d6aea9eada82780eaa93a55c6259fb1b63c68bc50cc26323e066c1b7f75
- sha256sums = 5e9bc41b0086dfa7b237d1a7248a7f113299687f79ba0c58ba01eaeea0e35e79
- sha256sums = 37d7a252292b365782d9e7a64d6019a78d9c561acf9b5af3c246b602d3e0a8ec
+ sha256sums = a10fb474084c5d38b93ae901b453794d668ca98aa419a0546807b10cdae17827
+ sha256sums = 1ec173297234b0d587255c1fac978b3929e967146ac542e2e1b44323f80e0bc5
+ sha256sums = 3d20dd2ad1b773e7d4cb855c7556613e36ff56081749fe7b01c6e4fd0c743cc5
+ sha256sums = 876d40b0a2ec9b9cec9b667d7909591ee0ef1acbd5417a0357c33539d8a54e1a
+ sha256sums = f8f3b3a93a9e455da198ee93a873012399878459e78a3a7de0e396d69e81b61f
+ sha256sums = 3d4aa2a660bd509e658c8109d9e91c00b0f0eee3a2ecab71a4785a76529ea242
+ sha256sums = 2f6b8893a4b4e2ef120193ecfc2d929a8558d2a1c5b0af12e9224342ca90a158
+ sha256sums = ed1e7f13d2023d49a51dc04c4459d12a53bff258a05b852a3e10a9fd2d18bbb8
pkgname = hadoop
diff --git a/PKGBUILD b/PKGBUILD
index fbcbf7ac60c8..7db4d2acd797 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,153 +1,76 @@
-# Maintainer: Chris Severance aur.severach AatT spamgourmet.com
-# Contributor: David Roheim <david dot roheim at gmail dot com>
-# Contributor: Manuel Hoffmann <manuel@manuel-hoffmann.info>
-# Contributor: Markus Holtermann <aur@markusholtermann.eu>
-# Contributor: Mantas Vidutis <mantas.a.vidutis-at-gmail.com>
-# Contributor: Tianjiao Yin <ytj000@gmail.com>
-
-set -u
-pkgname='hadoop'
-pkgver='3.3.2'
-pkgrel='1'
-pkgdesc='Hadoop - MapReduce implementation and distributed filesystem'
-arch=('i686' 'x86_64')
-url='http://hadoop.apache.org'
+# Maintainer: William Tang <galaxyking0419@gmail.com>
+# Contributor: Chris Severance <aur.severach@spamgourmet.com>
+# Contributor: David Roheim <david.roheim@gmail.com>
+
+pkgname=hadoop
+pkgver=3.3.4
+pkgrel=1
+pkgdesc='An open-source software for reliable, scalable, distributed computing'
+arch=('x86_64')
+url='https://hadoop.apache.org/'
license=('apache')
-depends=('java-environment>=7' 'openssh' 'apache-ant' 'polkit')
-backup=(
- "etc/conf.d/${pkgname}"
- "etc/profile.d/${pkgname}.sh"
- "etc/${pkgname}/capacity-scheduler.xml"
- "etc/${pkgname}/configuration.xsl"
- "etc/${pkgname}/core-site.xml"
- "etc/${pkgname}/fair-scheduler.xml"
- "etc/${pkgname}/hadoop-env.sh"
- "etc/${pkgname}/hadoop-metrics2.properties"
- "etc/${pkgname}/hadoop-policy.xml"
- "etc/${pkgname}/hdfs-site.xml"
- "etc/${pkgname}/log4j.properties"
- "etc/${pkgname}/mapred-queue-acls.xml"
- "etc/${pkgname}/mapred-site.xml"
- "etc/${pkgname}/masters"
- "etc/${pkgname}/slaves"
- "etc/${pkgname}/ssl-client.xml.example"
- "etc/${pkgname}/ssl-server.xml.example"
- "etc/${pkgname}/taskcontroller.cfg"
- "etc/${pkgname}/task-log4j.properties"
-)
-options=('!strip')
-install="${pkgname}.install"
-source=(
- "http://mirror.reverse.net/pub/apache/hadoop/common/hadoop-${pkgver}/hadoop-${pkgver}.tar.gz"
- 'hadoop.profile'
- 'hadoop-conf'
- 'hadoop.sh'
- 'hadoop-namenode.service'
- 'hadoop-datanode.service'
- 'hadoop-secondarynamenode.service'
- 'hadoop-jobtracker.service'
- 'hadoop-tasktracker.service'
-)
-_verwatch=("${source[0]%|*}hadoop/common/" '.*href="hadoop-\([0-9\.]\+\)/.*' 'f')
-md5sums=('887bbfa2bc4bc2fdc8d0056dd7230469'
- '77fad322bff1877b0c5b4e6d693c979a'
- '2b662c5d0548cae29538060bbea9e96b'
- '56a70b8c94de7c1fb236ec24d7c11e05'
- '4e96bfa974fb7701b5636379c02f8470'
- '287feaae2d479042d7210ea5ef079a5e'
- '4dc609ae8d536dbb278a7e89c523384f'
- 'dba52a72c925365bc50a2e443a38f7f4'
- '8da68ae4b6f20a969df19945d359fc32')
-sha256sums=('b341587495b12eec0b244b517f21df88eb46ef634dc7dc3e5969455b80ce2ce5'
- 'b6607cb8531244d9be9241d8d4980d5695869f89fa598c8d24b35ec503df413b'
- 'e584c32246fd23fe5f35b13399372419584c27a234364b12d1479f3c87e70748'
- '93cb40f76f6bb0c1924b7ef083b82d39bf32190f86c28fc6304839703cdda7b1'
- '3fd40045f7657881cde0abee4ac1735232ba3d79121d724f74707252e19088b3'
- '230a58ab4e3462eb63662aee057965c5130247f7d9c98df83495c8da2c409fe5'
- '047d3d6aea9eada82780eaa93a55c6259fb1b63c68bc50cc26323e066c1b7f75'
- '5e9bc41b0086dfa7b237d1a7248a7f113299687f79ba0c58ba01eaeea0e35e79'
- '37d7a252292b365782d9e7a64d6019a78d9c561acf9b5af3c246b602d3e0a8ec')
-PKGEXT='.pkg.tar.gz' # Not worth the extra time to save 10%, not compatible with pacaur
-
-compile() {
- set -u
- cd "${pkgname}-${pkgver}"
- msg 'Cleaning...'
- ant clean
-
- msg 'Patching...'
- sed -i -e "s/${_devver}/${pkgver}/" 'build.xml'
- sed -i -e "s|<ivysettings>|<ivysettings>\n<caches defaultCacheDir=\"${srcdir}/ivy_cache\"/>|" 'ivy/ivysettings.xml'
-
- msg "Building..."
- ant -D'compile.native=true' bin-package
- set +u
+makedepends=('cmake' 'gcc' 'java-environment<=11' 'make' 'maven')
+depends=('inetutils' 'java-runtime-headless-openjdk=11' 'openssh' 'protobuf')
+
+source=("https://github.com/apache/hadoop/archive/refs/tags/rel/release-$pkgver.tar.gz"
+ "${pkgname}" "${pkgname}.sh"
+ hadoop-{datanode,historyserver,namenode,resourcemanager,secondarynamenode}.service)
+sha256sums=('a10fb474084c5d38b93ae901b453794d668ca98aa419a0546807b10cdae17827'
+ '1ec173297234b0d587255c1fac978b3929e967146ac542e2e1b44323f80e0bc5'
+ '3d20dd2ad1b773e7d4cb855c7556613e36ff56081749fe7b01c6e4fd0c743cc5'
+ '876d40b0a2ec9b9cec9b667d7909591ee0ef1acbd5417a0357c33539d8a54e1a'
+ 'f8f3b3a93a9e455da198ee93a873012399878459e78a3a7de0e396d69e81b61f'
+ '3d4aa2a660bd509e658c8109d9e91c00b0f0eee3a2ecab71a4785a76529ea242'
+ '2f6b8893a4b4e2ef120193ecfc2d929a8558d2a1c5b0af12e9224342ca90a158'
+ 'ed1e7f13d2023d49a51dc04c4459d12a53bff258a05b852a3e10a9fd2d18bbb8')
+
+install=$pkgname.install
+
+build() {
+ cd hadoop-rel-release-${pkgver}
+ mvn package -Pdist,native \
+ -Drequire.openssl -Drequire.zstd \
+ -Dmaven.javadoc.skip=true -DskipTests
}
package() {
- set -u
- local _usr_lib="${pkgdir}/usr/lib"
- local _hadoop_real_home="${_usr_lib}/${pkgname}-${pkgver}"
- local _hadoop_link_home="${_usr_lib}/${pkgname}"
-
- install -d "${_usr_lib}" "${pkgdir}/usr/lib/systemd/system"
- cp -pr "${srcdir}/${pkgname}-${pkgver}" "${_usr_lib}"
-
- #
- install -Dpm755 "${srcdir}/hadoop-conf" "${pkgdir}/etc/conf.d/hadoop"
- install -Dpm755 "${srcdir}/hadoop.profile" "${pkgdir}/etc/profile.d/hadoop.sh"
- install -Dpm644 "${srcdir}/"hadoop-*.service -t "${pkgdir}/usr/lib/systemd/system/"
-
- # we do not use soft link because we need put configures in backup array,
- # in order to preserve the conf when upgrade package.
- cp -pr "${_hadoop_real_home}/etc/hadoop" "${pkgdir}/etc"
- mv "${_hadoop_real_home}/etc" "${_hadoop_real_home}/orig_etc"
+ # Copy files to /usr
+ mkdir "$pkgdir"/usr
+ mv hadoop-rel-release-$pkgver/hadoop-dist/target/hadoop-$pkgver/* "$pkgdir"/usr/
- # todo: i need an own file :)
- install -Dm755 <(cat << EOF
-#!/bin/sh
-# Automatically generated by ${pkgname}-${pkgver} PKGBUILD from Arch Linux AUR
-# https://aur.archlinux.org/
-for f in /etc/profile.d/*.sh; do
- . "\${f}"
-done
-/usr/lib/hadoop/bin/hadoop "\$@"
-EOF
- ) "${pkgdir}/usr/bin/hadoop"
+ # Move sbin files to bin
+ cd "$pkgdir"/usr
+ mv sbin/* bin/
+ rmdir sbin
- cd "${_usr_lib}"
- ln -s "${pkgname}-${pkgver}" "${pkgname}"
+ # Move native libraries to /usr/lib
+ mv lib/native/* lib/
+ rmdir lib/native
- ## Disable IPv6 (comment out to disable IPv6 support):
- # sed -i 's|_OPTS="-D|_OPTS="-Djava.net.preferIPv4Stack=true -D|' hadoop-env.sh
+ # Move license and notice files
+ mkdir -p "$pkgdir"/usr/share/licenses/$pkgname/
+ mv licenses-binary/* LICENSE* NOTICE* README.txt "$pkgdir"/usr/share/licenses/$pkgname/
+ rmdir licenses-binary
-if ! :; then
- ## fix native
- if [ "${CARCH}" = 'i686' ]; then
- rm -rf 'lib/native/Linux-amd64-64'
- cd 'lib/native/Linux-i386-32'
- sed -i -e "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/i386/server :" 'libhadoop.la'
- fi
+ # Remove windows batch files
+ rm {etc/hadoop,bin,libexec,share/hadoop/tools/resourceestimator/bin}/*.cmd
- if [ "${CARCH}" = 'x86_64' ]; then
- rm -rf 'lib/native/Linux-i386-32'
- cd 'lib/native/Linux-amd64-64'
- sed -i "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/amd64/server :" 'libhadoop.la'
- fi
+ # Move etc directory
+ mv "$pkgdir"/usr/etc "$pkgdir"/
- ## Create some links, so Hadoop's KFS jar could access KFS libraries properly
- ## (it is still fine if KFS is not installed)
+ # Install profile script
+ cd "$srcdir"
+ mkdir "$pkgdir"/etc/profile.d
+ cp $pkgname.sh "$pkgdir"/etc/profile.d/
- msg 'Creating KFS links...'
+	# Install environment file
+ mkdir "$pkgdir"/etc/conf.d
+ cp $pkgname "$pkgdir"/etc/conf.d/
- for _lib in 'libkfsClient' 'libkfsCommon' 'libkfsEmulator' 'libkfsIO' 'libkfsMeta'; do
- for _ext in 'a' 'so'; do
- ln -sf "/usr/lib/${_lib}.${_ext}"
- done
- done
- ln -sf '/usr/lib/libkfs_access.so'
-fi
- set +u
+ # Install systemd service files
+ mkdir -p "$pkgdir"/usr/lib/systemd/system
+ cp $pkgname-{datanode,historyserver,namenode,resourcemanager,secondarynamenode}.service "$pkgdir"/usr/lib/systemd/system/
+ # Create required directories
+ mkdir -p "$pkgdir"/var/{lib,log}/hadoop
}
-set +u
diff --git a/hadoop b/hadoop
new file mode 100644
index 000000000000..969ee3b62500
--- /dev/null
+++ b/hadoop
@@ -0,0 +1,6 @@
+HADOOP_COMMON_LIB_NATIVE_DIR=/usr/lib
+HADOOP_CONF_DIR=/etc/hadoop
+HADOOP_LOG_DIR=/var/log/hadoop
+HADOOP_USERNAME=hadoop
+
+JAVA_HOME=/usr/lib/jvm/java-11-openjdk
diff --git a/hadoop-conf b/hadoop-conf
deleted file mode 100644
index a3f49ef90816..000000000000
--- a/hadoop-conf
+++ /dev/null
@@ -1,5 +0,0 @@
-# example configuration file
-
-HADOOP_USERNAME="hadoop"
-
-. /etc/profile.d/hadoop.sh \ No newline at end of file
diff --git a/hadoop-datanode.service b/hadoop-datanode.service
index 7bbf74970ab5..cd22602bb577 100644
--- a/hadoop-datanode.service
+++ b/hadoop-datanode.service
@@ -1,13 +1,17 @@
[Unit]
-Description=Hadoop Datanode
-Requires=hadoop-namenode.service
-After=hadoop-namenode.service
+Description=Hadoop data node and node manager server
[Service]
-EnvironmentFile=/etc/conf.d/hadoop
-ExecStart=/usr/bin/hadoop datanode
+Type=forking
User=hadoop
Group=hadoop
+EnvironmentFile=/etc/conf.d/hadoop
+ExecStart=/usr/bin/hdfs --daemon start datanode
+ExecStartPost=/usr/bin/yarn --daemon start nodemanager
+ExecStop=/usr/bin/yarn --daemon stop nodemanager
+ExecStopPost=/usr/bin/hdfs --daemon stop datanode
+Restart=on-abnormal
+SuccessExitStatus=143
[Install]
WantedBy=multi-user.target
diff --git a/hadoop-historyserver.service b/hadoop-historyserver.service
new file mode 100644
index 000000000000..68908fa8c884
--- /dev/null
+++ b/hadoop-historyserver.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Hadoop job history server
+
+[Service]
+User=hadoop
+Group=hadoop
+EnvironmentFile=/etc/conf.d/hadoop
+ExecStart=/usr/bin/mapred historyserver
+Restart=on-abnormal
+SuccessExitStatus=143
+
+[Install]
+WantedBy=multi-user.target
diff --git a/hadoop-jobtracker.service b/hadoop-jobtracker.service
deleted file mode 100644
index 61973168b916..000000000000
--- a/hadoop-jobtracker.service
+++ /dev/null
@@ -1,13 +0,0 @@
-[Unit]
-Description=Hadoop Jobtracker
-Requires=hadoop-datanode.service
-After=hadoop-datanode.service
-
-[Service]
-EnvironmentFile=/etc/conf.d/hadoop
-ExecStart=/usr/bin/hadoop jobtracker
-User=hadoop
-Group=hadoop
-
-[Install]
-WantedBy=multi-user.target
diff --git a/hadoop-namenode.service b/hadoop-namenode.service
index 51553653d6a6..dc0e1ff971cf 100644
--- a/hadoop-namenode.service
+++ b/hadoop-namenode.service
@@ -1,12 +1,15 @@
[Unit]
-Description=Hadoop Namenode
-After=network.target
+Description=Hadoop name node server
[Service]
-EnvironmentFile=/etc/conf.d/hadoop
-ExecStart=/usr/bin/hadoop namenode
+Type=forking
User=hadoop
Group=hadoop
+EnvironmentFile=/etc/conf.d/hadoop
+ExecStart=/usr/bin/hdfs --daemon start namenode
+ExecStop=/usr/bin/hdfs --daemon stop namenode
+Restart=on-abnormal
+SuccessExitStatus=143
[Install]
WantedBy=multi-user.target
diff --git a/hadoop-resourcemanager.service b/hadoop-resourcemanager.service
new file mode 100644
index 000000000000..5f22257877d7
--- /dev/null
+++ b/hadoop-resourcemanager.service
@@ -0,0 +1,15 @@
+[Unit]
+Description=Hadoop resource manager server
+
+[Service]
+Type=forking
+User=hadoop
+Group=hadoop
+EnvironmentFile=/etc/conf.d/hadoop
+ExecStart=/usr/bin/yarn --daemon start resourcemanager
+ExecStop=/usr/bin/yarn --daemon stop resourcemanager
+Restart=on-abnormal
+SuccessExitStatus=143
+
+[Install]
+WantedBy=multi-user.target
diff --git a/hadoop-secondarynamenode.service b/hadoop-secondarynamenode.service
index f0e5c022750d..01ed5efa50c1 100644
--- a/hadoop-secondarynamenode.service
+++ b/hadoop-secondarynamenode.service
@@ -1,13 +1,15 @@
[Unit]
-Description=Hadoop Secondary Namenode
-Requires=hadoop-datanode.service
-After=hadoop-datanode.service
+Description=Hadoop secondary name node server
[Service]
-EnvironmentFile=/etc/conf.d/hadoop
-ExecStart=/usr/bin/hadoop secondarynamenode
+Type=forking
User=hadoop
Group=hadoop
+EnvironmentFile=/etc/conf.d/hadoop
+ExecStart=/usr/bin/hdfs --daemon start secondarynamenode
+ExecStop=/usr/bin/hdfs --daemon stop secondarynamenode
+Restart=on-abnormal
+SuccessExitStatus=143
[Install]
WantedBy=multi-user.target
diff --git a/hadoop-tasktracker.service b/hadoop-tasktracker.service
deleted file mode 100644
index 004566ea067c..000000000000
--- a/hadoop-tasktracker.service
+++ /dev/null
@@ -1,13 +0,0 @@
-[Unit]
-Description=Hadoop Tasktracker
-Requires=hadoop-jobtracker.service
-After=hadoop-jobtracker.service
-
-[Service]
-EnvironmentFile=/etc/conf.d/hadoop
-ExecStart=/usr/bin/hadoop tasktracker
-User=hadoop
-Group=hadoop
-
-[Install]
-WantedBy=multi-user.target
diff --git a/hadoop.install b/hadoop.install
index a7b1b075ec42..477751f66a6c 100644
--- a/hadoop.install
+++ b/hadoop.install
@@ -1,32 +1,17 @@
post_install() {
- getent group 'hadoop' >/dev/null 2>&1 || groupadd -r 'hadoop' >/dev/null
- getent passwd 'hadoop' /dev/null 2>&1 || useradd -r -g 'hadoop' -d '/etc/hadoop' 'hadoop' >/dev/null
-
- if [ ! -d '/etc/hadoop/.ssh' ] ; then
- mkdir -p '/etc/hadoop/.ssh'
- fi
-
- chown -R 'hadoop:hadoop' '/etc/hadoop'
-
- if [ ! -e '/etc/hadoop/.ssh/id_rsa' ] ; then
- su -s '/usr/bin/bash' -c "ssh-keygen -t rsa -N '' -f ~/.ssh/id_rsa" - 'hadoop'
- su -s '/usr/bin/bash' -c "cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys" - 'hadoop'
- fi
-
- echo 'An SSH key-pair has been created. Depending on your SSHD configuration you may'
- echo 'need to connect to localhost as user "hadoop" to make sure Hadoop can connect.'
- echo ''
- echo '$ ssh localhost'
-
- echo ''
- echo '===> Read https://wiki.archlinux.org/index.php/Hadoop for more information'
+ getent group hadoop || groupadd hadoop
+ getent passwd hadoop || useradd -g hadoop -r -d /var/lib/hadoop hadoop
+ chown -R hadoop:hadoop /var/{lib,log}/hadoop
}
-post_upgrade() {
- :
+pre_remove() {
+ for s in datanode historyserver namenode resourcemanager secondarynamenode; do
+ systemctl stop hadoop-$s.service
+ systemctl --no-reload disable hadoop-$s.service
+ done
}
post_remove() {
- getent passwd 'hadoop' >/dev/null 2>&1 && userdel 'hadoop' >/dev/null
- getent group 'hadoop' >/dev/null 2>&1 && groupdel 'hadoop' >/dev/null
+ userdel hadoop
+ rm -rf /var/{lib,log}/hadoop
}
diff --git a/hadoop.profile b/hadoop.profile
deleted file mode 100644
index d5b49aa522d3..000000000000
--- a/hadoop.profile
+++ /dev/null
@@ -1,4 +0,0 @@
-export HADOOP_CONF_DIR=/etc/hadoop
-export HADOOP_LOG_DIR=/tmp/hadoop/log
-export HADOOP_SLAVES=/etc/hadoop/slaves
-export HADOOP_PID_DIR=/tmp/hadoop/run
diff --git a/hadoop.sh b/hadoop.sh
index 55974c37c308..5424eee4b0db 100644
--- a/hadoop.sh
+++ b/hadoop.sh
@@ -1,2 +1,4 @@
-HADOOP_CONF_DIR=/etc/hadoop
-export HADOOP_CONF_DIR \ No newline at end of file
+export HADOOP_COMMON_LIB_NATIVE_DIR=/usr/lib
+export HADOOP_CONF_DIR=/etc/hadoop
+export HADOOP_LOG_DIR=/var/log/hadoop
+export HADOOP_USERNAME=hadoop