#!/usr/bin/env bash
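
# start-hadoop: prepares an HDFS user and launches a Hadoop node inside the
# container. Expected invocation, inferred from the dispatch logic at the end
# of this script (argument names are illustrative, not confirmed):
#
#   start-hadoop namenode <arg>
#   start-hadoop datanode <arg> <arg>
#
# HDFS_USER must be set in the environment before this script runs.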
# Check user environment variable
if [[ -z "${HDFS_USER}" ]]; then
  echo "Missing HDFS_USER environment variable" >&2
  exit 1
fi
# Start the SSH daemon
service ssh restart
# Set the HADOOP_USER_NAME environment variable
export HADOOP_USER_NAME="${HDFS_USER}"
echo "export HADOOP_USER_NAME=\"${HDFS_USER}\"" >> /etc/bash.bashrc
# Create user for HDFS
adduser --disabled-password --gecos "" "${HDFS_USER}"
# Set up passwordless SSH keys for the HDFS user
su "${HDFS_USER}" --command "mkdir -p /home/${HDFS_USER}/.ssh"
su "${HDFS_USER}" --command "ssh-keygen -q -t rsa -N '' -f /home/${HDFS_USER}/.ssh/id_rsa"
su "${HDFS_USER}" --command "ln -s /home/${HDFS_USER}/.ssh/id_rsa.pub /home/${HDFS_USER}/.ssh/authorized_keys"
# Fix directory permissions: make the HDFS user own the data and Hadoop directories
if ! [[ "$(whoami)" = "${HDFS_USER}" ]]; then
  find /opt/hdfs ! -user "${HDFS_USER}" -print0 | xargs -0 chown "${HDFS_USER}:${HDFS_USER}" \
    > /dev/null 2>&1
  find "${HADOOP_HOME}" ! -user "${HDFS_USER}" -print0 | xargs -0 chown "${HDFS_USER}:${HDFS_USER}" \
    > /dev/null 2>&1
else
  echo "\$HDFS_USER is root; skipping ownership changes"
fi
# Select the node type and launch the matching daemon as the HDFS user
if [[ "${1}" = 'namenode' ]]; then
  su "${HDFS_USER}" --command "/opt/util/bin/start-hadoop-namenode ${2}"
elif [[ "${1}" = 'datanode' ]]; then
  su "${HDFS_USER}" --command "/opt/util/bin/start-hadoop-datanode ${2} ${3}"
else
  echo "Invalid command '${1}'" >&2
  exit 1
fi
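
# A minimal invocation sketch, assuming this script is the container's
# entrypoint (image name and trailing arguments are placeholders, not
# confirmed by this repository):
#
#   docker run -d -e HDFS_USER=hdfs --name namenode <image> namenode <arg>
#   docker run -d -e HDFS_USER=hdfs --name datanode <image> datanode <arg> <arg>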