-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy patheasyhadoop.sh
167 lines (145 loc) · 5.04 KB
/
easyhadoop.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
#!/bin/bash
# Install the distribution-default JDK and JRE packages (required by Hadoop).
install_java() {
  # Refresh the package index first so the newest packaged Java is picked up.
  sudo apt update
  sudo apt install -y default-jdk default-jre
}
# Create the dedicated 'hadoop' user, grant it sudo, and set up passwordless
# SSH to localhost (the Hadoop start scripts ssh into the local machine).
#
# BUG FIX: the original ran 'sudo su - hadoop' followed by ssh-keygen etc.
# 'su -' opens an interactive subshell, so those commands only executed in the
# CALLING user's shell after that subshell exited — the hadoop user never got
# a key. Run the key setup explicitly as the hadoop user instead.
configure_hadoop_user() {
  sudo adduser hadoop
  sudo usermod -aG sudo hadoop
  # -H makes HOME=/home/hadoop so '~' resolves to the hadoop user's home.
  sudo -u hadoop -H bash -c '
    mkdir -p ~/.ssh
    chmod 700 ~/.ssh
    ssh-keygen -t rsa -f ~/.ssh/id_rsa -P ""
    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
    chmod 640 ~/.ssh/authorized_keys
  '
}
# Install the OpenSSH server and client (Hadoop's start scripts need
# passwordless SSH to localhost, which requires a running sshd).
install_openssh() {
  sudo apt install -y openssh-server openssh-client
}
# Download Hadoop 3.1.0 from the Apache archive, install it under
# /usr/local/hadoop, append the Hadoop environment variables to ~/.bashrc,
# and record JAVA_HOME in hadoop-env.sh.
#
# FIX: the extra javax.activation jar was fetched from jcenter.bintray.com,
# which was shut down in 2021 — download it from Maven Central instead.
install_hadoop() {
  local hadoop_version="3.1.0"
  local hadoop_url="https://archive.apache.org/dist/hadoop/common/hadoop-${hadoop_version}/hadoop-${hadoop_version}.tar.gz"
  cd ~ || return 1
  wget "$hadoop_url" -O "hadoop-${hadoop_version}.tar.gz" || {
    echo "Hadoop download failed: $hadoop_url" >&2
    return 1
  }
  tar -xzvf "hadoop-${hadoop_version}.tar.gz"
  sudo mv "hadoop-${hadoop_version}" /usr/local/hadoop
  sudo mkdir -p /usr/local/hadoop/logs
  sudo chown -R hadoop:hadoop /usr/local/hadoop
  # Single-quoted so the $HADOOP_HOME references are expanded at shell
  # startup (when .bashrc is sourced), not at install time.
  echo 'export HADOOP_HOME=/usr/local/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"' >> ~/.bashrc
  source ~/.bashrc
  # Derive JAVA_HOME from the javac on PATH and persist it for Hadoop.
  JAVA_HOME=$(readlink -f /usr/bin/javac | sed "s:/bin/javac::")
  echo "export JAVA_HOME=$JAVA_HOME" >> "$HADOOP_HOME/etc/hadoop/hadoop-env.sh"
  echo 'export HADOOP_CLASSPATH+=" $HADOOP_HOME/lib/*.jar"' >> "$HADOOP_HOME/etc/hadoop/hadoop-env.sh"
  cd /usr/local/hadoop/lib || return 1
  # javax.activation is not on the default classpath of newer JDKs; Hadoop 3.1
  # setups commonly add this jar. Fetched from Maven Central (Bintray is dead).
  sudo wget https://repo1.maven.org/maven2/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.jar
}
# Write $HADOOP_HOME/etc/hadoop/core-site.xml.
# $1 - hostname/IP (or 'localhost') used in the fs.defaultFS URI.
configure_core_site() {
  local host=$1
  # Heredoc instead of an escaped echo string; content is byte-identical.
  sudo tee "$HADOOP_HOME/etc/hadoop/core-site.xml" <<EOF
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://${host}:9000</value>
</property>
</configuration>
EOF
}
# Write $HADOOP_HOME/etc/hadoop/hdfs-site.xml: replication factor 1
# (single-node) plus the namenode/datanode storage directories.
configure_hdfs_site() {
  # Quoted delimiter: the XML contains no shell expansions.
  sudo tee "$HADOOP_HOME/etc/hadoop/hdfs-site.xml" <<'EOF'
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:///home/hadoop/hadoop/hadoopdata/hdfs/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:///home/hadoop/hadoop/hadoopdata/hdfs/datanode</value>
</property>
</configuration>
EOF
}
# Write $HADOOP_HOME/etc/hadoop/yarn-site.xml.
# $1 - hostname/IP used as the ResourceManager address.
configure_yarn_site() {
  local rm_host=$1
  sudo tee "$HADOOP_HOME/etc/hadoop/yarn-site.xml" <<EOF
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.auxservices.mapreduce.shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>${rm_host}</value>
</property>
</configuration>
EOF
}
# Drive the full configuration step: write core/hdfs/yarn site XML files,
# create the HDFS storage directories, and format the namenode.
#
# FIX: an empty answer previously produced a broken 'hdfs://:9000' URI;
# default to 'localhost' when the user just presses Enter. Also use read -r
# and quote the expansion passed to the helpers.
configure_hadoop_files() {
  local input_dns
  read -r -p "Enter Public DNS/IP or 'localhost' for Hadoop configuration: " input_dns
  input_dns=${input_dns:-localhost}
  # Configure core-site.xml with the chosen host.
  configure_core_site "$input_dns"
  # Create the HDFS name/data directories referenced by hdfs-site.xml and
  # hand them to the hadoop user.
  sudo mkdir -p /home/hadoop/hadoop/hadoopdata/hdfs/{namenode,datanode}
  sudo chown -R hadoop:hadoop /home/hadoop/hadoop/hadoopdata/hdfs
  # Configure hdfs-site.xml (takes no arguments) and yarn-site.xml.
  configure_hdfs_site
  configure_yarn_site "$input_dns"
  # Initialize the namenode metadata directory.
  hdfs namenode -format
}
# Launch the Hadoop daemons: HDFS first, then YARN (scripts come from
# $HADOOP_HOME/sbin, which install_hadoop adds to PATH).
start_hadoop_services() {
  start-dfs.sh
  start-yarn.sh
}
# List the running JVM processes via jps so the user can confirm that the
# NameNode/DataNode/ResourceManager/NodeManager daemons are up.
verify_components() {
  jps
}
# Main menu loop: present the installation steps until the user exits.
#
# FIX: figlet was invoked unconditionally but is never installed by this
# script, so the banner errored on most systems — fall back to plain echo.
# Also: read -r (preserve backslashes) and a quoted case selector.
while true; do
  clear
  if command -v figlet >/dev/null 2>&1; then
    figlet -f big "EasyHadoop"
  else
    echo "EasyHadoop"
  fi
  echo "Creator: Abishek Kafle"
  echo "Apache Hadoop Installation and Configuration Menu"
  echo "1. Install Java"
  echo "2. Configure Hadoop User and SSH"
  echo "3. Install OpenSSH"
  echo "4. Install Hadoop"
  echo "5. Configure Hadoop Files"
  echo "6. Start Hadoop Services"
  echo "7. Verify Running Components"
  echo "8. Exit"
  read -r -p "Enter your choice: " choice
  case "$choice" in
    1) install_java ;;
    2) configure_hadoop_user ;;
    3) install_openssh ;;
    4) install_hadoop ;;
    5) configure_hadoop_files ;;
    6) start_hadoop_services ;;
    7) verify_components ;;
    8) exit 0 ;;
    *) echo "Invalid choice. Please enter a valid option." ;;
  esac
  read -r -p "Press Enter to continue..."
done