#!/bin/bash
# hadoop.sh — single-node Hadoop 2.9.0 installation and smoke-test script.

#if [ -x "$(command -v /usr/local/hadoop-2.9.0/bin/hadoop)" ]; then
#        echo "command hadoop already installed"
#else
# --- Install Hadoop 2.9.0 and run the standalone (local-mode) grep example ---

# Unpack the distribution into /usr/local; abort if the tarball is missing.
tar zvxf /home/dataexa/insight-microservice-poc/software/hadoop-2.9.0.tar.gz -C /usr/local/ || exit 1

# Append Hadoop's bin dir to the login PATH.
# Single quotes are essential: with double quotes the script's *current*
# $PATH value would be expanded and baked into /etc/profile, instead of
# being expanded fresh at each login.
echo 'export PATH=/usr/local/hadoop-2.9.0/bin:$PATH' >> /etc/profile
source /etc/profile

cd /usr/local/hadoop-2.9.0 || exit 1
# hadoop-env.sh needs an explicit JAVA_HOME; assumes a JDK at /usr/local/jdk
# (TODO confirm this path matches the JDK install on the target host).
echo "export JAVA_HOME=/usr/local/jdk" >> etc/hadoop/hadoop-env.sh

# Standalone smoke test: grep the bundled config files for 'dfs[a-z.]+'.
mkdir -p input   # -p: don't fail if a previous run already created it
cp etc/hadoop/*.xml input

hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep input output 'dfs[a-z.]+'

# --- Pseudo-distributed configuration: core-site.xml and hdfs-site.xml ---
cd /usr/local/hadoop-2.9.0 || exit 1

# Quoted 'EOF' delimiters write the XML literally, with no shell expansion —
# a later edit adding a '$' to the payload can't silently corrupt the config.
# NameNode RPC endpoint for the default filesystem.
cat > etc/hadoop/core-site.xml << 'EOF'
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:8020</value>
    </property>
</configuration>
EOF

# Single-node cluster: one replica per block (default of 3 can't be met).
cat > etc/hadoop/hdfs-site.xml << 'EOF'
<configuration>
   <property>
       <name>dfs.replication</name>
       <value>1</value>
   </property>
</configuration>
EOF

# --- Passwordless SSH to localhost (required by start-dfs.sh/start-yarn.sh) ---
echo '---------------配置ssh免密登录----------------------'
# Generate an RSA key pair non-interactively (-f names the key file, -P ""
# sets an empty passphrase). Skip if a key already exists: the original
# 'echo "" | ssh-keygen' answered the overwrite prompt with "no" on re-runs,
# silently doing nothing.
if [ ! -f "$HOME/.ssh/id_rsa" ]; then
    ssh-keygen -t rsa -P "" -f "$HOME/.ssh/id_rsa"
fi
echo '----------秘钥生成完成,开始生成公钥----------------'
echo '---------------请输入您当前账户的密码----------------------'
# Installs the public key into localhost's authorized_keys; prompts once
# for the current account's password.
ssh-copy-id localhost


# --- Format HDFS, start the DFS daemons, run the grep example on HDFS ---
hdfs namenode -format

sbin/start-dfs.sh
# -p creates parents and is a no-op if the directory already exists,
# so the script is safe to re-run (plain -mkdir fails the second time).
hdfs dfs -mkdir -p /user/test
hdfs dfs -put etc/hadoop /user/test/input
hadoop fs -ls /user/test/input

hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep /user/test/input output 'dfs[a-z.]+'
hdfs dfs -cat output/*
# Fetch results to the local FS. Use the PATH 'hdfs' consistently
# (the original mixed 'bin/hdfs' and bare 'hdfs' for the same binary).
hdfs dfs -get output output
cat output/*
sbin/stop-dfs.sh
sleep 15   # give the DFS daemons time to shut down cleanly

# --- Configure and run the same example on YARN ---
# Write mapred-site.xml directly. The original first copied
# mapred-site.xml.template, but 'cat >' truncates and fully replaces the
# file, so that copy was dead code and has been dropped.
cat > etc/hadoop/mapred-site.xml << 'EOF'
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
EOF

# Shuffle service the MapReduce framework needs on every NodeManager.
cat > etc/hadoop/yarn-site.xml << 'EOF'
<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
EOF

sbin/start-dfs.sh
sleep 15
sbin/start-yarn.sh
sleep 15
# The previous (local-mode) run left an HDFS 'output' dir behind; MapReduce
# refuses to overwrite an existing output dir, so remove it first
# (-f: don't fail if it doesn't exist).
hdfs dfs -rm -r -f output
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep /user/test/input output 'dfs[a-z.]+'
hdfs dfs -cat output/*
sbin/stop-yarn.sh
# Job history server exposes finished MapReduce jobs (web UI on port 19888).
sbin/mr-jobhistory-daemon.sh start historyserver
#fi
# Source: https://www.cnblogs.com/sxgaofeng/p/12786237.html