hadoop配置文件

1、启动脚本文件

hadoop-env.sh

yarn-env.sh

2、参数配置文件

core-site.xml

hdfs-site.xml

yarn-site.xml

mapred-site.xml

3、集群主机文件

slaves

文件内容:

hadoop-env.sh添加:

export JAVA_HOME=/usr/java/jdk1.8.0_51
export HADOOP_CONF_DIR=/home/hadoop/hadoop/etc/hadoop

yarn-env.sh添加:

export JAVA_HOME=/usr/java/jdk1.8.0_51
export YARN_CONF_DIR=/home/hadoop/hadoop/etc/hadoop

core-site.xml

<configuration>
    <property>
        <name>hadoop.tmp.dir</name> 
        <value>/home/hadoop/hdfs/tmp</value>
        <final>true</final>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://rhel01:8020</value>
        <final>true</final>
    </property>
</configuration>

hdfs-site.xml

<configuration>
  <property>
    <name>dfs.replication</name>
    <value>2</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/home/hadoop/hdfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hdfs/data</value>
  </property>
  <property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
  </property>
</configuration>

yarn-site.xml

<configuration>
    <!-- mapreduce.framework.name 属于 mapred-site.xml(见下),不应放在 yarn-site.xml -->
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>rhel01</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>

mapred-site.xml

<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>

slaves

node01
node02
node03

环境变量:

.bash_profile

# 统一只定义一次,避免重复/冲突的 JAVA_HOME、HADOOP_HOME
export JAVA_HOME=/usr/java/jdk1.8.0_51
export SCALA_HOME=/usr/scala
export HADOOP_HOME=$HOME/hadoop
export SPARK_HOME=$HOME/spark
export HIVE_HOME=$HOME/hive
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native

export PATH=$PATH:$HOME/bin:$JAVA_HOME/bin:$SCALA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$SPARK_HOME/bin

原文地址:https://www.cnblogs.com/timlong/p/9855462.html