# hadoop-env.sh (append at the end of the file)
export JAVA_HOME=/data/soft/jdk1.8
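The export can be appended from the shell on bigdata01 (the whole config directory is copied to the workers later); the Hadoop home used below (/data/soft/hadoop-3.2.0) is an assumption, substitute the actual install directory.

export HADOOP_HOME=/data/soft/hadoop-3.2.0   # assumed path, adjust to your layout
echo 'export JAVA_HOME=/data/soft/jdk1.8' >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh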
# core-site.xml
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://bigdata01:9000</value>
</property>
<property>
    <name>hadoop.tmp.dir</name>
    <value>/data/hadoop_repo</value>
</property>
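The values can be sanity-checked with hdfs getconf (assuming the Hadoop bin directory is on PATH):

hdfs getconf -confKey fs.defaultFS      # expect hdfs://bigdata01:9000
hdfs getconf -confKey hadoop.tmp.dir    # expect /data/hadoop_repo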
# hdfs-site.xml
<property>
    <name>dfs.replication</name>
    <value>2</value>
</property>
<property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>bigdata01:50090</value>
</property>
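dfs.replication of 2 matches the two worker nodes configured below; once HDFS is running, a quick check is (output wording may differ slightly between releases):

hdfs dfsadmin -report | grep -i 'live datanodes'   # expect 2 live datanodes
# the SecondaryNameNode web UI should answer at http://bigdata01:50090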
# mapred-site.xml
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>
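With mapreduce.framework.name set to yarn, a simple end-to-end test once the cluster is started is the bundled pi example; the jar version in the path below is an assumption, adjust it to the installed release.

hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.0.jar pi 2 10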
# yarn-site.xml
<property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
</property>
<property>
    <name>yarn.nodemanager.env-whitelist</name>
    <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
</property>
<property>
    <name>yarn.resourcemanager.hostname</name>
    <value>bigdata01</value>
</property>
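After start-yarn.sh, both NodeManagers should register with the ResourceManager on bigdata01; two quick checks (assuming default ports):

yarn node -list        # expect bigdata02 and bigdata03 in RUNNING state
# the ResourceManager web UI listens on http://bigdata01:8088 by default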
# workers (list the worker nodes)
bigdata02
bigdata03
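The workers file is only read by the start scripts on bigdata01, but the other configuration files must be identical on all three nodes; a common way to guarantee that is to copy the whole installation after finishing the edits (same assumed Hadoop home as above):

scp -rq /data/soft/hadoop-3.2.0 bigdata02:/data/soft/
scp -rq /data/soft/hadoop-3.2.0 bigdata03:/data/soft/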
# start-dfs.sh
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
# stop-dfs.sh
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
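These declarations go near the top of sbin/start-dfs.sh and sbin/stop-dfs.sh; without them the Hadoop 3.x scripts abort with "there is no HDFS_NAMENODE_USER defined" when run as root. A quick way to confirm both files were edited:

grep -n 'HDFS_NAMENODE_USER' $HADOOP_HOME/sbin/start-dfs.sh $HADOOP_HOME/sbin/stop-dfs.sh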
# start-yarn.sh
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
# stop-yarn.sh
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
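The same placement applies to sbin/start-yarn.sh and sbin/stop-yarn.sh. With all files in place and copied to the workers, the cluster is formatted once and started from bigdata01; jps should then show NameNode, SecondaryNameNode and ResourceManager on bigdata01, and DataNode plus NodeManager on bigdata02/bigdata03.

hdfs namenode -format            # only on a brand-new cluster
$HADOOP_HOME/sbin/start-dfs.sh
$HADOOP_HOME/sbin/start-yarn.sh
jps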