解压hadoop
修改conf/hadoop-env.sh
export JAVA_HOME=/usr/local/jre
复制src/core/core-default.xml到conf/core-site.xml作为模板，并在其中覆盖以下属性：
hadoop.tmp.dir = /root/hadoop-0.20.2/data
fs.default.name = hdfs://hadoop160:54310
修改conf/masters和conf/slaves文件，分别加入主节点和从节点的主机名（每行一个）
复制src/hdfs/hdfs-default.xml到conf/hdfs-site.xml作为模板，并在其中覆盖以下属性：
dfs.replication = 3
dfs.datanode.max.xcievers=4096（注意：xcievers是Hadoop源码中沿用的历史拼写，不要"更正"为xceivers，否则该配置不生效）
dfs.datanode.handler.count=30