hadoop2.7 完全分布式安装
1、安装JDK
2、SSH互信免登陆
3、/etc/profile
HADOOP_PREFIX=/opt/hadoop
JAVA_HOME=/opt/jdk18
PATH=$PATH:$JAVA_HOME/bin:$HADOOP_PREFIX/bin:$HADOOP_PREFIX/sbin
export HADOOP_PREFIX PATH JAVA_HOME
4、hadoop安装目录/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/opt/jdk18
export HADOOP_COMMON_HOME=/opt/hadoop
5、编辑/etc/hosts
192.168.98.34 NameNode34
192.168.98.35 DataNode35
192.168.98.37 DataNode37
192.168.98.38 DataNode38
6.1 core-site.xml
hadoop.tmp.dir
/opt/hadoop/tmp
A base for other temporary directories.
fs.defaultFS
hdfs://NameNode34:9000
io.file.buffer.size
131072
6.2 hdfs-site.xml
dfs.namenode.name.dir
file:/opt/hadoop/Name
dfs.datanode.data.dir
/opt/hadoop/Data
dfs.blocksize
268435456
dfs.namenode.handler.count
100
6.3 yarn-site.xml（注意：下面的 mapreduce.job.ubertask.* 属性属于 MapReduce 配置，应放在 mapred-site.xml 中，而非 yarn-site.xml）
mapreduce.job.ubertask.enable
true
mapreduce.job.ubertask.maxmaps
9
mapreduce.job.ubertask.maxreduces
5
yarn.acl.enable
false
yarn.admin.acl
*
yarn.log-aggregation-enable
false
yarn.nodemanager.aux-services.mapreduce.shuffle.class
org.apache.hadoop.mapred.ShuffleHandler
yarn.resourcemanager.address
NameNode34:8032
yarn.resourcemanager.scheduler.address
NameNode34:8030
yarn.resourcemanager.resource-tracker.address
NameNode34:8035
yarn.resourcemanager.admin.address
NameNode34:8033
yarn.resourcemanager.webapp.address
NameNode34:8088
yarn.resourcemanager.hostname
NameNode34
yarn.nodemanager.aux-services
mapreduce_shuffle
6.4 mapred-site.xml
mapreduce.framework.name
yarn
mapreduce.jobhistory.address
NameNode34:10020
mapreduce.jobhistory.webapp.address
NameNode34:19888
mapreduce.framework.name
yarn
mapreduce.jobhistory.address
NameNode34:10020
mapreduce.jobhistory.webapp.address
NameNode34:19888
mapreduce.map.memory.mb
4096
mapreduce.map.java.opts
-Xmx1024M
mapreduce.reduce.memory.mb
4096
mapreduce.reduce.java.opts
-Xmx1024M
7、执行 hdfs namenode -format
8、编辑 hadoop安装目录/etc/hadoop/slaves文件
localhost
DataNode35
DataNode37
DataNode38
9、执行 start-dfs.sh
10、执行 start-yarn.sh
http://NameNode34:8088/ 查看yarn
http://NameNode34:50070/ 查看hdfs
创建如下脚本程序
# vi scp_hadoop.sh
脚本内容如下:
#!/bin/sh
# Copy the local Hadoop install tree to every slave host via scp.
# NOTE(review): the original line was garbled ("scp-r /work/apps/hadoopsch@...");
# reconstructed as source dir /work/apps/hadoop and remote user sch — confirm
# both against the actual deployment before running.
for host in redmongdb nginx; do
  echo "$host"
  scp -r /work/apps/hadoop sch@"${host}":/work/apps/
done
保存退出后, 修改文件的可执行属性 (chmod a+x *.sh)
WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native/
### set java env
export JAVA_HOME=/usr/java/jdk1.8.0_25/
### set hadoop env
# Root of the Hadoop installation; all component homes point at it.
export HADOOP_HOME=/opt/hadoop
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HADOOP_HOME/lib
# Native library dir; silences the NativeCodeLoader warning mentioned above.
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
#export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export LD_LIBRARY_PATH=/opt/hadoop/lib/native/
页:
[1]