
Hadoop 2.5.0 Installation and Configuration Quick Start

 
Notes:
OS: CentOS 6.5, 64-bit
JDK 1.7+
Hadoop 2.5.0
Key configuration sections:
#hosts file configuration in /etc/hosts; 192.168.100.163 is the master node, the others are slave nodes
192.168.100.163 master
192.168.100.165 node1
192.168.100.166 node2
192.168.100.167 node3
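(Optional) A quick sanity check, assuming the same /etc/hosts entries exist on every node, is to verify that each hostname resolves and responds:
[root@master ~]# for h in master node1 node2 node3; do ping -c 1 $h; done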

#SSH configuration
#1. Generate an SSH key pair for the master node user
[root@master ~]# ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa

#2. Append ~/.ssh/id_dsa.pub to the local ~/.ssh/authorized_keys file
[root@master ~]# cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys

#3. Test passwordless SSH login to the local machine
[root@master ~]# ssh localhost

#4. Copy ~/.ssh/id_dsa.pub to the target machine and append it to that machine's ~/.ssh/authorized_keys
[root@master ~]# scp -r ~/.ssh/id_dsa.pub root@node1:.ssh/
[root@master ~]# ssh node1
[root@node1 ~]# cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys
Repeat step #4 for node2 and node3!
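Once the keys are in place, a quick loop (assuming the hostnames above) should log in to every slave without a password prompt:
[root@master ~]# for h in node1 node2 node3; do ssh $h hostname; done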
#Hadoop and JDK installation steps are omitted here
#Hadoop configuration
#1. hadoop-env.sh
# The java implementation to use.
export JAVA_HOME=/usr/java/jdk1.7.0_67

#2. yarn-env.sh
export JAVA_HOME=/usr/java/jdk1.7.0_67
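All files edited in steps #1-#7 live in the Hadoop configuration directory (assuming the install path used later in this guide):
[root@master ~]# cd /founder/hadoop-2.5.0/etc/hadoop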
#3. core-site.xml
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://master:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/founder/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
</configuration>
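hadoop.tmp.dir must point to an existing, writable directory on every node; a sketch of creating it (assuming root access and the path above):
[root@master ~]# for h in master node1 node2 node3; do ssh $h mkdir -p /founder/tmp; done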
#4. hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>3</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:///home/hadoop/hdfs/name</value>
        <final>true</final>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:///home/hadoop/hdfs/data</value>
        <final>true</final>
    </property>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>master:9001</value>
    </property>
    <property>
        <name>dfs.webhdfs.enabled</name>
        <value>true</value>
    </property>
</configuration>
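The NameNode and DataNode directories referenced above also need to exist; a sketch, assuming the paths above and that DataNodes only run on the slaves:
[root@master ~]# mkdir -p /home/hadoop/hdfs/name
[root@master ~]# for h in node1 node2 node3; do ssh $h mkdir -p /home/hadoop/hdfs/data; done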

#5. mapred-site.xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
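If mapred-site.xml does not exist yet (the Hadoop 2.5.0 distribution ships only mapred-site.xml.template), copy the template first:
[root@master ~]# cd /founder/hadoop-2.5.0/etc/hadoop
[root@master ~]# cp mapred-site.xml.template mapred-site.xml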

#6. slaves
node1
node2
node3
#7. yarn-site.xml
<configuration>
    <!-- Site specific YARN configuration properties -->
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>master</value>
        <description>The hostname of the RM.</description>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
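After all files are edited on the master, the entire Hadoop directory can be copied to every slave so that the configuration stays identical (a sketch, assuming the /founder install path on all nodes):
[root@master ~]# for h in node1 node2 node3; do scp -r /founder/hadoop-2.5.0 root@$h:/founder/; done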


# Hadoop environment variables in /etc/profile

export HADOOP_DEV_HOME=/founder/hadoop-2.5.0
export PATH=$PATH:$HADOOP_DEV_HOME/bin
export PATH=$PATH:$HADOOP_DEV_HOME/sbin
export HADOOP_MAPRED_HOME=${HADOOP_DEV_HOME}
export HADOOP_COMMON_HOME=${HADOOP_DEV_HOME}
export HADOOP_HDFS_HOME=${HADOOP_DEV_HOME}
export YARN_HOME=${HADOOP_DEV_HOME}
export HADOOP_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export HDFS_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export YARN_CONF_DIR=${HADOOP_DEV_HOME}/etc/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_DEV_HOME}/lib/native 
export JAVA_LIBRARY_PATH=${HADOOP_DEV_HOME}/lib/native

#java setting
export JAVA_HOME=/usr/java/jdk1.7.0_67
export PATH=$JAVA_HOME/bin:$PATH
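Reload the profile and confirm that the hadoop command is on the PATH; if the slaves use the same paths, the same /etc/profile can be copied to them as well:
[root@master ~]# source /etc/profile
[root@master ~]# hadoop version
[root@master ~]# for h in node1 node2 node3; do scp /etc/profile root@$h:/etc/profile; done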



#Hadoop debug output, useful for troubleshooting; a temporary environment variable is sufficient
export HADOOP_ROOT_LOGGER=DEBUG,console
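Before opening the web UI, format the NameNode once and start HDFS and YARN from the master (a minimal sketch, assuming the configuration above); jps should then list the daemons:
[root@master ~]# hdfs namenode -format
[root@master ~]# start-dfs.sh
[root@master ~]# start-yarn.sh
[root@master ~]# jps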

Web UI address:
http://master:8088
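http://master:8088 is the YARN ResourceManager UI; the HDFS NameNode UI is served on its default port:
http://master:50070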