| Role | IP |
| --- | --- |
| master | 192.168.10.10 |
| slave-1 | 192.168.10.11 |
| slave-2 | 192.168.10.12 |
Install the JDK (same steps on all three machines)
# tar xf jdk-8u161-linux-x64.tar.gz -C /usr/local/
# mv /usr/local/{jdk1.8.0_161,jdk}
# vim /etc/profile.d/jdk.sh
export JAVA_HOME=/usr/local/jdk
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
# exec bash
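After reloading the shell, a quick check on each machine confirms the JDK is being picked up from /usr/local/jdk:
# java -version       # should report java version "1.8.0_161"
# echo $JAVA_HOME     # should print /usr/local/jdk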
Configure local hosts-file resolution (same file on all three machines)
# vim /etc/hosts
127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4
::1         localhost localhost.localdomain localhost6 localhost6.localdomain6
192.168.10.10 hadoop-1 masters
192.168.10.11 hadoop-2 slaves-1
192.168.10.12 hadoop-3 slaves-2
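To confirm the names resolve, each host can be pinged by its alias (from every machine):
# ping -c 1 hadoop-2
# ping -c 1 hadoop-3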
Configure passwordless SSH login
On the master node:
ssh-keygen          # press Enter through every prompt
ssh-copy-id hadoop-2
ssh-copy-id hadoop-3
On slave node 1:
ssh-keygen          # press Enter through every prompt
ssh-copy-id hadoop-1
ssh-copy-id hadoop-3
On slave node 2:
ssh-keygen          # press Enter through every prompt
ssh-copy-id hadoop-1
ssh-copy-id hadoop-2
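From the master, logging in to each slave should now work without a password. Note that start-all.sh also logs in to the master itself (to launch the SecondaryNameNode listed in conf/masters below), so copying the master's own key to hadoop-1 avoids a prompt there as well. A quick check:
ssh-copy-id hadoop-1        # on the master, so start-all.sh can reach the local node without a password
ssh hadoop-2 hostname       # should print hadoop-2 with no password prompt
ssh hadoop-3 hostname       # should print hadoop-3 with no password prompt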
Install Hadoop
tar xf hadoop-1.2.1-bin.tar.gz -C /usr/local/src/
cd /usr/local/src/hadoop-1.2.1
mkdir tmp
cd conf/
In Hadoop 1.x, the masters file names the host that runs the SecondaryNameNode, and the slaves file lists the DataNode/TaskTracker hosts; the hostname aliases defined in /etc/hosts above are used here.
# vim masters
masters
# vim slaves
slaves-1
slaves-2
[root@masters conf]# vim core-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/usr/local/src/hadoop-1.2.1/tmp</value>
    </property>
    <property>
        <name>fs.default.name</name>
        <value>hdfs://192.168.10.10:9000</value>
    </property>
</configuration>
[root@masters conf]# vim mapred-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>mapred.job.tracker</name>
        <!-- host:port of the JobTracker; no http:// prefix -->
        <value>192.168.10.10:9001</value>
    </property>
</configuration>
[root@masters conf]# vim hdfs-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
    <property>
        <name>dfs.replication</name>
        <!-- only two DataNodes (slaves-1, slaves-2) in this cluster, so replication is capped at 2 -->
        <value>2</value>
    </property>
</configuration>
# vim hadoop-env.sh
export JAVA_HOME=/usr/local/jdk
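As a quick sanity check that the hadoop script runs and picks up this JAVA_HOME (bin/hadoop sources conf/hadoop-env.sh), the version can be printed from the conf/ directory:
[root@masters conf]# ../bin/hadoop version    # should report Hadoop 1.2.1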
Copy Hadoop to the slave nodes and start the cluster
[root@masters conf]# cd ../../
[root@masters src]# scp -r hadoop-1.2.1 hadoop-2:/usr/local/src/
[root@masters src]# scp -r hadoop-1.2.1 hadoop-3:/usr/local/src/
[root@masters src]# cd hadoop-1.2.1/bin/
[root@masters bin]# ./start-all.sh
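Note that on a brand-new cluster the HDFS metadata has to be initialized before the first start. A typical first-run sequence on the master, with a quick check that the daemons and both DataNodes came up, looks like this (the process names are what Hadoop 1.x normally reports in jps):
[root@masters bin]# ./hadoop namenode -format    # first start only: initializes HDFS under hadoop.tmp.dir
[root@masters bin]# ./start-all.sh
[root@masters bin]# jps                          # master should show NameNode, SecondaryNameNode, JobTracker
[root@masters bin]# ./hadoop dfsadmin -report    # should list both DataNodes
[root@masters bin]# ./hadoop fs -put /etc/hosts /
[root@masters bin]# ./hadoop fs -ls /            # simple smoke test of an HDFS write
The NameNode web UI (port 50070) and JobTracker web UI (port 50030) on 192.168.10.10 can also be used to confirm the cluster state.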