主服务器
# Generate an RSA key pair on the main server (press Enter at every prompt)
ssh-keygen -t rsa
# Copy the public key to every node (including itself) for passwordless SSH
ssh-copy-id hadoop1
ssh-copy-id hadoop2
ssh-copy-id hadoop3
hosts配置
三台机器
# /etc/hosts entries for all three nodes: IP, FQDN, short name
172.16.1.220 hadoop1.dcs.com hadoop1
172.16.1.221 hadoop2.dcs.com hadoop2
172.16.1.222 hadoop3.dcs.com hadoop3
修改主机名
每台机器分别将主机名修改为 hadoop1、hadoop2、hadoop3,修改后执行 reboot 重启机器使其生效
# Edit the hostname file on each node (hadoop1 / hadoop2 / hadoop3 respectively)
vi /etc/hostname
hadoop1
关闭防火墙
三台机器
# Stop and permanently disable the firewall on all three nodes
systemctl stop firewalld
systemctl disable firewalld
# Stop and permanently disable NetworkManager
systemctl stop NetworkManager
# Fix: the original read "disableNetworkManager" (missing space), which
# is an unknown systemctl verb and leaves NetworkManager enabled on boot
systemctl disable NetworkManager
# Put SELinux into permissive mode for the current boot
# (edit /etc/selinux/config as well to make this persist across reboots)
setenforce 0
服务器时间同步
三台机器
# Install the NTP daemon on all three nodes
yum -y install ntp
主服务器
vi /etc/ntp.conf
# Modify: allow cluster clients to query this server.
# Fix: the cluster hosts are on 172.16.1.x (see the /etc/hosts entries),
# so the restrict network must be 172.16.1.0 — the original 192.168.0.0
# did not match the cluster subnet and would have blocked the clients.
restrict 172.16.1.0 mask 255.255.255.0
# Comment out the upstream time servers
#server 0.centos.pool.ntp.org iburst
#server 1.centos.pool.ntp.org iburst
#server 2.centos.pool.ntp.org iburst
#server 3.centos.pool.ntp.org iburst
# Add: serve time from the local clock as a stratum-10 fallback
server 127.127.1.0
fudge 127.127.1.0 stratum 10
# Start the service and enable it at boot
systemctl start ntpd.service
systemctl enable ntpd.service
# 子节点设置同步
crontab -e
# Sync from the NTP master every 10 minutes.
# Fix: the original synced from "c7-01", a hostname that does not exist
# in this cluster's /etc/hosts; the NTP master here is hadoop1.
0-59/10 * * * * /usr/sbin/ntpdate hadoop1
关闭THP特性
三台机器
vi /etc/rc.local
# Add: disable Transparent Huge Pages (recommended for HBase/Hadoop)
echo never > /sys/kernel/mm/transparent_hugepage/defrag
echo never > /sys/kernel/mm/transparent_hugepage/enabled
:wq
# Fix: on CentOS 7, /etc/rc.d/rc.local is NOT executable by default, so
# without this the lines above would never run at boot
chmod +x /etc/rc.d/rc.local
source /etc/rc.local
ulimit配置
所有主机
# Create a dedicated limits file for the hbase user
vi /etc/security/limits.d/hbase.nofiles.conf
hbase - nofile 327680
# Set the ulimit for all users:
vi /etc/security/limits.conf
# Append the following to that file
* - nofile 65536
* - nproc 16384
vi /etc/security/limits.d/20-nproc.conf
# Append the following to that file
* soft nproc 16384
root soft nproc unlimited
# If the limits do not take effect, verify the values and reconnect the
# shell session so the new limits apply
vi /etc/profile
# Fix: keep this consistent with limits.conf — the original used 65535
# here but 65536 in limits.conf above
ulimit -n 65536
ulimit -u 16384
source /etc/profile
yum源配置
主服务器操作
(1)安装httpd服务
# 主服务器安装httpd
# Install httpd on the main server, start it and enable it at boot
yum install -y httpd
systemctl start httpd
systemctl enable httpd
(2)将下载的三个安装包(ambari-2.4.2.0、HDP-2.5.3.0、HDP-UTILS-1.1.0.21)放到/var/www/html目录下
cd /var/www/html/
# Upload the three tarballs here, then unpack them under the web root
tar -zxvf ambari-2.4.2.0-centos7.tar.gz
tar -zxvf HDP-2.5.3.0-centos7-rpm.tar.gz
# HDP-UTILS has no top-level directory in the tarball, so extract it
# into its own directory
mkdir /var/www/html/HDP-UTILS-1.1.0.21
tar -zxvf HDP-UTILS-1.1.0.21-centos7.tar.gz -C /var/www/html/HDP-UTILS-1.1.0.21/
新增配置文件
# Create the local yum repo file pointing at the httpd server (hadoop1)
vi /etc/yum.repos.d/ambari.repo
[HDP]
name=HDP
baseurl=http://172.16.1.220/HDP/centos7
gpgcheck=1
gpgkey=http://172.16.1.220/HDP/centos7/RPM-GPG-KEY/RPM-GPG-KEY-Jenkins
enabled=1
priority=1
[HDP-UTILS]
name=HDP-UTILS
baseurl=http://172.16.1.220/HDP-UTILS-1.1.0.21
gpgcheck=1
gpgkey=http://172.16.1.220/HDP-UTILS-1.1.0.21/RPM-GPG-KEY/RPM-GPG-KEY-Jenkins
enabled=1
priority=1
[Ambari]
name=Ambari
baseurl=http://172.16.1.220/AMBARI-2.4.2.0/centos7/2.4.2.0-136
gpgcheck=1
gpgkey=http://172.16.1.220/AMBARI-2.4.2.0/centos7/2.4.2.0-136/RPM-GPG-KEY/RPM-GPG-KEY-Jenkins
enabled=1
priority=1
:wq
# Refresh / verify the repo is reachable
yum list
发送配置文件到其他服务器,然后更新yum源
# Push the repo file to the other two nodes, then verify on each
scp /etc/yum.repos.d/ambari.repo hadoop2:/etc/yum.repos.d/
scp /etc/yum.repos.d/ambari.repo hadoop3:/etc/yum.repos.d/
yum list
jdk1.8
三台机器
# Extract the JDK archive (substitute the actual tarball file name)
tar -zxvf jdk1.8
# Fix: these export lines must be appended to /etc/profile — the
# original jumped straight to them, so "source /etc/profile" below
# would have changed nothing
vi /etc/profile
export JAVA_HOME=/app/jdk1.8
export CLASSPATH=.:$JAVA_HOME/jre/lib/rt.jar:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
source /etc/profile
# Verify java is now on the PATH
which java
ambari部署及Hadoop
安装ambari服务端
# Only on the ambari server node (hadoop1)
yum install -y ambari-server
安装ambari客户端
# On all three nodes
yum install -y ambari-agent
ambari-agent配置
vi /etc/ambari-agent/conf/ambari-agent.ini
# Modify: point every agent at the ambari server host
# (presumably the [server] hostname key — same value on all nodes,
# since hadoop1 runs ambari-server; verify section name in the file)
hostname= hadoop1.dcs.com
:wq
# Copy the configuration file to the other hosts:
scp /etc/ambari-agent/conf/ambari-agent.ini hadoop2:/etc/ambari-agent/conf
scp /etc/ambari-agent/conf/ambari-agent.ini hadoop3:/etc/ambari-agent/conf
ambari-server配置
# Interactive setup wizard; the lines below record the answers to give
ambari-server setup
ambari-server daemon [y/n] ---------------------y
checking jdk -------------------------------3 Custom JDK
# NOTE(review): /opt/jdk1.8.0_162 differs from the JAVA_HOME=/app/jdk1.8
# configured earlier — use whichever path the JDK was actually
# extracted to ("根据实际情况填写" = fill in per your environment)
Path to JAVA_HOME------------------------------- /opt/jdk1.8.0_162 根据实际情况填写
database configuration[y/n]------------------------------------ n 使用服务自带的PostgreSQL数据库
省略:下面的操作是通过登录web页面,进行HADOOP的集群部署