1. Environment
- Software environment
CentOS 7
Spark 1.6
JDK 1.7
Scala 2.10
- Hardware environment
192.168.1.21 (master, node21)
192.168.1.22 (slave, node22)
192.168.1.23 (slave, node23)
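The slaves file in section 3 refers to the workers by hostname, so every node must be able to resolve node21, node22 and node23. A minimal /etc/hosts sketch, assuming the hostname-to-IP mappings from the list above:
(append to /etc/hosts on all three nodes)
192.168.1.21 node21
192.168.1.22 node22
192.168.1.23 node23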
2. Configure the JDK and Scala runtime environments
- JDK installation (I prefer to use the tar.gz package)
tar -xzvf jdk1.7.tar.gz -C /opt/
mv /opt/jdk1.7 /opt/jdk
- Scala installation
tar xzvf scala-2.10.4.tgz -C /opt
mv /opt/scala-2.10.4 /opt/scala
- Configure environment variables
vi /etc/profile
(add the following configuration)
export JAVA_HOME=/opt/jdk
export PATH=$JAVA_HOME/bin:$PATH
export SCALA_HOME=/opt/scala
export PATH=$SCALA_HOME/bin:$PATH
## save, then reload
source /etc/profile
- Test
java -version
scala -version
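If either command is not found, the usual cause is that /etc/profile was edited but not re-sourced in the current shell. A quick sanity check, assuming the paths above:
echo $JAVA_HOME $SCALA_HOME   ## should print /opt/jdk /opt/scala
which java scala              ## both should resolve under the /opt installs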
3. Spark installation
- Installation
tar -xzvf spark-1.6.0-bin-2.6.4.tgz -C /opt
mv /opt/spark-1.6.0-bin-2.6.4 /opt/spark
- Configure slaves
cd /opt/spark/conf
cp slaves.template slaves
vi slaves
(add the following; hostnames or IP addresses both work, Spark recognizes either)
node22
node23
- Configure spark-env.sh
cd /opt/spark/conf
cp spark-env.sh.template spark-env.sh
vi spark-env.sh
(add the following; without JAVA_HOME the slave nodes will fail to start)
export JAVA_HOME=/opt/jdk
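JAVA_HOME is the only entry this setup strictly needs, but spark-env.sh also accepts optional standalone-mode settings. A hedged sketch with illustrative values (the master hostname comes from section 1; the core/memory numbers are assumptions, tune them to your machines):
export SPARK_MASTER_IP=node21     ## assumed master hostname
export SPARK_WORKER_CORES=2       ## illustrative value
export SPARK_WORKER_MEMORY=1g     ## illustrative value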
4. Deploy to the slave nodes
- JDK
scp -r /opt/jdk 192.168.1.22:/opt/
scp -r /opt/jdk 192.168.1.23:/opt/
- Scala
scp -r /opt/scala 192.168.1.22:/opt
scp -r /opt/scala 192.168.1.23:/opt
- Spark
scp -r /opt/spark 192.168.1.22:/opt
scp -r /opt/spark 192.168.1.23:/opt
- Environment variables
scp /etc/profile 192.168.1.22:/etc
scp /etc/profile 192.168.1.23:/etc
(run the following on every slave)
source /etc/profile
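Note that the scp commands above and start-all.sh in the next step both go over SSH; start-all.sh in particular logs in to every slave from the master, so passwordless SSH saves repeated password prompts. A minimal sketch, assuming the cluster runs as root:
ssh-keygen -t rsa      ## on the master, accept the defaults
ssh-copy-id root@node22
ssh-copy-id root@node23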
5. Start the cluster
Run the following on the master node:
/opt/spark/sbin/start-all.sh
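A few hedged checks that the cluster came up (the master URL below assumes the hostnames from section 1; 7077 and 8080 are Spark's standalone defaults):
jps      ## node21 should list a Master process, node22/node23 a Worker each
(the web UI is at http://192.168.1.21:8080)
/opt/spark/bin/spark-shell --master spark://node21:7077
If the Workers do not register, CentOS 7's firewalld is a common culprit; open ports 7077 and 8080 or disable it while testing.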