1 ,java 1.8
2 ,scala 2.11.8
3 ,hadoop 2.6
4 ,spark 安装规划 :
| master | worker | worker |
|---|---|---|
| node01 | node02 | node03 |
5 ,版本选择 :
spark-2.3.1-bin-hadoop2.6.tgz
6 ,上传,解压 :
tar -xzvf spark-2.3.1-bin-hadoop2.6.tgz -C /export/servers/
7 ,配置文件 : 指定从节点 ( slaves )
cd /export/servers/spark-2.3.1-bin-hadoop2.6/conf
cp slaves.template slaves
vim slaves
node02
node03
8 ,配置文件 : 执行脚本 ( spark-env.sh )
cp spark-env.sh.template spark-env.sh
vim spark-env.sh
export SPARK_MASTER_HOST=node01
export SPARK_MASTER_PORT=7077
export SPARK_WORKER_CORES=2
export SPARK_WORKER_MEMORY=3g
export JAVA_HOME=/export/servers/jdk1.8.0_191
export SCALA_HOME=/export/servers/scala-2.11.8
9 ,分发安装包 : 同步到从节点 ( node02 , node03 )
cd /export/servers
scp -r spark-2.3.1-bin-hadoop2.6/ root@node02:$PWD
scp -r spark-2.3.1-bin-hadoop2.6/ root@node03:$PWD
10 ,环境变量 :
vim /etc/profile
# spark
export SPARK_HOME=/export/servers/spark-2.3.1-bin-hadoop2.6
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
source /etc/profile
11 ,启动 spark :
cd /export/servers/spark-2.3.1-bin-hadoop2.6/sbin
./start-all.sh
12 ,停止集群 :
cd /export/servers/spark-2.3.1-bin-hadoop2.6/sbin
./stop-all.sh
13 ,成功的标志 : 访问 web 端口
http://node01:8080/