spark集群搭建、启动

2018-01-04  本文已影响0人  lehuai
# --- Master node (hadoop21): unpack the Spark distribution into /root/training/spark ---
#cd training
# tar -zxvf spark-2.1.0-bin-hadoop2.7.tgz
# rm -rf spark-2.1.0-bin-hadoop2.7.tgz
# mv spark-2.1.0-bin-hadoop2.7/ spark
# cd spark
# cd conf
# Create an editable spark-env.sh from the shipped template.
# cp spark-env.sh.template spark-env.sh
# --- Master node: append the two export lines below to /etc/profile so the
# spark-submit / start-* scripts are on PATH for every login shell ---
#vi /etc/profile
export SPARK_HOME=/root/training/spark

export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
# Reload the profile in the current shell so the new PATH takes effect immediately.
#source /etc/profile
# vi spark-env.sh
# --- conf/spark-env.sh for the standalone master (hadoop21) ---
export JAVA_HOME=/usr/java/default

# SPARK_MASTER_IP is deprecated since Spark 2.0; when both are set,
# SPARK_MASTER_HOST (below) takes precedence. Kept for compatibility.
export SPARK_MASTER_IP=hadoop21

export SPARK_MASTER_PORT=7077

export HADOOP_HOME=/root/training/hadoop

# FIX: original hard-coded /opt/hadoop/etc/hadoop, contradicting HADOOP_HOME
# above (/root/training/hadoop). Derive it so the two can never diverge.
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop

# NOTE(review): assumes 192.168.56.21 resolves to hadoop21 — verify /etc/hosts.
export SPARK_MASTER_HOST=192.168.56.21

# Per-worker resource caps for the standalone cluster.
export SPARK_WORKER_MEMORY=1g

export SPARK_WORKER_CORES=2
# --- Declare the worker hosts, then copy the configured Spark tree to each ---
# mv slaves.template slaves
#vi slaves
# The next three lines are the CONTENTS of conf/slaves (one worker hostname
# per line), not shell commands.
hadoop22
hadoop23
hadoop24
# Back to /root/training, then push the whole spark/ directory to every worker.
#cd ../..
# scp -r spark/ root@hadoop22:/root/training/
# scp -r spark/ root@hadoop23:/root/training/
# scp -r spark/ root@hadoop24:/root/training/

On each worker node (hadoop22, hadoop23, hadoop24), repeat the PATH setup:

# --- Worker nodes: same /etc/profile additions as on the master ---
#cd training
#vi /etc/profile
export SPARK_HOME=/root/training/spark

export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
# Reload the profile so the change applies to the current shell.
#source /etc/profile

Finally, on the master node (hadoop21), start the cluster:

# --- Launch the master plus every worker listed in conf/slaves ---
cd spark/sbin
# NOTE: this is Spark's sbin/start-all.sh — run it with the explicit ./ path
# so it is not confused with Hadoop's start-all.sh if both are on PATH.
#./start-all.sh
上一篇下一篇

猜你喜欢

热点阅读