工作中的shell
2017-12-18 本文已影响36人
先生_吕
zookeeper集群脚本
zkStart.sh
#!/bin/bash
# zkStart.sh — start the ZooKeeper server on every cluster node over ssh.
# Requires passwordless ssh and ZOOKEEPER_HOME exported by each node's /etc/profile.
zkhost='hadoop2004 hadoop2005 hadoop2006'
# Intentional word-splitting over the space-separated host list.
for zk in $zkhost
do
  # -T: no pty needed for a non-interactive here-doc session.
  # \$ZOOKEEPER_HOME is escaped so it expands on the REMOTE shell, not here.
  ssh -T "$zk" <<EOF
source /etc/profile
cd \$ZOOKEEPER_HOME
bin/zkServer.sh start
EOF
  # Quoted: an unquoted "[done]" is a glob pattern and could expand against
  # files named d/o/n/e in the current directory.
  echo "节点 $zk zookeeper进程已启动... [done]"
done
zkstart.png
zkStop.sh
#!/bin/bash
# zkStop.sh — stop the ZooKeeper server on every cluster node over ssh.
# Requires passwordless ssh and ZOOKEEPER_HOME exported by each node's /etc/profile.
zkhost='hadoop2004 hadoop2005 hadoop2006'
# Intentional word-splitting over the space-separated host list.
for zk in $zkhost
do
  # \$ZOOKEEPER_HOME is escaped so it expands on the REMOTE shell, not here.
  ssh -T "$zk" <<EOF
source /etc/profile
cd \$ZOOKEEPER_HOME
bin/zkServer.sh stop
EOF
  # Quoted: an unquoted "[done]" is a glob pattern and could expand against
  # files named d/o/n/e in the current directory.
  echo "节点 $zk zookeeper进程已终止... [done]"
done
zkStop.png
storm集群脚本
stormStart.sh
#!/bin/bash
# stormStart.sh — start the Storm daemons (nimbus, ui, supervisor) on their nodes.
# Requires passwordless ssh and STORM_HOME exported by each node's /etc/profile.

# nimbus node(s)  (original comment had a typo: "#mbus节点")
nimbusServers='hadoop2004'
# supervisor node(s)
supervisorServers='hadoop2005 hadoop2006'

# Start nimbus on every nimbus node.
for nim in $nimbusServers
do
  # nohup keeps the backgrounded daemon alive after the ssh session exits;
  # without it the remote process may be killed by SIGHUP on logout.
  ssh -T "$nim" <<EOF
source /etc/profile
cd \$STORM_HOME
nohup bin/storm nimbus >/dev/null 2>&1 &
EOF
  echo "从节点 $nim 启动nimbus...[ done ]"
  sleep 1
done

# Start the web UI on every nimbus node.
for u in $nimbusServers
do
  ssh -T "$u" <<EOF
source /etc/profile
cd \$STORM_HOME
nohup bin/storm ui >/dev/null 2>&1 &
EOF
  echo "从节点 $u 启动ui...[ done ]"
  sleep 1
done

# Start supervisor on every worker node.
for visor in $supervisorServers
do
  ssh -T "$visor" <<EOF
source /etc/profile
cd \$STORM_HOME
nohup bin/storm supervisor >/dev/null 2>&1 &
EOF
  echo "从节点 $visor 启动supervisor...[ done ]"
  sleep 1
done
图片.png
stormStop.sh
#!/bin/bash
# stormStop.sh — stop the Storm daemons (nimbus, ui, supervisor) on their nodes.
# Requires passwordless ssh to every host.

# nimbus node(s)  (original comment had a typo: "#mbus节点")
nimbusServers='hadoop2004'
# supervisor node(s)
supervisorServers='hadoop2005 hadoop2006'

# Stop nimbus and ui on every nimbus node.
# The whole pid-lookup + kill pipeline now runs on the REMOTE host in ONE ssh:
# the original ran a second `ssh ... ps -ef` inside local backticks just to
# read the pid, which is racy (pid can change between the two sessions) and
# doubles the connection cost. The '[n]imbus' bracket trick stops grep from
# matching its own entry in the remote ps output. `xargs -r` (GNU) skips the
# kill entirely when no pid was found. \$2 is escaped so the remote awk sees $2.
for nim in $nimbusServers
do
  echo "从节点 $nim 停止nimbus...[ done ]"
  ssh "$nim" "ps -ef | grep '[n]imbus' | awk '{print \$2}' | head -n 1 | xargs -r kill -9" >/dev/null 2>&1
  echo "从节点 $nim 停止ui...[ done ]"
  ssh "$nim" "ps -ef | grep '[c]ore' | awk '{print \$2}' | head -n 1 | xargs -r kill -9" >/dev/null 2>&1
done

# Stop supervisor on every worker node.
for visor in $supervisorServers
do
  echo "从节点 $visor 停止supervisor...[ done ]"
  ssh "$visor" "ps -ef | grep '[s]upervisor' | awk '{print \$2}' | head -n 1 | xargs -r kill -9" >/dev/null 2>&1
done
图片.png
hadoopHA集群脚本
start-hadoopHA.sh
#!/bin/bash
# start-hadoopHA.sh — start the HDFS HA daemons and YARN over ssh.
# Requires passwordless ssh and HADOOP_HOME exported by each node's /etc/profile.
nameNodeHost='hadoop2001'
yarnHost='hadoop2003'

# start-dfs.sh launched from one namenode brings up the HDFS daemons cluster-wide.
for nameNode in $nameNodeHost
do
  echo "nameNode 进程启动... [done]"
  # \$HADOOP_HOME is escaped so it expands on the REMOTE shell, not here.
  ssh -T "$nameNode" <<EOF
source /etc/profile
cd \$HADOOP_HOME
sbin/start-dfs.sh
EOF
done

for yarn in $yarnHost
do
  echo "节点 $yarn 启动yarn进程... [done]"
  ssh -T "$yarn" <<EOF
source /etc/profile
cd \$HADOOP_HOME
sbin/start-yarn.sh
EOF
done
# Print the completion message once, after everything has run — the original
# printed it inside the yarn loop, once per yarn host.
echo "hadoop-HA集群启动完毕..."
start-hadoop.png
stop-hadoopHA.sh
#!/bin/bash
# stop-hadoopHA.sh — stop the HDFS HA daemons and YARN over ssh.
# Requires passwordless ssh and HADOOP_HOME exported by each node's /etc/profile.
# NOTE(review): stop-dfs.sh issued on ONE namenode stops HDFS cluster-wide, so
# looping over both hadoop2001 and hadoop2002 looks redundant (the second run
# should report "no namenode to stop") — confirm and trim the list if so. The
# start script uses only hadoop2001.
nameNodeHost='hadoop2001 hadoop2002'
yarnHost='hadoop2003'

for nameNode in $nameNodeHost
do
  echo "nameNode 进程停止... [done]"
  # \$HADOOP_HOME is escaped so it expands on the REMOTE shell, not here.
  ssh -T "$nameNode" <<EOF
source /etc/profile
cd \$HADOOP_HOME
sbin/stop-dfs.sh
EOF
done

for yarn in $yarnHost
do
  echo "节点 $yarn 停止yarn进程... [done]"
  ssh -T "$yarn" <<EOF
source /etc/profile
cd \$HADOOP_HOME
sbin/stop-yarn.sh
EOF
done
# Print the completion message once, after everything has run — the original
# printed it inside the yarn loop, once per yarn host.
echo "hadoop-HA集群已关闭"
stop-HadoopHA.png