Create the xsync distribution script
mkdir -p /home/hadoop/bin && cd /home/hadoop/bin
vim xsync
#!/bin/bash

#1. Check the number of arguments
if [ $# -lt 1 ]
then
    echo Not Enough Arguments!
    exit;
fi

#2. Loop over every machine in the cluster
for host in node1 node2 node3
do
    echo ==================== $host ====================
    #3. Loop over all given files/directories and send them one by one
    for file in $@
    do
        #4. Check that the file exists
        if [ -e $file ]
        then
            #5. Get the parent directory (resolving symlinks)
            pdir=$(cd -P $(dirname $file); pwd)
            #6. Get the file name
            fname=$(basename $file)
            ssh $host "mkdir -p $pdir"
            rsync -av $pdir/$fname $host:$pdir
        else
            echo $file does not exist!
        fi
    done
done
chmod +x xsync
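xsync can now be called with one or more paths; each path is recreated under the same parent directory on node1, node2 and node3 (passwordless SSH and rsync on every node are assumed). For example, to push this bin directory itself to all nodes:
./xsync /home/hadoop/bin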
Create the jps status script
vim /export/server/hadoop/sbin/jpsall.sh
#!/bin/bash
for host in node1 node2 node3
do
    echo =============== $host ===============
    ssh $host $JAVA_HOME/bin/jps
done
chmod +x /export/server/hadoop/sbin/jpsall.sh
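jpsall.sh simply runs jps on each node over SSH, which is a quick way to check that the expected daemons (NameNode, DataNode, ResourceManager, NodeManager, and so on) are running. Note that $JAVA_HOME is expanded locally, so it assumes the same Java install path on every node. Run it with:
/export/server/hadoop/sbin/jpsall.sh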
Hadoop cluster start/stop script
cd /export/server/hadoop/sbin
vim exec-hadoop.sh
#!/bin/bash
if [ $# -lt 1 ]
then
    echo "No Args Input..."
    exit ;
fi
case $1 in
"start")echo " =================== 啟動 hadoop集群 ==================="echo " --------------- 啟動 hdfs ---------------"ssh node2 $HADOOP_HOME/sbin/start-dfs.shecho " --------------- 啟動 yarn ---------------"ssh node1 $HADOOP_HOME/sbin/start-yarn.shecho " --------------- 啟動 historyserver ---------------"ssh node2 $HADOOP_HOME/bin/mapred --daemon start historyserverecho " --------------- 啟動 spark ---------------"ssh node2 $SPARK_HOME/sbin/start-all.shecho " --------------- 啟動 spark 歷史服務器 ---------------"ssh node2 $SPARK_HOME/sbin/start-history-server.sh# ssh node2 /export/server/zookeeper/bin/zk.sh start
;;
"stop")echo " =================== 關閉 hadoop集群 ==================="echo " --------------- stop historyserver ---------------"ssh node2 $HADOOP_HOME/bin/mapred --daemon stop historyserverecho " --------------- stop yarn ---------------"ssh node1 $HADOOP_HOME/sbin/stop-yarn.shecho " --------------- stop hdfs ---------------"ssh node2 $HADOOP_HOME/sbin/stop-dfs.shecho " --------------- stop spark ---------------"ssh node2 $SPARK_HOME/sbin/stop-all.shecho " --------------- stop spark 歷史服務器 ---------------"ssh node2 $SPARK_HOME/sbin/stop-history-server.sh# ssh node2 /export/server/zookeeper/bin/zk.sh stop
;;
*)
    echo "Input Args Error..."
;;
esac
chmod +x exec-hadoop.sh
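With the script in place, the whole stack (HDFS, YARN, the MapReduce history server, Spark and its history server) is started or stopped with a single command. A minimal usage example, assuming you are in /export/server/hadoop/sbin:
./exec-hadoop.sh start   # bring the cluster up
./exec-hadoop.sh stop    # shut the cluster down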