1. 三台机器: hadoop22, hadoop23, hadoop24
2. hdfs在hadoop22机器启动,yarn在hadoop23机器启动
3. 脚本需要hadoop用户启动才可以
4. 脚本:
#!/bin/bash
#
# Start/stop the Hadoop cluster: HDFS on this host (hadoop22),
# YARN on hadoop23 via ssh.  Must be run as the "hadoop" user.
#
# Usage: $0 <start|stop>

# Error on unset variables and failed pipeline stages.  (-e is omitted
# so a non-zero daemon-script exit does not silently kill the banner
# output mid-way; critical steps are checked explicitly below.)
set -u -o pipefail

readonly HADOOP_PATH="/opt/module/hadoop-3.3.4"

# The Hadoop daemons and the inter-node SSH trust are configured for
# the "hadoop" account, so refuse to run as anyone else.
if [ "$(whoami)" != "hadoop" ]; then
  echo "错误: 该脚本需要以 hadoop 用户身份执行" >&2
  exit 1
fi

# Exactly one argument (start|stop) is required.
if [ "$#" -ne 1 ]; then
  echo "使用方法: $0 <start|stop>" >&2
  exit 1
fi
# Start the cluster: HDFS locally, then YARN on hadoop23 over ssh
# (the ResourceManager runs there).
function start_cluster() {
  echo "****************************************************************"
  echo "* *"
  echo "***********************启动hadoop集群.**************************"
  echo "* *"
  echo "****************************************************************"
  # Quote and check the cd: otherwise start-dfs.sh would be looked up
  # relative to whatever the current directory happens to be.
  cd "$HADOOP_PATH" || { echo "错误: 无法进入 $HADOOP_PATH" >&2; exit 1; }
  sbin/start-dfs.sh
  echo "已启动hdfs."
  # The unquoted EOF delimiter makes $HADOOP_PATH expand locally before
  # the commands are sent to the remote shell on hadoop23.
  ssh -T hadoop23 << EOF
cd "$HADOOP_PATH" || exit 1
sbin/start-yarn.sh
echo "已启动yarn."
exit
EOF
}
# Stop the cluster: YARN is stopped remotely on hadoop23, HDFS locally.
function stop_cluster() {
  echo "****************************************************************"
  echo "* *"
  echo "***********************关闭hadoop集群.**************************"
  echo "* *"
  echo "****************************************************************"
  # Quote and check the cd so stop-dfs.sh is resolved from the Hadoop
  # install directory, not from an arbitrary current directory.
  cd "$HADOOP_PATH" || { echo "错误: 无法进入 $HADOOP_PATH" >&2; exit 1; }
  sbin/stop-dfs.sh
  echo "已关闭hdfs."
  # Unquoted EOF: $HADOOP_PATH expands locally before being sent to
  # the remote shell on hadoop23 (the ResourceManager host).
  ssh -T hadoop23 << EOF
cd "$HADOOP_PATH" || exit 1
sbin/stop-yarn.sh
echo "已关闭yarn."
exit
EOF
}
# Dispatch on the requested action (start or stop).
case "$1" in
  start)
    start_cluster
    ;;
  stop)
    stop_cluster
    ;;
  *)
    echo "未知操作:$1"
    exit 1
    ;;
esac