Ambari uninstall script


The following script removes Ambari Server, the Ambari Agents, and all deployed HDP components, service users, and directories from every host in the cluster.

#!/bin/bash

PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

#Collect all cluster hostnames. Note: in /etc/hosts the IP address and the
#hostname must be separated by exactly one space, otherwise the cut below
#fails to extract the hostname
hostList=$(cat /etc/hosts | tail -n +3 | cut -d ' ' -f 2)
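#Illustration only (hypothetical IPs/hostnames): the parsing above expects
#/etc/hosts to look roughly like this, where "tail -n +3" skips the two
#localhost lines and each cluster entry uses a single space between the
#IP address and the hostname:
#  127.0.0.1 localhost
#  ::1 localhost
#  192.168.1.10 master
#  192.168.1.11 slave1
#  192.168.1.12 slave2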
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5
logPre=TDP

read -p "Please input your master hostname: " master
master=${master:-"master"}
ssh $master "ambari-server stop"
#Reset the Ambari database; if the reset fails, fall back to removing ambari-server
ssh $master "ambari-server reset"
if [[ $? -ne 0 ]]; then
    ssh $master "yum -y remove ambari-server"
fi
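#Note (assumption, verify against your Ambari version): "ambari-server reset"
#normally asks for interactive confirmation, so a fully unattended run may
#hang at that step. A commonly used workaround is to pipe the answers in,
#for example:
#  ssh $master "echo -e 'yes\nyes' | ambari-server reset"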

for host in $hostList
do
    #echo $host
    #Check that the host is reachable
    unPing=$(ping $host -c $pingCount | grep Unreachable | wc -l)
    if [ "$unPing" == "$pingCount" ]; then
        echo -e "$logPre======>$host is Unreachable,please check ‘/etc/hosts‘ file"
        continue
    fi

    echo "$logPre======>$host deleting... 
"
    #1.) Remove hdp.repo, HDP.repo, HDP-UTILS.repo and ambari.repo
    ssh $host "cd $yumReposDir"
    ssh $host "rm -rf $yumReposDir/hdp.repo"
    ssh $host "rm -rf $yumReposDir/HDP*"
    ssh $host "rm -rf $yumReposDir/ambari.repo"
    
    #Remove the HDP-related packages
    ssh $host "yum remove -y  sqoop.noarch"
    ssh $host "yum remove -y  lzo-devel.x86_64"
    ssh $host "yum remove -y  hadoop-libhdfs.x86_64"
    ssh $host "yum remove -y  rrdtool.x86_64"
    ssh $host "yum remove -y  hbase.noarch"
    ssh $host "yum remove -y  pig.noarch"
    ssh $host "yum remove -y  lzo.x86_64" 
    ssh $host "yum remove -y  ambari-log4j.noarch"
    ssh $host "yum remove -y  oozie.noarch"
    ssh $host "yum remove -y  oozie-client.noarch"
    ssh $host "yum remove -y  gweb.noarch"
    ssh $host "yum remove -y  snappy-devel.x86_64"
    ssh $host "yum remove -y  hcatalog.noarch"
    ssh $host "yum remove -y  python-rrdtool.x86_64"
    ssh $host "yum remove -y  nagios.x86_64"
    ssh $host "yum remove -y  webhcat-tar-pig.noarch"
    ssh $host "yum remove -y  snappy.x86_64"
    ssh $host "yum remove -y  libconfuse.x86_64"
    ssh $host "yum remove -y  webhcat-tar-hive.noarch"
    ssh $host "yum remove -y  ganglia-gmetad.x86_64"
    ssh $host "yum remove -y  extjs.noarch"
    ssh $host "yum remove -y  hive.noarch"
    ssh $host "yum remove -y  hadoop-lzo.x86_64"
    ssh $host "yum remove -y  hadoop-lzo-native.x86_64"
    ssh $host "yum remove -y  hadoop-native.x86_64"
    ssh $host "yum remove -y  hadoop-pipes.x86_64"
    ssh $host "yum remove -y  nagios-plugins.x86_64"
    ssh $host "yum remove -y  hadoop.x86_64"
    ssh $host "yum remove -y  zookeeper.noarch"   
    ssh $host "yum remove -y  hadoop-sbin.x86_64"
    ssh $host "yum remove -y  ganglia-gmond.x86_64"
    ssh $host "yum remove -y  libganglia.x86_64"
    ssh $host "yum remove -y  perl-rrdtool.x86_64"
    ssh $host "yum remove -y  epel-release.noarch"
    ssh $host "yum remove -y  compat-readline5*"
    ssh $host "yum remove -y  fping.x86_64"
    ssh $host "yum remove -y  perl-Crypt-DES.x86_64"
    ssh $host "yum remove -y  exim.x86_64"
    ssh $host "yum remove -y ganglia-web.noarch"
    ssh $host "yum remove -y perl-Digest-HMAC.noarch"
    ssh $host "yum remove -y perl-Digest-SHA1.x86_64"
    ssh $host "yum remove -y bigtop-jsvc.x86_64"
    ssh $host "yum remove -y hive*"
    ssh $host "yum remove -y oozie*"
    ssh $host "yum remove -y pig*"
    ssh $host "yum remove -y zookeeper*"
    ssh $host "yum remove -y tez*"
    ssh $host "yum remove -y hbase*"
    ssh $host "yum remove -y ranger*"
    ssh $host "yum remove -y knox*"
    ssh $host "yum remove -y storm*"
    ssh $host "yum remove -y accumulo*"
    ssh $host "yum remove -y falcon*"
    ssh $host "yum remove -y ambari-metrics-hadoop-sink"
    ssh $host "yum remove -y smartsense-hst"
    ssh $host "yum remove -y slider_2_4_2_0_258"
    ssh $host "yum remove -y ambari-metrics-monitor"
    ssh $host "yum remove -y spark2_2_5_3_0_37-yarn-shuffle"
    ssh $host "yum remove -y spark_2_5_3_0_37-yarn-shuffle"
    ssh $host "yum remove -y ambari-infra-solr-client"
    
    #Remove the alternatives symlinks (each ssh runs in a fresh shell, so a
    #separate "cd" would not affect the following commands; use absolute paths)
    ssh $host "rm -rf $alterNativesDir/hadoop-etc"
    ssh $host "rm -rf $alterNativesDir/zookeeper-conf"
    ssh $host "rm -rf $alterNativesDir/hbase-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-log"
    ssh $host "rm -rf $alterNativesDir/hadoop-lib"
    ssh $host "rm -rf $alterNativesDir/hadoop-default"
    ssh $host "rm -rf $alterNativesDir/oozie-conf"
    ssh $host "rm -rf $alterNativesDir/hcatalog-conf"
    ssh $host "rm -rf $alterNativesDir/hive-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-man"
    ssh $host "rm -rf $alterNativesDir/sqoop-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-confone"

    #Remove the service users and their files
    ssh $host "userdel -rf nagios"
    ssh $host "userdel -rf hive"
    ssh $host "userdel -rf ambari-qa"
    ssh $host "userdel -rf hbase"
    ssh $host "userdel -rf oozie"
    ssh $host "userdel -rf hcat"
    ssh $host "userdel -rf mapred"
    ssh $host "userdel -rf hdfs"
    ssh $host "userdel -rf rrdcached"
    ssh $host "userdel -rf zookeeper"
    ssh $host "userdel -rf sqoop"
    ssh $host "userdel -rf puppet"
    ssh $host "userdel -rf flume"
    ssh $host "userdel -rf tez"
    ssh $host "userdel -rf yarn"
    ssh $host "userdel -rf knox"
    ssh $host "userdel -rf storm"
    ssh $host "userdel -rf hbase"
    ssh $host "userdel -rf kafka"
    ssh $host "userdel -rf falcon"
    ssh $host "userdel -rf yarn"
    ssh $host "userdel -rf hcat"
    ssh $host "userdel -rf atlas"
    ssh $host "userdel -rf spark"
    ssh $host "userdel -rf ams"
    ssh $host "userdel -rf zeppelin"
    ssh $host "rm -rf /home/*"
    ssh $host " userdel -rf hadoop"

    #Remove configuration, data, log, and temp directories
    ssh $host "rm -rf /hadoop"
    ssh $host "rm -rf /etc/hadoop" 
    ssh $host "rm -rf /etc/hbase"
    ssh $host "rm -rf /etc/hcatalog" 
    ssh $host "rm -rf /etc/hive"
    ssh $host "rm -rf /etc/ganglia" 
    ssh $host "rm -rf /etc/nagios"
    ssh $host "rm -rf /etc/oozie"
    ssh $host "rm -rf /etc/sqoop"
    ssh $host "rm -rf /etc/zookeeper" 
    ssh $host "rm -rf /etc/hive2"
    ssh $host "rm -rf /etc/flume"
    ssh $host "rm -rf /etc/storm"
    ssh $host "rm -rf /etc/tez_hive2"
    ssh $host "rm -rf /etc/spark2"
    ssh $host "rm -rf /etc/phoenix"
    ssh $host "rm -rf /etc/pig"
    ssh $host "rm -rf /etc/hive-hcatalog"
    ssh $host "rm -rf /etc/tez"
    ssh $host "rm -rf /etc/falcon"
    ssh $host "rm -rf /etc/knox"
    ssh $host "rm -rf /etc/hive-webhca"
    ssh $host "rm -rf /etc/kafka"
    ssh $host "rm -rf /etc/slider"
    ssh $host "rm -rf /etc/storm-slider-client"
    ssh $host "rm -rf /etc/spark"
    ssh $host "rm -rf /var/log/*"
    ssh $host "rm -rf /var/nagios"
    ssh $host "rm -rf /var/run/hbase"
    ssh $host "rm -rf /var/run/hive"
    ssh $host "rm -rf /var/run/ganglia" 
    ssh $host "rm -rf /var/run/nagios"
    ssh $host "rm -rf /var/run/oozie"
    ssh $host "rm -rf /var/run/zookeeper"
    ssh $host "rm -rf /var/run/spark"
    ssh $host "rm -rf /var/run/hadoop"
    ssh $host "rm -rf /var/run/flume"
    ssh $host "rm -rf /var/run/storm"
    ssh $host "rm -rf /var/run/webhcat"
    ssh $host "rm -rf /var/run/hadoop-yarn"
    ssh $host "rm -rf /var/run/hadoop-mapreduce"
    ssh $host "rm -rf /var/run/kafka"
    ssh $host "rm -rf /var/run/sqoop"
    ssh $host "rm -rf /var/run/hive-hcatalog"
    ssh $host "rm -rf /var/run/falcon"
    ssh $host "rm -rf /var/run/hadoop-hdfs"
    ssh $host "rm -rf /var/run/ambari-metrics-collector"
    ssh $host "rm -rf /var/run/ambari-metrics-monitor"
    ssh $host "rm -rf /var/lib/ambari*"
    ssh $host "rm -rf /var/lib/hive"
    ssh $host "rm -rf /var/lib/ganglia" 
    ssh $host "rm -rf /var/lib/oozie"
    ssh $host "rm -rf /var/lib/zookeeper"
    ssh $host "rm -rf /var/lib/hadoop-yarn"
    ssh $host "rm -rf /var/lib/hadoop-mapreduce"
    ssh $host "rm -rf /var/lib/hadoop-hdfs"
    ssh $host "rm -rf /var/lib/knox"
    ssh $host "rm -rf /var/tmp/oozie"
    ssh $host "rm -rf /usr/lib/python2.6/site-packages/ambari_*"
    ssh $host "rm -rf /usr/lib/python2.6/site-packages/resource_management"
    ssh $host "rm -rf /usr/lib/ambari-*"
    ssh $host "rm -rf /usr/lib/hadoop"
    ssh $host "rm -rf /usr/lib/hbase"
    ssh $host "rm -rf /usr/lib/hcatalog" 
    ssh $host "rm -rf /usr/lib/hive"
    ssh $host "rm -rf /usr/lib/oozie"
    ssh $host "rm -rf /usr/lib/sqoop"
    ssh $host "rm -rf /usr/lib/zookeeper"
    ssh $host "rm -rf /usr/lib/flume"
    ssh $host "rm -rf /usr/lib/storm"
    ssh $host "rm -rf /usr/hdp"
    ssh $host "rm -rf /usr/hadoop"
    ssh $host "rm -rf /usr/bin/worker-lanucher"
    ssh $host "rm -rf /usr/bin/zookeeper-client"
    ssh $host "rm -rf /usr/bin/zookeeper-server"
    ssh $host "rm -rf /usr/bin/zookeeper-server-cleanup"
    ssh $host "rm -rf /usr/bin/yarn" 
    ssh $host "rm -rf /usr/bin/storm"
    ssh $host "rm -rf /usr/bin/storm-slider" 
    ssh $host "rm -rf /usr/bin/worker-lanucher"
    ssh $host "rm -rf /usr/bin/storm"
    ssh $host "rm -rf /usr/bin/storm-slider "
    ssh $host "rm -rf /usr/bin/sqoop "
    ssh $host "rm -rf /usr/bin/sqoop-codegen "
    ssh $host "rm -rf /usr/bin/sqoop-create-hive-table "
    ssh $host "rm -rf /usr/bin/sqoop-eval "
    ssh $host "rm -rf /usr/bin/sqoop-export "
    ssh $host "rm -rf /usr/bin/sqoop-help "
    ssh $host "rm -rf /usr/bin/sqoop-import "
    ssh $host "rm -rf /usr/bin/sqoop-import-all-tables "
    ssh $host "rm -rf /usr/bin/sqoop-job "
    ssh $host "rm -rf /usr/bin/sqoop-list-databases "
    ssh $host "rm -rf /usr/bin/sqoop-list-tables "
    ssh $host "rm -rf /usr/bin/sqoop-merge "
    ssh $host "rm -rf /usr/bin/sqoop-metastore "
    ssh $host "rm -rf /usr/bin/sqoop-version "
    ssh $host "rm -rf /usr/bin/slider "
    ssh $host "rm -rf /usr/bin/ranger-admin-start "
    ssh $host "rm -rf /usr/bin/ranger-admin-stop "
    ssh $host "rm -rf /usr/bin/ranger-kms"
    ssh $host "rm -rf /usr/bin/ranger-usersync-start"
    ssh $host "rm -rf /usr/bin/ranger-usersync-stop"
    ssh $host "rm -rf /usr/bin/pig "
    ssh $host "rm -rf /usr/bin/phoenix-psql "
    ssh $host "rm -rf /usr/bin/phoenix-queryserver "
    ssh $host "rm -rf /usr/bin/phoenix-sqlline "
    ssh $host "rm -rf /usr/bin/phoenix-sqlline-thin "
    ssh $host "rm -rf /usr/bin/oozie "
    ssh $host "rm -rf /usr/bin/oozied.sh "
    ssh $host "rm -rf /usr/bin/mapred "
    ssh $host "rm -rf /usr/bin/mahout "
    ssh $host "rm -rf /usr/bin/kafka "
    ssh $host "rm -rf /usr/bin/hive "
    ssh $host "rm -rf /usr/bin/hiveserver2 "
    ssh $host "rm -rf /usr/bin/hbase"
    ssh $host "rm -rf /usr/bin/hcat "
    ssh $host "rm -rf /usr/bin/hdfs "
    ssh $host "rm -rf /usr/bin/hadoop "
    ssh $host "rm -rf /usr/bin/flume-ng "
    ssh $host "rm -rf /usr/bin/falcon "
    ssh $host "rm -rf /usr/bin/beeline"
    ssh $host "rm -rf /usr/bin/atlas-start "
    ssh $host "rm -rf /usr/bin/atlas-stop "
    ssh $host "rm -rf /usr/bin/accumulo"
    ssh $host "rm -rf /tmp/hive"
    ssh $host "rm -rf /tmp/nagios" 
    ssh $host "rm -rf /tmp/ambari-qa" 
    ssh $host "rm -rf /tmp/sqoop-ambari-qa"
    ssh $host "rm -rf /tmp/hadoop" 
    ssh $host "rm -rf /tmp/hadoop-hdfs"
    ssh $host "rm -rf /tmp/hadoop-hive" 
    ssh $host "rm -rf /tmp/hadoop-nagios" 
    ssh $host "rm -rf /tmp/hadoop-hcat"
    ssh $host "rm -rf /tmp/hadoop-ambari-qa" 
    ssh $host "rm -rf /tmp/hsperfdata_hbase"
    ssh $host "rm -rf /tmp/hsperfdata_hive"
    ssh $host "rm -rf /tmp/hsperfdata_nagios"
    ssh $host "rm -rf /tmp/hsperfdata_oozie"
    ssh $host "rm -rf /tmp/hsperfdata_zookeeper"
    ssh $host "rm -rf /tmp/hsperfdata_mapred"
    ssh $host "rm -rf /tmp/hsperfdata_hdfs"
    ssh $host "rm -rf /tmp/hsperfdata_hcat"
    ssh $host "rm -rf /tmp/hsperfdata_ambari-qa"

    #Remove the Ambari packages themselves
    ssh $host "yum remove -y ambari-*"
    ssh $host "yum remove -y postgresql"
    ssh $host "rm -rf /var/lib/ambari*"
    ssh $host "rm -rf /var/log/ambari*"
    ssh $host "rm -rf /etc/ambari*"

    echo "$logPre======>$host is done! 
"
done
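A minimal way to run the script (assuming it is saved as uninstall_ambari.sh, a hypothetical filename): execute it as root on a node that can reach the master and every host listed in /etc/hosts over passwordless SSH, since every remote command runs through ssh with no password handling. For example:

    chmod +x uninstall_ambari.sh
    ./uninstall_ambari.sh

Because the script irreversibly removes packages, service users, and directories (including /home/* and /var/log/*), back up anything you still need before running it.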

Original source: https://www.cnblogs.com/ivan0626/p/4221964.html
