`

Ambari卸载shell

阅读更多
#!/bin/bash
# Program:
#    uninstall ambari automatic
# History:
#    2014/01/13    -    Ivan    -    2862099249@qq.com    -    First release
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Collect every cluster hostname from /etc/hosts, skipping the first two
# (localhost) entries.  NOTE: each /etc/hosts line must separate the IP and
# the hostname with exactly ONE space for the cut below to work.
hostList=$(tail -n +3 /etc/hosts | cut -d ' ' -f 2)
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5
logPre=TDP

# Ask which host runs ambari-server; default to "master" on empty input.
read -r -p "Please input your master hostname: " master
master=${master:-"master"}
ssh "$master" "ambari-server stop"
# Reset (wipe) the ambari-server database so a reinstall starts clean.
ssh "$master" "ambari-server reset"

for host in $hostList
do
    # Reachability check: count how many probe packets report 'Unreachable';
    # if every one of the $pingCount probes failed, skip this host.
    unPing=$(ping "$host" -c "$pingCount" | grep -c 'Unreachable')
    if [ "$unPing" == "$pingCount" ]; then
        echo -e "$logPre======>$host is Unreachable,please check '/etc/hosts' file"
        continue
    fi

    # -e so the trailing \n is an actual newline (the original printed a
    # literal backslash-n here).
    echo -e "$logPre======>$host deleting... \n"

    # 1) Remove the hdp.repo, HDP*.repo and ambari.repo yum repo files.
    #    Absolute paths are used: a separate `ssh host "cd dir"` (as in the
    #    original) is a no-op because every ssh invocation is its own session.
    ssh "$host" "rm -rf $yumReposDir/hdp.repo"
    ssh "$host" "rm -rf $yumReposDir/HDP*"
    ssh "$host" "rm -rf $yumReposDir/ambari.repo"

    # 2) Uninstall every HDP-related package — one yum transaction per
    #    package, as in the original, so a missing package cannot disturb
    #    the removal of the others.
    for pkg in \
        sqoop.noarch lzo-devel.x86_64 hadoop-libhdfs.x86_64 rrdtool.x86_64 \
        hbase.noarch pig.noarch lzo.x86_64 ambari-log4j.noarch oozie.noarch \
        oozie-client.noarch gweb.noarch snappy-devel.x86_64 hcatalog.noarch \
        python-rrdtool.x86_64 nagios.x86_64 webhcat-tar-pig.noarch \
        snappy.x86_64 libconfuse.x86_64 webhcat-tar-hive.noarch \
        ganglia-gmetad.x86_64 extjs.noarch hive.noarch hadoop-lzo.x86_64 \
        hadoop-lzo-native.x86_64 hadoop-native.x86_64 hadoop-pipes.x86_64 \
        nagios-plugins.x86_64 hadoop.x86_64 zookeeper.noarch \
        hadoop-sbin.x86_64 ganglia-gmond.x86_64 libganglia.x86_64 \
        perl-rrdtool.x86_64 epel-release.noarch 'compat-readline5*' \
        fping.x86_64 perl-Crypt-DES.x86_64 exim.x86_64 ganglia-web.noarch \
        perl-Digest-HMAC.noarch perl-Digest-SHA1.x86_64 bigtop-jsvc.x86_64
    do
        # $pkg may contain a glob (compat-readline5*) that expands remotely.
        ssh "$host" "yum remove -y $pkg"
    done

    # 3) Remove the /etc/alternatives symlinks.  BUGFIX: the original ran
    #    `cd $alterNativesDir` in one ssh session and `rm -rf <name>` in the
    #    next, so the relative names were deleted from the login directory,
    #    not from /etc/alternatives.  Absolute paths make it actually work.
    for link in hadoop-etc zookeeper-conf hbase-conf hadoop-log hadoop-lib \
        hadoop-default oozie-conf hcatalog-conf hive-conf hadoop-man \
        sqoop-conf hadoop-confone
    do
        ssh "$host" "rm -rf $alterNativesDir/$link"
    done

    # 4) Delete the service users created by the stack (-r removes their
    #    home directories, -f forces even if logged in).  The duplicate
    #    'hcat' entry from the original list is dropped.
    for user in nagios hive ambari-qa hbase oozie hcat mapred hdfs \
        rrdcached zookeeper sqoop puppet flume tez yarn storm knox kafka \
        falcon atlas mahout spark
    do
        ssh "$host" "userdel -rf $user"
    done

    # 5) Delete the config, runtime, log, library, temp and data
    #    directories left behind by the stack components.
    for dir in \
        /hadoop /etc/hadoop /etc/hbase /etc/hcatalog /etc/hive \
        /etc/ganglia /etc/nagios /etc/oozie /etc/sqoop /etc/zookeeper \
        /etc/kafka /etc/falcon /etc/yarn /etc/spark /etc/flume \
        /etc/mapred /etc/ambari-qa /etc/tez \
        /var/run/hadoop /var/run/hbase /var/run/hive /var/run/ganglia \
        /var/run/nagios /var/run/oozie /var/run/zookeeper \
        /var/run/ambari-metrics-monitor /var/run/ambari-server \
        /var/run/hadoop-mapreduce /var/run/hadoop-yarn /var/run/spark \
        /var/log/hadoop /var/log/hbase /var/log/hive /var/log/nagios \
        /var/log/oozie /var/log/zookeeper /var/log/hadoop-mapreduce \
        /var/log/hadoop-yarn /var/log/spark /var/nagios \
        /usr/lib/hadoop /usr/lib/hbase /usr/lib/hcatalog /usr/lib/hive \
        /usr/lib/oozie /usr/lib/sqoop /usr/lib/zookeeper \
        /var/lib/hive /var/lib/ganglia /var/lib/oozie /var/lib/zookeeper \
        /var/lib/hadoop-hdfs /var/lib/hadoop-mapreduce /var/lib/hadoop-yarn \
        /var/tmp/oozie /tmp/hive /tmp/nagios /tmp/ambari-qa \
        /tmp/sqoop-ambari-qa \
        /hadoop/oozie /hadoop/zookeeper /hadoop/mapred /hadoop/hdfs \
        /tmp/hadoop-hive /tmp/hadoop-nagios /tmp/hadoop-hcat \
        /tmp/hadoop-ambari-qa \
        /tmp/hsperfdata_hbase /tmp/hsperfdata_hive /tmp/hsperfdata_nagios \
        /tmp/hsperfdata_oozie /tmp/hsperfdata_zookeeper \
        /tmp/hsperfdata_mapred /tmp/hsperfdata_hdfs /tmp/hsperfdata_hcat \
        /tmp/hsperfdata_ambari-qa /tmp/hsperfdata_admin \
        /tmp/hsperfdata_spark
    do
        ssh "$host" "rm -rf $dir"
    done

    # 6) Finally remove the ambari packages themselves, the postgres
    #    database ambari-server used, and ambari's state/log/config dirs.
    ssh "$host" "yum remove -y ambari-*"
    ssh "$host" "yum remove -y postgresql"
    ssh "$host" "rm -rf /var/lib/ambari*"
    ssh "$host" "rm -rf /var/log/ambari*"
    ssh "$host" "rm -rf /etc/ambari*"

    echo -e "$logPre======>$host is done! \n"
done
分享到:
评论

相关推荐

    Ambari2.6.*集群完全卸载清除脚本

    Ambari完全卸载脚本,参数加host列表文件,卸载前请确认脚本是否符合本地真实情况

    Ambari卸载脚本

    执行命令: sh cleanAmbariNew.sh hostfile 其中,hostfile文件内容(机器地址): 可以reboot重启下,防止启用组件端口会被占用 注意:脚本中删除Postgres数据库(重装会造成数据丢失)

    ambari卸载脚本

    安装ambari集群后,若果需要卸载的同学,可以参考下。未必能直接用,但是借鉴是可以的。毕竟环境不同,我这边的环境可以用,你那边的环境未必可行。

    ambari_hdp 完全卸载脚本

    ambari_HDP 完全卸载脚本,包括所有 Ambari 和 HDP 组件及数据库文件,日志文件。工具包由 3 个脚本组成,解压之后不要丢失其中的任何一个文件。使用方法:解压后,进入 uninstall_hdp_ambri 目录,运行: sudo ./...

    delete_hdp_ambri.zip

    ambari卸载脚本包含删除用户,卸载ambari,需要的直接下载

    ambari 集群 卸载

    ambari集群安装后不成功的反安装过程,经过积累的操作记录

    快速移除ambari和hadoop脚本

    通过ambari安装hadoop及组件有时候会失败,卸载清除非常麻烦,通过此脚本可以快速实现用户删除,目录删除,组件卸载,非常好用。

    ambari 2.7.5 安装包下载

    该资源为Ambari2.7.5预编译安装包,资源已放到百度网盘,可以先下载《ambari预编译安装包网盘下载地址.txt》获取网盘地址进行下载,资源内容:ambari-2.7.5.0-centos7.tar.gz 如有其他ambari相关资源需求可私信我

    局域网下离线安装Ambari详细说明

    局域网下离线安装Ambari,卸载旧版本数据库,安装新的Mysql,一些组件的验证,详细叙述过程

    [Ambari]==Ambari+Hadoop3集成impala3.2、kudu1.10.0

    ambari2.7.5集成HDP3,本身不带impala、kudu 故集成cloudera的impala、kudu安装方式 ambari插件安装方式。 解压放到/var/lib/ambari-server/resources/stacks/HDP/3.1/services/下

    ambari 搭建大数据平台

    ambari 搭建分布式的大数据平台,可对大数据平台进行集群管理

    ambari server 添加主机api

    通过ambari server api 添加节点 可以脱离ambari dashboard

    ambari 2.7.6 centos7编译后rpm包下载

    ambari-server-2.7.6.0-0.x86_64.rpm ambari-agent-2.7.6.0-0.x86_64.rpm 1.Ambari绝唱版!通过官网 Installation Guide for Ambari 2.7.6 创建的自编译rpm包 2.编译耗时近一周,依赖356+,大文件可以手动下载下来,...

    ambari-server汉化包

    ambari汉化包2.6.2.2,支持hadoop2.7.3. 替换/usr/lib/ambari-server/web/javascripts 下的app.js 1)上传app.js.bak到路径 2)mv app.js app.js_bak 3) mv app.js.bak app.js

    ambari邮件告警配置

    对ambari大数据平台进行邮件告警配置!

    Ambari和HDP下载网盘地址(Ambari-2.5.2.0 + HDP-2.6.2.14)

    一定要注意:其他版本如Ambari-2.7.3.0 + HDP-3.1.0.0,Ambari-2.7.4.0+HDP-3.1.4.0,Ambari-2.7.5.0 + HDP-3.1.5.0版本可以私信我。其他版本一定要联系博主,否则下载错误,不负责,链接失效也可以私信我。

    Ambari 2.7.5及其配套的HDP和HDF资源包

    ambari-2.7.5.0-centos7.tar.gz HDP-3.1.5.6091-centos7-rpm.tar.gz HDF-3.4.1.1-centos7-rpm.tar.gz HDP-GPL-3.1.5.0-centos7-gpl.tar.gz hdf-ambari-mpack-3.4.1.1-4.tar.gz HDP-UTILS-1.1.0.22-centos7.tar.gz ...

    Ambari部署Hadoop集群.doc

    本章首先对Ambari做简单介绍,然后详细介绍了Ambari的安装,以及如何使用Ambari搭建Hadoop集群,最后阐述了Ambari的架构和工作原理。 本章主要内容如下。 (1)Ambari简介。 (2)Ambari的安装。 (3)利用Ambari...

    Ambari 开源软件.doc

    Ambari 跟 Hadoop 等开源软件一样,也是 Apache Software Foundation 中的一个项目,并且是顶级项目。目前最新的发布版本是 2.0.1,未来不久将发布 2.1 版本。就 Ambari 的作用来说,就是创建、管理、监视 Hadoop 的...

Global site tag (gtag.js) - Google Analytics