  • One-click deployment shell scripts

    1. Configure passwordless SSH login across the cluster

    (1) Configure hostnames: vim /etc/hosts
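
     The hosts file maps each node's short name to its IP so the later scripts can address machines as crawler01, crawler02, and so on. The addresses below are placeholders for illustration only, not the real cluster IPs:

    192.168.1.101  crawler01
    192.168.1.102  crawler02
    192.168.1.103  crawler03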

    (2) Generate an SSH key pair: ssh-keygen -t rsa -P ''

     You will be prompted for a location to store the key; press Enter to accept the default and the key pair is generated.

    (3) cat id_rsa.pub and append the generated public key to the authorized_keys file on every machine you need to log in to

     After that, ssh crawler01 logs in directly without a password.
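
     A minimal sketch of steps (2) and (3) run from one node, assuming the default key path ~/.ssh/id_rsa.pub and that ~/.ssh already exists on the target machines (the host names are just examples):

    # Generate the key pair once on the source machine (empty passphrase)
    ssh-keygen -t rsa -P ''

    # Append the public key to authorized_keys on each target node
    # (each ssh here still asks for the password once)
    for host in crawler01 crawler02 crawler03
    do
        cat ~/.ssh/id_rsa.pub | ssh $host "cat >> ~/.ssh/authorized_keys"
    done

    # Verify that the passwordless login works
    ssh crawler01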

    2. One-click start script

    #!/bin/bash

    # Name of the application package
    ZIP_NAME="aikucun-distributed-spider-crawler.zip"

    zip_exist(){
        if [ ! -f "$ZIP_NAME" ]
        then
            return 1
        else
            return 0
        fi
    }

    backup(){
        echo "Running backup..."
        cd ~/crawler
        if zip_exist; then
            mv $ZIP_NAME ~/jar_bak
            echo "crawler package backed up!"
        else
            echo "$ZIP_NAME does not exist, nothing to back up, please check!"
        fi
    }

    check_single(){
        if [ ! -f "bin/single.sh" ]
        then
            return 1
        else
            return 0
        fi
    }

    stop(){
        echo "Running stop..."
        if check_single; then
            sh bin/single.sh stop
            CURRENT_PATH=`cd "$(dirname "$0")"; pwd`
            echo "Current directory: $CURRENT_PATH"
            rm -rf ~/crawler/*
            echo "Old program removed"
        else
            echo "single.sh not found, cannot stop the program, please check that the package layout is complete!"
            exit 1
        fi
    }

    start(){
        echo "Running start..."
        cd ~/jar_new
        if zip_exist; then
            mv $ZIP_NAME ~/crawler
            cd ~/crawler
            unzip -q $ZIP_NAME
            echo "Package unpacked!"
            cd bin
            dos2unix single.sh
            echo "Line endings converted!"
            sh single.sh start
        else
            echo "$ZIP_NAME does not exist, cannot deploy, please check!"
            exit 1
        fi
    }

    backup
    stop
    start
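
     The script above assumes a fixed layout in the deploy user's home directory: ~/jar_new receives the freshly uploaded package, ~/crawler holds the unpacked running program, and ~/jar_bak keeps the previous package. Creating these directories is a one-time preparation step implied by the script, for example:

    mkdir -p ~/crawler ~/jar_new ~/jar_bak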

    3. upload.sh: upload the one-click start script to each node

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`

    crawler_list="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    "
    for i in $crawler_list
    do
            echo "Uploading the one-click start script to $i"
            scp $BASE_PATH/publish_crawler.sh $i:~
            echo "Upload to $i finished"
    done

    4. One-click deploy script

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`
    SHELL_NAME="publish_crawler.sh"

    crawler_list="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    "
    for i in $crawler_list
    do
            echo "Uploading the application package to $i"
            scp $BASE_PATH/crawler/aikucun-distributed-spider-crawler.zip $i:~/jar_new
            echo "Logging in and running the one-click start script"
            ssh $i "sh $SHELL_NAME"
    done
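
     Putting sections 2-4 together, a full release from the control node might look like the following. upload.sh is the script from section 3, while "deploy.sh" is only an assumed name for the one-click deploy script above; the new package is expected under ./crawler/ next to it:

    # One-time: push the start script publish_crawler.sh to every node
    sh upload.sh

    # Each release: push the new package to every node and restart it there
    sh deploy.sh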

    5. Generalizing the scripts

    (1) Script that uploads the one-click start script to the target servers

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`

    SERVER_LIST=""

    upload(){
    echo "Upload server list: $SERVER_LIST"
    for i in $SERVER_LIST
    do
            echo "Uploading the one-click start script to $i"
            scp $BASE_PATH/publish.sh $i:~
            echo "Upload to $i finished"
    done
    }

    case "$1" in

    crawler)
    SERVER_LIST="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    "
    upload
    ;;
    transport)
    SERVER_LIST="
    transport01
    transport02
    transport03
    transport04
    transport05
    transport06
    "
    upload
    ;;
    storage)
        echo "Not integrated yet, stay tuned!"
    ;;
    *)
        echo "available operations: [crawler|transport|storage]"
        exit 1
    ;;
    esac
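
     With the target group passed as the first argument, one script now serves several clusters. Assuming it is saved as upload.sh as in section 3:

    sh upload.sh crawler      # push publish.sh to crawler02..crawler15
    sh upload.sh transport    # push publish.sh to transport01..transport06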

    (2) One-click start script

    #!/bin/bash

    # Name of the application package, derived from the first argument
    APP_NAME=$1
    ZIP_NAME="aikucun-distributed-spider-$APP_NAME.zip"
    APP_PATH=$APP_NAME

    zip_exist(){
        if [ ! -f "$ZIP_NAME" ]
        then
            return 1
        else
            return 0
        fi
    }

    backup(){
        echo "Running backup..."
        cd ~/$APP_PATH
        if zip_exist; then
            mv $ZIP_NAME ~/jar_bak
            echo "$APP_NAME package backed up!"
        else
            echo "$ZIP_NAME does not exist, nothing to back up, please check!"
        fi
    }

    check_single(){
        if [ ! -f "bin/single.sh" ]
        then
            return 1
        else
            return 0
        fi
    }

    stop(){
        echo "Running stop..."
        if check_single; then
            sh bin/single.sh stop
            CURRENT_PATH=`cd "$(dirname "$0")"; pwd`
            echo "Current directory: $CURRENT_PATH"
            rm -rf ~/$APP_PATH/*
            echo "Old program removed"
        else
            echo "single.sh not found, cannot stop the program, please check that the package layout is complete!"
            exit 1
        fi
    }

    start(){
        echo "Running start..."
        cd ~/jar_new
        if zip_exist; then
            mv $ZIP_NAME ~/$APP_PATH
            cd ~/$APP_PATH
            unzip -q $ZIP_NAME
            echo "Package unpacked!"
            cd bin
            dos2unix single.sh
            echo "Line endings converted!"
            sh single.sh start
        else
            echo "$ZIP_NAME does not exist, cannot deploy, please check!"
            exit 1
        fi
    }

    case "$1" in

    crawler|transport|storage)
        backup
        stop
        start
    ;;
    *)
        echo "available operations: [crawler|transport|storage]"
        exit 1
    ;;
    esac
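
     On each node the generalized publish.sh derives both the package name and the install directory from its first argument, so the same script handles every application, e.g.:

    sh publish.sh crawler      # back up, stop and restart the crawler under ~/crawler
    sh publish.sh transport    # same flow for the transport application under ~/transport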

    (3) One-click deploy script

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`
    SHELL_NAME="publish.sh"
    APP_NAME=$1

    SERVER_LIST="init"

    execute(){
            echo "Server list: $SERVER_LIST"
            echo "Application name: $APP_NAME"
            for i in $SERVER_LIST
            do
                    echo "Uploading the $APP_NAME package to $i"
                    scp $BASE_PATH/$APP_NAME/aikucun-distributed-spider-$APP_NAME.zip $i:~/jar_new
                    echo "Logging in and running the one-click start script"
                    ssh $i "sh $SHELL_NAME $APP_NAME"
            done
    }

    case "$1" in

    crawler)
    SERVER_LIST="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    "
    execute
    ;;
    transport)
    SERVER_LIST="
    transport01
    transport02
    transport03
    transport04
    transport05
    transport06
    "
    execute
    ;;
    storage)
        echo "Not integrated yet, stay tuned!"
    ;;
    *)
        echo "available operations: [crawler|transport|storage]"
        exit 1
    ;;
    esac
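
     From the control node, a whole group is now rolled out with a single command; assuming the deploy script above is saved as deploy.sh:

    sh deploy.sh crawler      # upload the crawler package to each crawler node and restart it there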

    6. Further optimization: add a one-click stop function

    (1) Script that uploads the publish script to every server in the cluster

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`

    SERVER_LIST=""

    upload(){
    echo "Upload server list: $SERVER_LIST"
    for i in $SERVER_LIST
    do
            echo "Uploading the publish script to $i"
            scp $BASE_PATH/publish.sh $i:~
            echo "Upload to $i finished"
    done
    }

    case "$1" in

    crawler)
    SERVER_LIST="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    crawler16
    crawler17
    "
    upload
    ;;
    transport)
    SERVER_LIST="
    transport01
    transport02
    transport03
    transport04
    transport05
    transport06
    transport07
    transport08
    "
    upload
    ;;
    storage)
    SERVER_LIST="
    storage01
    storage02
    storage03
    storage04
    "
    upload
    ;;
    controller)
    SERVER_LIST="
    controller01
    controller02
    controller03
    "
    upload
    ;;
    *)
        echo "available operations: [crawler|transport|storage|controller]"
        exit 1
    ;;
    esac

    (2) Publish script

    #!/bin/bash

    # Name of the application package, derived from the first argument
    APP_NAME=$1
    ZIP_NAME="aikucun-distributed-spider-$APP_NAME.zip"
    APP_PATH=$APP_NAME

    zip_exist(){
        if [ ! -f "$ZIP_NAME" ]
        then
            return 1
        else
            return 0
        fi
    }

    backup(){
        echo "Running backup..."
        cd ~/$APP_PATH
        if zip_exist; then
            mv $ZIP_NAME ~/jar_bak
            echo "$APP_NAME package backed up!"
        else
            echo "$ZIP_NAME does not exist, nothing to back up, please check!"
        fi
    }

    check_single(){
        if [ ! -f "bin/single.sh" ]
        then
            return 1
        else
            return 0
        fi
    }

    stop(){
        echo "Running stop..."
        cd ~/$APP_PATH
        if check_single; then
            sh bin/single.sh stop
            CURRENT_PATH=`cd "$(dirname "$0")"; pwd`
            echo "Current directory: $CURRENT_PATH"
        else
            echo "single.sh not found, cannot stop the program, please check that the package layout is complete!"
            exit 1
        fi
    }

    start(){
        echo "Running start..."
        rm -rf ~/$APP_PATH/*
        echo "Old program removed"
        cd ~/jar_new
        if zip_exist; then
            mv $ZIP_NAME ~/$APP_PATH
            cd ~/$APP_PATH
            unzip -q $ZIP_NAME
            echo "Package unpacked!"
            cd bin
            dos2unix single.sh
            echo "Line endings converted!"
            sh single.sh start
        else
            echo "$ZIP_NAME does not exist, cannot deploy, please check!"
            exit 1
        fi
    }

    case "$1" in

    crawler|transport|storage|controller)
        if [ "$2" == 'stop' ]; then
            stop
        else
            backup
            stop
            start
        fi
    ;;
    *)
        echo "available operations: [crawler|transport|storage|controller]"
        exit 1
    ;;
    esac
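
     The second argument selects the action: 'stop' only stops the running program, anything else performs the full backup/stop/start cycle. For example:

    sh publish.sh crawler stop   # stop the crawler on this node, keep the current package
    sh publish.sh crawler        # full redeploy: back up, stop, clear, unpack, start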

    (3) One-click deploy execution script

    #!/bin/bash

    # Directory this script lives in
    BASE_PATH=`cd "$(dirname "$0")"; pwd`
    SHELL_NAME="publish.sh"
    APP_NAME=$1
    ACT=$2

    SERVER_LIST="init"

    execute(){
            echo "Server list: $SERVER_LIST"
            echo "Application name: $APP_NAME"
            for i in $SERVER_LIST
            do
                    echo "Uploading the $APP_NAME package to $i"
                    scp $BASE_PATH/$APP_NAME/aikucun-distributed-spider-$APP_NAME.zip $i:~/jar_new
                    echo "Logging in and running the one-click $ACT script"
                    ssh $i "sh $SHELL_NAME $APP_NAME $ACT"
            done
    }

    case "$1" in

    crawler)
    SERVER_LIST="
    crawler02
    crawler03
    crawler04
    crawler05
    crawler06
    crawler07
    crawler08
    crawler09
    crawler10
    crawler11
    crawler12
    crawler13
    crawler14
    crawler15
    crawler16
    crawler17
    "
    execute
    ;;
    transport)
    SERVER_LIST="
    transport01
    transport02
    transport03
    transport04
    transport05
    transport06
    transport07
    transport08
    "
    execute
    ;;
    storage)
    SERVER_LIST="
    storage01
    storage02
    storage03
    storage04
    "
    execute
    ;;
    controller)
    SERVER_LIST="
    controller01
    controller02
    "
    execute
    ;;
    *)
        echo "available operations: [crawler|transport|storage|controller]"
        exit 1
    ;;
    esac
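
     Driven from the control node, the same pair of arguments fans out to every server in the group; assuming the script above is saved as deploy.sh:

    sh deploy.sh transport stop   # run the node-side stop on every transport server (the package is still uploaded first)
    sh deploy.sh transport        # redeploy the transport application on every node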

  • Original article: https://www.cnblogs.com/shuo1208/p/11851553.html