A complete set of shell web-monitoring scripts

This post collects the full set of shell web-monitoring scripts. The check rules have changed several times; compare with the old version to see what's different — I was too lazy to write a brand-new write-up.

The old version link: https://blog.51cto.com/junhai/2407485

The set consists of three scripts; below is a rough record of how each one is implemented.

How the failure duration is calculated: https://blog.51cto.com/junhai/2430313

WeChat Work (enterprise WeChat) robot alerts: https://blog.51cto.com/junhai/2424374

I don't use a database — honestly, I wouldn't know how...... so everything lives in three txt files. The repeated sed calls could be merged into a single sed -e invocation, but I haven't taken the time to optimize that.
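For example, the pair of sed calls that appears throughout the scripts could be collapsed into one invocation (same expressions, just combined):

sed -i -e "s|$url||" -e '/^$/d' url.txt    # delete the URL, then drop the resulting empty line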

url.txt (stores the URLs to monitor) > url.del (collects unreachable URLs; analyzed by the second script) > url.add (the third script checks these URLs for recovery)
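The post doesn't show the file layouts. Judging from the scripts, url.txt holds one URL per line and url2.txt maps a system name (plus contact details) to each URL; the entries below are made up for illustration and the exact column order in url2.txt is an assumption:

# url.txt — one monitored URL per line
https://www.example.com
https://oa.example.org

# url2.txt — system name, URL, then contact fields (layout assumed)
门户网站 https://www.example.com 张三
OA系统 https://oa.example.org 李四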

The monitoring output looks like this:

(screenshot: monitoring output)

External alerts go out by e-mail. Because monitoring the government-cloud websites generates so many e-mails, the sender is regularly flagged as a spam source and the mailbox has been blocked several times.

One alarm plus one recovery notice: repeated alarms are suppressed, so a single outage only produces two messages. Recovery is handled by the third script. This mode is used for WEB monitoring on the government cloud platform.

(screenshot: alert example)

After 3 unreachable alarms the alerting stops until the site recovers, and the alarm @-mentions the people responsible; this is controlled by the second script and is used for Changwei's internal system-monitoring alerts. E-mail is optional — internal WeChat Work messages arrive faster.


(screenshots: WeChat alarm and recovery messages)

The first script is the main checker; it walks url.txt and probes each site:

#!/bin/bash
#20190914
#QQ450433231

. /root/weixin.sh    # WeChat alert functions (wx_web)

cur_time(){
    date "+%Y/%m/%d %H:%M:%S"
}

# Look up the system name for $url in url2.txt; fall back to the bare URL.
systemname(){
    name=$(grep -c "$url" url2.txt)
    if [ "$name" -eq 1 ];then
        grep -w "$url" url2.txt
    else
        echo "$url"
    fi
}

[ ! -f /root/url.txt ] && echo "url.txt文件不存在" && exit 1
sed -i '/^$/d' url.txt    # drop empty lines
while read url
do
    [ -z "$url" ] && echo "url.txt存在空格 检查文件格式" && exit 1
    # Probe up to 5 times; alarm on the 5th consecutive failure.
    for ((i=1;i<6;i++))
    do
        rule=$(curl -i -s -k -L -m 10 "$url"|grep -cw "HTTP/1.1 200")
        if [ "$rule" -ge 1 ];then
            echo "$(cur_time) 第$i次检查$url网页访问成功" >> check.log
            break
        elif [ $i -eq 5 ];then
            echo "$url" >> url.del
            info=$(echo -e "$(cur_time) \n$(systemname) 网页无法访问,请检查!")
            wx_web
            echo -e "$(cur_time) \n\n$(systemname) 网页无法访问,请检查!"|mail -s "【重要告警】网页不可达" [email protected]
            echo "$(cur_time) $(systemname) 网页$(expr $i \* 3)秒无法访问,请检查!" >> checkfail.log
            sh /root/checkdel.sh    # second script: suppresses repeat alarms
        else
            echo "$(cur_time) 第$i次检查$url网页访问失败" >> checkfail.log
            sleep 3
        fi
    done
done < /root/url.txt
sh /root/checkadd.sh    # third script; I prefer to run it separately on a 1-minute interval
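All of the scripts source /root/weixin.sh for the WeChat alert functions wx_web and wx_ah, which send whatever is in the global $info variable; that file is not included in this post (the WeChat-robot link above covers it). A minimal sketch of what it might look like, assuming a WeChat Work group-robot webhook — the key placeholder, the newline escaping and the @all mention are my assumptions, not the author's code:

#!/bin/bash
# /root/weixin.sh — hypothetical sketch; replace KEY with your own group-robot key
WEBHOOK="https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=KEY"

wx_web(){
    # escape literal newlines so the JSON body stays valid, then post $info
    local content=${info//$'\n'/\\n}
    curl -s -H "Content-Type: application/json" \
         -d "{\"msgtype\":\"text\",\"text\":{\"content\":\"$content\"}}" "$WEBHOOK" > /dev/null
}

wx_ah(){
    # same, but @ everyone in the group (assumption — the post says alarms @ the people responsible)
    local content=${info//$'\n'/\\n}
    curl -s -H "Content-Type: application/json" \
         -d "{\"msgtype\":\"text\",\"text\":{\"content\":\"$content\",\"mentioned_list\":[\"@all\"]}}" "$WEBHOOK" > /dev/null
}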
The second script, checkdel.sh, runs right after an alarm: it counts how often each URL has landed in url.del and, on the first alarm, moves the URL out of url.txt (so it stops re-alerting) and into url.add for recovery checking:

#!/bin/bash
. /root/weixin.sh

cur_time(){
    date "+%Y/%m/%d %H:%M:%S"
}   # print the current time

# make sure the work files exist before sed -i touches them
touch url.del url.delout url.add

sed -i '/^$/d' url.del
sed -i '/^$/d' url.delout

# count how many times each failed URL has been recorded
sort url.del|uniq -c >> url.delout

while read line
do
    i=$(echo "$line"|awk '{print$1}')
    newurl=$(echo "$line"|awk '{print$2}')
    if [ -z "$newurl" ];then
        continue
    elif [ "$i" -eq 1 ];then
        # first alarm: pause further alerts for this URL and watch it for recovery
        echo "$newurl" >> url.add
        sed -i "s|$newurl||" url.txt
        sed -i "s|$newurl||" url.del
        sed -i '/^$/d' url.txt
        sed -i '/^$/d' url.del
        #info=`echo -e "$(cur_time) \n $i次告警网站$newurl未恢复 暂停发送告警"`
        #wx_web
        #echo "$(cur_time)  $i次告警网站$newurl未恢复 暂停发送告警"|mail -s "【暂停告警】" [email protected]
        #echo "$(cur_time)  网站$newurl未恢复 暂停告警" >> checkfail.log
        sed -i "s|$newurl||" url.delout
        sed -i '/^$/d' url.delout
    else
        echo > url.del
        continue
    fi
done < url.delout
echo > url.delout
The third script, checkadd.sh, re-checks the URLs parked in url.add; once a site answers again it goes back into url.txt and a recovery notice is sent:

#!/bin/bash

. /root/weixin.sh      # WeChat alert functions (wx_ah)
. /root/downtime.sh    # provides fail_time, which reports how long the site was down

cur_time(){
    date "+%Y/%m/%d %H:%M:%S"
}

# Look up the system name for $url in url2.txt; fall back to the bare URL.
systemname(){
    name=$(grep -c "$url" url2.txt)
    if [ "$name" -eq 1 ];then
        grep "$url" url2.txt|awk '{print$1,$2}'
    else
        echo "$url"
    fi
}

sed -i '/^$/d' url.add

while read url
do
    [ -z "$url" ] && echo "url.add存在空格 检查文件格式" && exit 1
    rule=$(curl -i -s -k -L -m 10 "$url"|grep -cw "HTTP/1.1 200")
    if [ "$rule" -ge 1 ];then
        # recovered: put the URL back into url.txt unless it is already there
        u=$(grep -cw "$url" url.txt)
        if [ "$u" -eq 1 ];then
            echo "$url已存在" >> checkfail.log
        else
            echo "$url" >> url.txt
        fi
        sed -i "s|$url||" url.add
        sed -i '/^$/d' url.add
        echo "$(cur_time) $url 已恢复监控" >> checkfail.log
        info=$(echo -e "$(cur_time) \n$(systemname) 网站恢复啦 \n$(fail_time)")
        wx_ah
        echo -e "$(cur_time) $(systemname) 网站已恢复 \n$(fail_time)"|mail -s "【网站恢复】" [email protected]
    else
        echo "$(cur_time) 检查$url网页访问失败" >> /dev/null    #checkfail.log (silenced on purpose)
    fi
done < url.add
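The sourced /root/downtime.sh (the fail_time function) is not shown here either; the author's version is described in the failure-duration post linked at the top. As a rough stand-in — the timestamp format and the "无法访问" marker come from the first script's checkfail.log lines, everything else is my guess:

#!/bin/bash
# /root/downtime.sh — hypothetical sketch, NOT the author's implementation
fail_time(){
    local last d t start now
    # last alarm line for this URL, e.g. "2019/09/14 10:23:45 ... 网页15秒无法访问,请检查!"
    last=$(grep "$url" checkfail.log | grep "无法访问" | tail -n 1)
    [ -z "$last" ] && return
    d=$(echo "$last" | awk '{print $1}')
    t=$(echo "$last" | awk '{print $2}')
    start=$(date -d "${d//\//-} $t" +%s)    # logged time -> epoch seconds
    now=$(date +%s)
    echo "故障时长约 $(( (now - start) / 60 )) 分钟"
}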

Later I wrote a simple-minded interactive script to add, delete, update and list the monitored URLs — make do with it. Running sh <script> add is enough to batch-add sites to the monitoring list.

#!/bin/bash
#20190831
case $1 in
        add )
                [ -z "$2" ] && echo "请在add后面输入要增加监控的网站地址 用法: add baidu.com" && exit 1
                u=$(grep -cw "$2" url.txt)
                if [ "$u" -eq 1 ];then
                        echo "$2 网站已存在"
                else
                        echo "$2" >> url.txt
                        echo "$2 网站已加入监控列表"
                fi
                ;;
        del )
                [ -z "$2" ] && echo "请在del后面输入要移除监控的网站地址 用法: del baidu.com" && exit 1
                u=$(grep -cw "$2" url.txt)
                if [ "$u" -eq 1 ];then
                        sed -i "s|$2||" url.txt
                        sed -i '/^$/d' url.txt
                        echo "$2 网站已删除"
                else
                        echo "$2 网站未找到"
                fi
                ;;
        update )
                vi url2.txt
                ;;
        dis )
                sort url.txt|uniq -c
                ;;
        back )
                cp url.txt url.bk
                echo "监控列表备份文件url.bk"
                ;;
        disuniq )
                # de-duplicate the monitoring list, keeping a backup first
                cp url.txt url.bk
                sort url.txt|uniq -c|awk '{print$2}' > url.new
                cat url.new > url.txt
                rm -f url.new
                sed -i '/^$/d' url.txt
                echo "监控列表去重完成 备份文件url.bk"
                ;;
        * )
                echo "----------------$(date)----------------"
                echo "sh $0 add     增加监控网站"
                echo "sh $0 del     删除监控网站"
                echo "sh $0 update  修改网站联系信息"
                echo "sh $0 dis     显示监控列表"
                echo "sh $0 back    备份监控列表"
                echo "sh $0 disuniq 监控列表去重"
                echo "----------------------------------"
                ;;
esac
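For example (the file name url_manage.sh is just a placeholder — the post never names this script):

sh url_manage.sh add https://www.example.com    # add a site to url.txt
sh url_manage.sh dis                            # show the list with duplicate counts
sh url_manage.sh del https://www.example.com    # remove the site again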

By the way, the cron schedule I normally use is below; see the old-version post for details.

*/5 * * * * sh /root/check.sh
*/1 * * * * sh /root/checkadd.sh
0 0 * * * echo > /root/url.del



Original post: blog.51cto.com/junhai/2437965