Use Python to query MySQL database to generate Excel file to send monitoring weekly report.

The business side requires a weekly report summarizing the details of the online projects over the past week. We use Zabbix for monitoring. Previously the report was filled in manually, which was time-consuming and labor-intensive, and it also looked unprofessional. So I spent a few months learning Python and wrote the following script; it is shared here for reference.

#!/usr/bin/env python
#-*- coding: utf8 -*-
import MySQLdb
import string
import xlsxwriter
import time,datetime
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

# Zabbix MySQL connection settings.  The real credentials were scrubbed
# before publishing -- fill these in before running.
zdbhost = '  '
zdbuser = '  '
zdbpass = '  '
zdbport = 3306  # was left blank in the published copy (a syntax error); 3306 is the MySQL default
zdbname = '  '

# Reporting window: the current timestamp and the timestamp 7 days earlier.
stop_time = int(time.time())
start_time = stop_time - 604800  # 7 days * 24 h * 3600 s
date = time.strftime("%Y-%m-%d", time.localtime(stop_time))
xlsfilename = '%s业务周报.xlsx' % date

# Per-project metric definitions.  Each inner list is one project (one
# worksheet row); each entry describes one metric (one column):
#   [item name, zabbix itemid, aggregate ('max'/'avg'), printf format,
#    unit divisor, zabbix groupid, (optional trailing filler)]
# NOTE(review): the printf-format and unit-divisor fields are never read by
# report() below -- values are humanized with K/M/G suffixes instead.
keys = [
    [
        ['UV/天','167365','max', '%.2f', 1,'48',],
        ['PV/周','167359','max', '%.2f', 1,'48',],
        ['最大并发','167364','max', '%.2f', 1,'48',],
        ['最大并发发生时间','167364','max', '%.2f', 1,'48',],
        ['平均QPS','167361','avg', '%.2f', 1,'48'],
        ['最大QPS','167361','max', '%.2f', 1,'48'],
        ['最大QPS发生时间','167361','max', '%.2f', 1,'48'],
        ['最大带宽(in)','81291','max', '%.2f', '1048576','48'],
        ['平均带宽(in)','81291','avg', '%.2f', '1048576','48'],
        ['最大带宽(out)','81292','max', '%.2f', '1048576','48'],
        ['平均带宽(out)','81292','avg', '%.2f', '1048576','48']
    ],
    [
        ['UV/天','168141','max', '%.2f', 1,'16'],
        ['PV/周','168135','max', '%.2f', 1,'16'],
        ['最大并发','168140','max', '%.2f', 1,'16'],
        ['最大并发发生时间','168140','max', '%.2f', 1,'16',''],
        ['平均QPS','168137','avg', '%.2f', 1,'16'],
        ['最大QPS','168137','max', '%.2f', 1,'16'],
        ['最大QPS发生时间','168137','max', '%.2f', 1,'16'],
        ['最大带宽(in)', '104522','max', '%.2f', '1048576','16'],
        ['平均带宽(in)', '104522','avg', '%.2f', '1048576','16'],
        ['最大带宽(out)', '104523','max', '%.2f', '1048576','16'],
        ['平均带宽(out)', '104523','avg', '%.2f', '1048576','16']
    ],
    [
        ['UV/天', '163672','max', '%.2f', 1,'71'],
        ['PV/周', '163666','max', '%.2f', 1,'71'],
        ['最大并发', '163671','max', '%.2f', 1,'71'],
        ['最大并发发生时间', '163671','max', '%.2f', 1,'71',''],
        ['平均QPS','163668','avg', '%.2f', 1,'71'],
        ['最大QPS','163668','max', '%.2f', 1,'71'],
        ['最大QPS发生时间','163668','max', '%.2f', 1,'71'],
        ['最大带宽(in)', '84942','max', '%.2f', '1048576','71'],
        ['平均带宽(in)', '84942','avg', '%.2f', '1048576','71'],
        ['最大带宽(out)', '84943','max', '%.2f', '1048576','71'],
        ['平均带宽(out)', '84943','avg', '%.2f', '1048576','71']
    ],
    [
        ['UV/天', '154707','max', '%.2f', 1,'132'],
        ['PV/周', '154722','max', '%.2f', 1,'132'],
        ['最大并发', '154706','max', '%.2f', 1,'132'],
        ['最大并发发生时间', '154706','max', '%.2f', 1,'132',''],
        ['平均QPS','154703','avg', '%.2f', 1,'132'],
        ['最大QPS','154703','max', '%.2f', 1,'132'],
        ['最大QPS发生时间','154703','max', '%.2f', 1,'132'],
        ['最大带宽(in)', '152400','max', '%.2f', '1048576','132'],
        ['平均带宽(in)', '152400','avg', '%.2f', '1048576','132'],
        ['最大带宽(out)', '152402','max', '%.2f', '1048576','132'],
        ['平均带宽(out)', '152402','avg', '%.2f', '1048576','132']
    ],
    [
        ['UV/天', '189559','max', '%.2f', 1,'31'],
        ['PV/周', '189553','max', '%.2f', 1,'31'],
        ['最大并发', '189558','max', '%.2f', 1,'31'],
        ['最大并发发生时间', '189558','max', '%.2f', 1,'31',''],
        ['平均QPS','189555','avg', '%.2f', 1,'31'],
        ['最大QPS','189555','max', '%.2f', 1,'31'],
        ['最大QPS发生时间','189555','max', '%.2f', 1,'31'],
        ['最大带宽(in)', '190757','max', '%.2f', '1048576','31'],
        ['平均带宽(in)', '190757','avg', '%.2f', '1048576','31'],
        ['最大带宽(out)', '190758','max', '%.2f', '1048576','31'],
        ['平均带宽(out)', '190758','avg', '%.2f', '1048576','31']
    ],
    [
        ['UV/天','188642','max', '%.2f', 1,'50'],
        ['PV/周','188636','max', '%.2f', 1,'50'],
        ['最大并发','188641','max', '%.2f', 1,'50'],
        ['最大并发发生时间','188641','max', '%.2f', 1,'50',''],
        ['平均QPS','188638','avg', '%.2f', 1,'50'],
        ['最大QPS','188638','max', '%.2f', 1,'50'],
        ['最大QPS发生时间','188638','max', '%.2f', 1,'50'],
        ['最大带宽(in)', '33707','max', '%.2f', '1048576','50'],
        ['平均带宽(in)', '33707','avg', '%.2f', '1048576','50'],
        ['最大带宽(out)', '33712','max', '%.2f', '1048576','50'],
        ['平均带宽(out)', '33712','avg', '%.2f', '1048576','50']
    ]
]

#: Aggregate names that may legally be spliced into SQL identifiers below.
_AGGREGATES = ('min', 'avg', 'max')


def _humanize(value):
    '''Format a numeric metric with a K/M/G suffix; pass strings through
    unchanged (error markers such as "Not monitored").'''
    if isinstance(value, str):
        return value
    # Mirrors the original nested ladder: thresholds are decimal (1000-based)
    # even for bandwidth items.
    if value > 1000000000:
        return '%.2fG' % (float(value) / 1000000000)
    if value > 1000000:
        return '%.2fM' % (float(value) / 1000000)
    if value > 1000:
        return '%.2fK' % (float(value) / 1000)
    return value


def _query_metric(cursor, itemid, aggregate):
    '''Return aggregate(value_<aggregate>) for *itemid* since start_time.

    Returns the string "Not monitored" when the query fails or yields no
    data.  The aggregate name is validated against a whitelist because it
    is spliced into the SQL as an identifier (it cannot be parameterized);
    the values themselves go through DB-API parameters.
    '''
    if aggregate not in _AGGREGATES:
        raise ValueError('unsupported aggregate: %r' % (aggregate,))
    sql = ('select %s(value_%s) as result from trends_uint '
           'where itemid = %%s and clock >= %%s' % (aggregate, aggregate))
    try:
        cursor.execute(sql, (itemid, start_time))
        row = cursor.fetchone()
    except MySQLdb.OperationalError:
        return "Not monitored"
    # fetchone() may return None (no rows) and the aggregate may be NULL/0.
    if not row or not row['result']:
        return "Not monitored"
    return row['result']


def _query_peak_time(cursor, itemid, aggregate, peak):
    '''Return the formatted wall-clock time at which *peak* was recorded
    for *itemid*; error strings pass through unchanged.'''
    if isinstance(peak, str):
        # Upstream query already failed; don't interpolate an error marker
        # into another query (the original produced broken SQL here).
        return peak
    if aggregate not in _AGGREGATES:
        raise ValueError('unsupported aggregate: %r' % (aggregate,))
    sql = ('select clock from trends_uint where itemid = %%s '
           'and value_%s = %%s and clock >= %%s limit 1' % (aggregate,))
    try:
        cursor.execute(sql, (itemid, peak, start_time))
        row = cursor.fetchone()
    except MySQLdb.OperationalError:
        return "Not monitored"
    if not row:
        return "Not monitored"
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(row['clock']))


def report():
    '''Build the weekly business-monitoring Excel workbook from Zabbix data.

    For every project (outer list of ``keys``) one worksheet row is filled
    with the 7-day aggregate of each metric.  Side effects: connects to the
    Zabbix MySQL database and writes the file named by the module-level
    ``xlsfilename``.
    '''
    conn = MySQLdb.connect(host=zdbhost, user=zdbuser, passwd=zdbpass,
                           port=zdbport, db=zdbname, charset='utf8')
    cursor = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)

    # groupid -> project name shown in column A.
    group = {"132": "微信公众号", "71": "微信电视", "16": "chiq3音视频",
             "48": "launcher", "50": "个性化推荐", "31": "语义云"}

    workbook = xlsxwriter.Workbook(xlsfilename)
    # Cell formats -- see http://xlsxwriter.readthedocs.io/
    merge_format = workbook.add_format({
        'bold':     True,
        'border':   True,
        'align':    'center',
        'valign':   'vcenter',
        'fg_color': '#D7E4BC',
    })
    cell_format = workbook.add_format({
        'border':    True,
        'align':     'center',
        'valign':    'vcenter',
        'text_wrap': True,
    })

    try:
        worksheet = workbook.add_worksheet()
        worksheet.set_column('A:L', 11)
        # Title row.
        worksheet.merge_range('A1:L1',
                              '业务监控周报(%s)'.decode('utf-8') % date,
                              merge_format)
        # Header row, column A.
        worksheet.write(1, 0, '项目名称'.decode('utf-8'), cell_format)

        row = 2
        for targets in keys:
            groupid = targets[0][5]
            worksheet.write(row, 0, group[groupid].decode('utf-8'), cell_format)

            col = 1
            for target in targets:
                item, itemid, aggregate = target[0], target[1], target[2]
                result = _query_metric(cursor, itemid, aggregate)
                # "Time of peak" columns: look up when the peak occurred.
                if item in ['最大QPS发生时间', '最大并发发生时间']:
                    result = _query_peak_time(cursor, itemid, aggregate, result)
                # Header cells carry identical metric names for every group,
                # so writing them only once leaves the sheet unchanged.
                if row == 2:
                    worksheet.write(1, col, item.decode('utf-8'), cell_format)
                worksheet.write(row, col, _humanize(result), cell_format)
                col += 1
            row += 1
    finally:
        # Close everything even if a query/write blows up mid-report.
        workbook.close()
        cursor.close()
        conn.close()
# Script entry point: generate this week's report workbook.
if __name__ == "__main__":
    report()

The final result: the report emails are sent out weekly by the script.

Use Python to query MySQL database to generate Excel file to send monitoring weekly report.

Guess you like

Origin http://43.154.161.224:23101/article/api/json?id=325999567&siteId=291194637