A project team recently came to us with a new requirement: every Friday at 14:00, collect the monitoring averages for their project hosts covering 20:00-24:00 of each day, aggregated by hour, export them to an Excel file with one sheet per day, and email the summary to the business side.
They use this for their weekly business-usage report. The requirement is fiddly, but as ops we have to deliver; below is a screenshot of the resulting email.
As you can see, the subject line carries the project name and the date range, and the recipients are the business side plus me.
Here is the Excel layout:
One sheet per day, holding each project host's hourly average monitoring data for 20:00-24:00 of that day.
The approach: query the monitoring data above from the Zabbix database with SQL, aggregate it into an Excel workbook with Python, and schedule the mail with crontab.
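To make the data source concrete before the full script: Zabbix keeps hourly aggregates in the trends table (value_avg is the per-hour average), so pulling one evening's CPU-idle averages looks roughly like the minimal sketch below. The connection details and the dates are placeholders, not values from any real environment; adjust them to yours.

# -*- coding: utf-8 -*-
# Minimal sketch: one evening's hourly CPU-idle averages from the Zabbix
# `trends` table. Host, credentials and dates are placeholders.
import MySQLdb

conn = MySQLdb.connect(host='10.10.14.19', user='zabbix', passwd='zabbix',
                       db='zabbix', port=3306, charset='utf8')
cur = conn.cursor()
cur.execute("select from_unixtime(t.clock,'%%H:%%i') as Hour, h.host, "
            "round(t.value_avg,2) as Cpu_Idle "
            "from trends t join items i on t.itemid=i.itemid "
            "join hosts h on h.hostid=i.hostid "
            "where i.key_='system.cpu.util[,idle]' "
            "and t.clock >= UNIX_TIMESTAMP('%s 20:00:00') "
            "and t.clock < UNIX_TIMESTAMP('%s 00:00:00');" % ('2016-11-20', '2016-11-21'))
for row in cur.fetchall():
    print row  # one row per host per hour
cur.close()
conn.close()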
Important: I added an item to the default Linux monitoring template that records the host's CPU count, with the key system.cpu.num. Add it to your template as well, otherwise the exported Excel will be empty.
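A quick way to confirm the item is in place before running the report (this check is my suggestion, not part of the script): count its rows in trends_uint, the table the script itself reads for this key.

# If this prints 0, system.cpu.num is missing from the template (or has no
# trend data yet) and the exported Excel will come out empty.
import MySQLdb

conn = MySQLdb.connect(host='10.10.14.19', user='zabbix', passwd='zabbix',
                       db='zabbix', port=3306, charset='utf8')
cur = conn.cursor()
cur.execute("select count(*) from trends_uint t join items i on t.itemid=i.itemid "
            "where i.key_='system.cpu.num';")
print cur.fetchone()[0]
cur.close()
conn.close()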
The script follows; just change the host group it fetches and the sender mailbox settings to match your environment (I use a 163 mailbox).
#!/usr/bin/env python
#coding=utf-8
#Author: Denglei
#Email: dl528888@gmail.com
#QQ: 244979152
import MySQLdb
import datetime
import xlwt
import sys
import os
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
from email import encoders


def send_mail(server, fro, to, subject, text, files=[]):
    assert type(server) == dict
    assert type(to) == list
    assert type(files) == list
    msg = MIMEMultipart()
    msg['From'] = fro
    msg['Subject'] = subject
    msg['To'] = COMMASPACE.join(to)  # COMMASPACE == ', '
    msg['Date'] = formatdate(localtime=True)
    msg.attach(MIMEText(text))
    for f in files:
        # attach each file as base64-encoded binary data
        part = MIMEBase('application', 'octet-stream')
        part.set_payload(open(f, 'rb').read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f))
        msg.attach(part)
    smtp = smtplib.SMTP(server['name'], server['port'])
    smtp.ehlo()
    smtp.starttls()
    smtp.ehlo()
    smtp.login(server['user'], server['passwd'])
    smtp.sendmail(fro, to, msg.as_string())
    smtp.close()


def get_mysql_data(sql):
    cur.execute(sql)
    results = cur.fetchall()
    return results


def cover_excel(msg, start_time):
    # one sheet per day, named after the date
    ws = wb.add_sheet(start_time, cell_overwrite_ok=True)
    x = msg
    title = ['时间'.encode('utf8'), '所属组'.encode('utf8'), '主机IP'.encode('utf8'),
             'CPU逻辑核数(单位:个)'.encode('utf8'), 'CPU空闲值(单位:%)'.encode('utf8'),
             '可用内存值(单位:GB)'.encode('utf8'), '总内存值(单位:GB)'.encode('utf8'),
             '公网进入流量(单位:kbps)'.encode('utf8'), '公网流出流量(单位:kbps)'.encode('utf8')]
    x.insert(0, title)
    count = len(x)  # row count includes the header row
    for j in range(0, 9):
        for i in range(0, count):
            value = x[i]
            if isinstance(value[j], long) or isinstance(value[j], int) or isinstance(value[j], float):
                ws.write(i, j, value[j])
            else:
                ws.write(i, j, value[j].decode('utf8'))


def run_select(start_time, end_time):
    # every query averages the hourly trend values between 20:00 of start_time
    # and 00:00 of end_time (the next day) for hosts in the matching group
    get_cpu_idle_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,round(avg(hi.value_avg),2) as Cpu_Idle from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends hi on i.itemid = hi.itemid where i.key_='system.cpu.util[,idle]' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    cpu_idle_result = get_mysql_data(get_cpu_idle_sql)
    get_cpu_num_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,avg(hi.value_avg) as Cpu_Number from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on i.itemid = hi.itemid where i.key_='system.cpu.num' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    cpu_num_result = get_mysql_data(get_cpu_num_sql)
    get_mem_avai_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,round(avg(hi.value_avg/1024/1024/1024),2) as Memory_Avaiable from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on i.itemid = hi.itemid where i.key_='vm.memory.size[available]' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    mem_avai_result = get_mysql_data(get_mem_avai_sql)
    get_mem_total_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,round(avg(hi.value_avg/1024/1024/1024),2) as Memory_Total from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on i.itemid = hi.itemid where i.key_='vm.memory.size[total]' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    mem_total_result = get_mysql_data(get_mem_total_sql)
    get_em2_in_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,round(avg(hi.value_avg/1000),2) as Network_Em2_In from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on i.itemid = hi.itemid where i.key_='net.if.in[em2]' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    em2_in_result = get_mysql_data(get_em2_in_sql)
    get_em2_out_sql = "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,g.name as Group_Name,h.host as Host,round(avg(hi.value_avg/1000),2) as Network_Em2_Out from hosts_groups hg join groups g on g.groupid = hg.groupid join items i on hg.hostid = i.hostid join hosts h on h.hostid=i.hostid join trends_uint hi on i.itemid = hi.itemid where i.key_='net.if.out[em2]' and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') and g.name like '%%广告%%' group by h.host;" % (start_time, end_time)
    em2_out_result = get_mysql_data(get_em2_out_sql)
    # start from the CPU-count rows and join the other metrics on (Date, Group, Host)
    msg = [list(i) for i in cpu_num_result]
    for i in msg:
        for ii in cpu_idle_result:
            if i[0] == ii[0] and i[1] == ii[1] and i[2] == ii[2]:
                i[3] = int(i[3])
                i.append(int(ii[3]))
        for iii in mem_avai_result:
            if i[0] == iii[0] and i[1] == iii[1] and i[2] == iii[2]:
                i.append(round(float(iii[3]), 2))
        for iiii in mem_total_result:
            if i[0] == iiii[0] and i[1] == iiii[1] and i[2] == iiii[2]:
                i.append(int(iiii[3]))
        for a in em2_in_result:
            if i[0] == a[0] and i[1] == a[1] and i[2] == a[2]:
                i.append(int(a[3]))
        if len(i) == 7:
            i.append(0)  # no em2 inbound data for this host; pad with 0
        for b in em2_out_result:
            if i[0] == b[0] and i[1] == b[1] and i[2] == b[2]:
                i.append(int(b[3]))
        if len(i) == 8:
            i.append(0)  # no em2 outbound data for this host; pad with 0
    cover_excel(msg, start_time)


def main():
    # walk the last seven days, oldest first, one sheet per day
    for i in range(7, 0, -1):
        start_time = (datetime.datetime.now() - datetime.timedelta(days=i)).strftime("%Y-%m-%d")
        end_time = (datetime.datetime.now() - datetime.timedelta(days=i - 1)).strftime("%Y-%m-%d")
        run_select(start_time, end_time)


if __name__ == "__main__":
    default_encoding = 'utf-8'
    if sys.getdefaultencoding() != default_encoding:
        reload(sys)
        sys.setdefaultencoding(default_encoding)
    if os.path.exists("/tmp/zabbix_log/") is False:
        os.mkdir("/tmp/zabbix_log/")
    conn = MySQLdb.connect(host='10.10.14.19', user='zabbix', passwd='zabbix', port=3306, charset="utf8")
    cur = conn.cursor()
    conn.select_db('zabbix')
    wb = xlwt.Workbook()
    main()
    wb.save('/tmp/zabbix_log/chance_zabbix_monitor_hour_avg.xls')
    cur.close()
    conn.close()
    # build and send the mail with the workbook attached
    server = {'name': 'smtp.163.com', 'user': 'ops_monitor', 'passwd': 'xxxx', 'port': 25}
    fro = 'xxx@163.com'
    to = ['xx@xx.com', '244979152@qq.com']
    now_time = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y/%m/%d")
    last_time = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime("%Y/%m/%d")
    subject = 'xx平台监控数据【%s-%s】' % (last_time, now_time)
    text = 'xx你好,附件是畅思平台最近一周每天20至24点平均值监控数据,请查收!\n有问题请联系邓磊.'
    files = ['/tmp/zabbix_log/chance_zabbix_monitor_hour_avg.xls']
    send_mail(server, fro, to, subject, text, files=files)
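Before wiring this into cron, it is worth confirming the workbook really ends up with one sheet per day. A quick read-back with xlrd (my choice of reader, not something the script uses):

# List the sheets of the generated workbook; expect seven date-named sheets
# such as '2016-11-14'. xlrd is assumed to be installed.
import xlrd

book = xlrd.open_workbook('/tmp/zabbix_log/chance_zabbix_monitor_hour_avg.xls')
print book.sheet_names()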
To report on a different host group, replace 广告 inside '%%广告%%' in the SQL above with the group name you need; nothing else has to change.
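Since that keyword appears in all six queries, a small refactor keeps it in one place. The names below (GROUP_KEYWORD, group_filter) are my own, not part of the original script; a minimal sketch:

# -*- coding: utf-8 -*-
# GROUP_KEYWORD and group_filter are illustrative names, not from the script.
GROUP_KEYWORD = '广告'  # edit this single line to switch projects

def group_filter(keyword):
    # builds the fragment used in every query's WHERE clause;
    # each %% collapses to a literal % after substitution
    return "g.name like '%%%s%%'" % keyword

print group_filter(GROUP_KEYWORD)  # -> g.name like '%广告%'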
The script is on GitHub (code formatting is hard to keep intact on the blog, so grab it there): https://github.com/dl528888/public_script/blob/master/zabbix_hour_avg_monitor.py
To send it on a schedule, add the script to crontab and set the time you want it to run.
Here is my crontab entry (14:00 every Friday):
00 14 * * 5 /usr/bin/python /data/software/zabbix_hour_avg_monitor.py
BTW: my company now offers IT and ops outsourcing, mainly physical server rental, cloud host rental and management, and one-stop ops outsourcing (covering application deployment and maintenance, host management, database administration, CDN management, monitoring, and so on).
If you need ops outsourcing, visit our official website.
This article was reposted from dl528888's 51CTO blog; the original is at http://blog.51cto.com/dl528888/1877246. Please contact the original author for reprint permission.