We often need to monitor a file or directory for changes and, whenever something changes, upload the file to a backup host, while also checking whether the upload itself succeeded. Based on this requirement, and after consulting some related material, I wrote the script below to implement it.
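Before the full script, here is a minimal sketch of the pyinotify pattern it is built on. The watched path and the handler name below are placeholders for illustration only, not the ones the script uses later:

# Minimal pyinotify watch loop: register handlers for create/modify events
# on a directory tree, then block and dispatch events.
import pyinotify

class Handler(pyinotify.ProcessEvent):
    def process_IN_CREATE(self, event):
        print "created:", event.pathname    # react to a new file
    def process_IN_MODIFY(self, event):
        print "modified:", event.pathname   # react to a changed file

wm = pyinotify.WatchManager()
mask = pyinotify.IN_CREATE | pyinotify.IN_MODIFY
notifier = pyinotify.Notifier(wm, Handler())
wm.add_watch('/path/to/watch', mask, rec=True)  # rec=True also watches subdirectories
notifier.loop()  # blocks and dispatches events until interrupted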
#!/usr/bin/env python
#coding=utf-8
#######################
#
# Status wd gs/ccs sql file changed
# date: 2013-08-26 王伟
# When a file changes, upload it to the backup host, then verify the uploaded file is correct
#
#######################
import paramiko, os, sys, datetime, time, MySQLdb
from pyinotify import WatchManager, Notifier, ProcessEvent, IN_DELETE, IN_CREATE, IN_MODIFY
'''
CREATE TABLE `wddel_log.status_sql` (
  `ip` varchar(16) NOT NULL COMMENT 'machine IP',
  `tar_name` varchar(50) NOT NULL COMMENT 'backup file name',
  `md5` varchar(50) NOT NULL COMMENT 'backup file MD5',
  `flag` int(2) NOT NULL COMMENT '0: success; 1: failure',
  `error_log` varchar(100) NOT NULL COMMENT 'error log',
  `uptime` datetime NOT NULL COMMENT 'update time',
  KEY `ip` (`ip`),
  KEY `uptime` (`uptime`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8'''
# script to create the log table

GM_path = '/home/asktao/'
center_hostname = '192.168.1.100'
center_username = 'root'
center_password = '123456'
center_port = 63008

def log2db(ip, tar_name, md5, flag, error='0'):
    # write the backup/deletion log record into the database
    try:
        tar_name = os.path.split(tar_name)[1]
        now = time.strftime("%Y-%m-%d %H:%M:%S")
        conn = MySQLdb.connect(host='192.168.1.104', user='root', passwd='1q2w3e4r', charset='utf8', connect_timeout=20)
        cursor = conn.cursor()
        sql = "SELECT ip FROM wddel_log.status_sql WHERE ip='%s'" % ip
        cursor.execute(sql)
        res = cursor.fetchall()
        if len(res) == 0:
            insert_sql = "insert into wddel_log.status_sql VALUES('%s','%s','%s',%s,'%s','%s')" % (ip, tar_name, md5, flag, error, now)
            cursor.execute(insert_sql)
            conn.commit()
        else:
            update_sql = "UPDATE wddel_log.status_sql SET md5='%s',flag='%s',error_log='%s',uptime='%s' WHERE ip='%s'" % (md5, flag, error, now, ip)
            cursor.execute(update_sql)
            conn.commit()
        cursor.close()
        conn.close()
    except Exception, e:
        print e

def find_ip():
    # get the IP address of local eth0 (fall back to eth1 if eth0 has a 192.168. address)
    ip = os.popen("/sbin/ip a|grep 'global eth0'").readlines()[0].split()[1].split("/")[0]
    if "192.168." in ip:
        ip = os.popen("/sbin/ip a|grep 'global eth1'").readlines()[0].split()[1].split("/")[0]
    return ip

def md5sum(file_name):
    # compute the MD5 of the packed sql tarball
    if os.path.isfile(file_name):
        f = open(file_name, 'rb')
        py_ver = sys.version[:3]
        if py_ver == "2.4":
            import md5 as hashlib
        else:
            import hashlib
        md5 = hashlib.md5(f.read()).hexdigest()
        f.close()
        return md5
    else:
        return 0

def center_md5(file_name):
    # MD5 of the file after it has been uploaded to the backup center
    try:
        s = paramiko.SSHClient()
        s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        s.connect(hostname=center_hostname, port=center_port, username=center_username, password=center_password)
        conm = "/usr/bin/md5sum %s" % file_name
        stdin, stdout, stderr = s.exec_command(conm)
        result = stdout.readlines()[0].split()[0].strip()
        s.close()
        return result
    except Exception, e:
        return e

def back_file(ip, tar_name, tar_md5):
    # upload the file to the backup center
    remote_dir = '/data/sql'
    file_name = os.path.join(remote_dir, os.path.split(tar_name)[1])
    try:
        t = paramiko.Transport((center_hostname, center_port))
        t.connect(username=center_username, password=center_password)
        sftp = paramiko.SFTPClient.from_transport(t)
        sftp.put(tar_name, file_name)
        t.close()
        #print "%s back_file OK" % tar_name
        os.remove(tar_name)
        remot_md5 = center_md5(file_name)
        if remot_md5 == tar_md5:
            log2db(ip, tar_name, tar_md5, 0)
        else:
            log2db(ip, tar_name, tar_md5, 1, 'remot_md5!=tar_md5')
    except Exception, e:
        #print "connect error!"
        log2db(ip, tar_name, tar_md5, 1, e)
        os.remove(tar_name)

def back_sql():
    # perform the backup
    ip = find_ip()
    tar_name = "/tmp/%s.tar.gz" % ip
    sql_conn = "/usr/bin/find %s -type f -name '*.sql'|/usr/bin/xargs /bin/tar zcvPf %s" % (GM_path, tar_name)
    sql_tar = os.popen(sql_conn).readlines()
    tar_md5 = md5sum(tar_name)
    if tar_md5 != 0:
        back_file(ip, tar_name, tar_md5)
    else:
        error_log = "%s not found" % tar_name
        log2db(ip, tar_name, tar_md5, 1, error_log)  # flag 1: the tarball was never created

class PFilePath(ProcessEvent):
    # handlers triggered when files change
    def process_IN_CREATE(self, event):
        if os.path.splitext(event.name)[1] == ".sql":
            text = "Create file: %s " % os.path.join(event.path, event.name)
            #print text
            back_sql()

    def process_IN_MODIFY(self, event):
        if os.path.splitext(event.name)[1] == ".sql":
            text = "Modify file: %s " % os.path.join(event.path, event.name)
            #print text
            back_sql()

def FSMonitor():
    # main monitoring function
    back_sql()  # back up the sql files once when the script starts
    wm = WatchManager()
    mask = IN_CREATE | IN_MODIFY
    notifier = Notifier(wm, PFilePath())
    wdd = wm.add_watch(GM_path, mask, rec=True)
    print 'now starting monitor %s' % (GM_path)
    while True:
        try:
            notifier.process_events()
            if notifier.check_events():
                notifier.read_events()
        except KeyboardInterrupt:
            notifier.stop()
            break

if __name__ == "__main__":
    FSMonitor()
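Once the monitor has been running, success or failure shows up in the wddel_log.status_sql table. Here is a small check sketch, reusing the connection parameters hard-coded in log2db() above (adjust them for your own environment):

# Quick check of the log table written by log2db() above.
# Host and credentials are the ones hard-coded in the script; adjust as needed.
import MySQLdb

conn = MySQLdb.connect(host='192.168.1.104', user='root', passwd='1q2w3e4r', charset='utf8', connect_timeout=20)
cursor = conn.cursor()
cursor.execute("SELECT ip,tar_name,md5,flag,error_log,uptime FROM wddel_log.status_sql ORDER BY uptime DESC")
for row in cursor.fetchall():
    print row  # flag 0: upload and MD5 check succeeded; flag 1: failed, see error_log
cursor.close()
conn.close()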
I made a mistake here myself: I saved the script as pyinotify.py.
Beginners are prone to simple mistakes like this.
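For reference, that naming collision makes Python import the local script itself instead of the installed library, so the "from pyinotify import ..." line at the top fails. A quick way to check which module is actually being picked up:

# If the script itself were named pyinotify.py, the path printed here would
# point at the script instead of the installed pyinotify package.
import pyinotify
print pyinotify.__file__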
Why test this program at all? I just wanted to see whether the backup is incremental.
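For that test, appending to any .sql file under the watched directory is enough to fire the handlers; the file name below is just an example. Judging from the script itself, each trigger re-runs find ... | tar over every *.sql file under GM_path, so what gets uploaded each time is a full archive of the .sql files rather than an incremental delta.

# Touch a .sql file under GM_path to trigger process_IN_MODIFY / process_IN_CREATE.
# test_trigger.sql is a hypothetical name; the monitor must already be running.
import time

with open('/home/asktao/test_trigger.sql', 'a') as f:
    f.write("-- touched at %s\n" % time.strftime("%Y-%m-%d %H:%M:%S"))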
This post is reproduced from liqius's 51CTO blog. Original link: http://blog.51cto.com/szgb17/1865838. Please contact the original author for reprint permission.