作者:容 易 2015-12-03 15:36:23
基于 Python 写的慢查询日志分析脚本,目前是初稿,请大家帮忙测试,看是否可以正常解析慢查询日志。stdin_log 代表慢查询日志的位置,stdout_log 是解析结果的输出。将代码内容直接复制保存为文件(例如 xxx.py),然后运行 python xxx.py 即可。
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""MySQL slow-query-log analyser.

Reads a slow-query log, groups the lines belonging to each query,
normalises the SQL text (string/number literals replaced by '?') and
appends one summary record per query to the output file:

    [user, host_ip, unix_timestamp, raw_sql, normalised_sql, md5_of_normalised_sql]

Run directly:  python <this_file>
"""
__author__ = 'tiger'

import hashlib
import re

# Read the log in 10 MB batches so arbitrarily large files fit in memory.
READ_SIZE = 1024 * 1024 * 10

# Default file locations; main() accepts overrides.
DEFAULT_SLOW_LOG = '/app/mysql/log/slow.log'
DEFAULT_OUTPUT = '/app/mysql/log/log.output'

# Pre-compiled patterns; raw strings keep the backslashes intact.
REGEX_START_HOST = re.compile(r'# User@Host:')
REGEX_START_TIME = re.compile(r'# Time:')
REGEX_TIMESTAMP = re.compile(r'SET timestamp')
REGEX_STR = re.compile(r"\B'.*?'\B")      # quoted string literals
REGEX_USER_INFO = re.compile(r"\[.*?\]")  # [user] / [ip] in the header line
REGEX_FLOAT = re.compile(r"\d+\.\d+")     # float literals (substitute BEFORE ints)
REGEX_INT = re.compile(r"\d+")            # integer literals


def _normalize_sql(sql):
    """Return *sql* with every literal replaced by '?'.

    Floats are substituted before integers so 1.5 becomes a single '?'
    instead of '?.?'.
    """
    normalized = sql.replace('"', "'")
    normalized = REGEX_STR.sub('?', normalized)
    normalized = REGEX_FLOAT.sub('?', normalized)
    return REGEX_INT.sub('?', normalized)


def _fingerprint(text):
    """md5 hex digest of *text*.

    Bug fix: the original reused one hashlib.md5() object and kept calling
    update() on it, so every digest depended on all previously processed
    queries.  A fresh md5 per statement yields a stable per-query id.
    """
    if not isinstance(text, bytes):  # py3 str (py2 str is already bytes)
        text = text.encode('utf-8')
    return hashlib.md5(text).hexdigest()


def _write_record(sql_info, sql, log_out):
    """Format one completed slow-log entry and append it to *log_out*.

    *sql_info* should hold [User@Host header, Query_time line,
    'SET timestamp=...;'].  Malformed entries (e.g. the log's banner
    lines) are skipped instead of raising IndexError as the original did.
    """
    if len(sql_info) < 3 or not sql.strip():
        return  # incomplete entry: banner lines, missing SET timestamp, ...
    raw_sql = sql.strip()
    normalized = _normalize_sql(raw_sql).strip()
    # '# User@Host: user[user] @ host [ip]  Id: N' -> ['[user]', '[ip]']
    conn_info = sql_info[0].split("Id")[0].split("User@Host:")
    brackets = REGEX_USER_INFO.findall(conn_info[1].strip())
    # 'SET timestamp=1449127000;' -> '1449127000'
    time_stamp = sql_info[2].split('=')[1].split(';')
    record = [brackets[0][1:-1], brackets[1][1:-1], time_stamp[0],
              raw_sql, normalized, _fingerprint(normalized)]
    log_out.write(str(record) + '\n')


def analyze(log_in, log_out):
    """Parse slow-log text from file-like *log_in*; write one record per query.

    Generalised from the original's hardcoded file handles so the parser
    can be fed any file-like object (e.g. io.StringIO in tests).
    """
    sql_info = []   # [header, query-time line, SET timestamp line]
    sql = ''        # accumulated SQL text of the current entry
    lines = log_in.readlines(READ_SIZE)
    while lines:
        for line in lines:
            if not line.strip():
                continue
            if REGEX_START_HOST.match(line):
                # A new entry starts: flush the previous one first.
                _write_record(sql_info, sql, log_out)
                sql_info = [line.strip()]
                sql = ''
            elif not sql_info:
                # Banner / '# Time:' lines before the first header.
                print(line)
                sql_info = [line]
            elif REGEX_START_TIME.match(line):
                pass  # '# Time:' separators carry no extra information
            elif len(sql_info) == 1:
                sql_info.append(line.strip())  # the '# Query_time:' line
            elif len(sql_info) == 2:
                # Only the 'SET timestamp=...;' line completes the header.
                if REGEX_TIMESTAMP.match(line):
                    sql_info.append(line.strip())
            else:
                sql = sql + ' ' + line.strip()
        lines = log_in.readlines(READ_SIZE)
    # Bug fix: the original never emitted the final entry in the file.
    _write_record(sql_info, sql, log_out)


def main(in_path=DEFAULT_SLOW_LOG, out_path=DEFAULT_OUTPUT):
    """Open the log files and run the analysis; files are always closed."""
    with open(in_path, 'r') as log_in:
        # Line buffering (1) replaces the original's unbuffered 0, which
        # Python 3 rejects for text-mode files.
        with open(out_path, 'a', 1) as log_out:
            analyze(log_in, log_out)
    print("Analysis of complete.")


if __name__ == '__main__':
    main()
One Response