def analysis_table_map_event(info, init_binlog_file_name):
    """Parse a TABLE_MAP_EVENT and resolve the target database/table names."""
    row_values = eval(info["row_values"])
    loging.debug(
        "Parse log time : %s Position id %s Table id %d Schema [%s] Table [%s] "
        % (info["Date"], str(info["Log position"]), row_values["Table id"],
           row_values["Schema"], row_values["Table"]))
    # update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
    loging.debug('switch database : use %s ' % row_values["Schema"])
    if merge_db_table is False:
        # No mapping rules: replay into the same schema/table as the source.
        table_map = "`%s`.`%s`" % (row_values["Schema"], row_values["Table"])
        hi_table_map = "`%s`.`%s`" % (row_values["Schema"], row_values["Table"])
        if write_db is True:
            mysql.my_sql('use %s' % row_values["Schema"])
    else:
        # Mapping rules active: rewrite the replay target, but keep the original
        # names in hi_table_map for rule filtering and logging.
        dt = merge_replicate_table(row_values["Schema"], row_values["Table"])
        table_map = "`%s`.`%s`" % (dt[0], dt[1])
        hi_table_map = "`%s`.`%s`" % (row_values["Schema"], row_values["Table"])
        loging.info("Rule-mapped table change %s.%s ---> %s.%s"
                    % (row_values["Schema"], row_values["Table"], dt[0], dt[1]))
        loging.debug('switch database : use %s ' % dt[0])
        if write_db is True:
            mysql.my_sql('use %s' % dt[0])
    return table_map, hi_table_map
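
# merge_replicate_table is defined elsewhere in this project. The call sites
# above rely on the two-argument form returning a (db, table) tuple, and
# analysis_query_event relies on the one-argument form returning the mapped db
# name. A minimal sketch under that assumption; the rule data is hypothetical:
_RULES = {("source_db", "source_tab"): ("target_db", "target_tab")}  # hypothetical

def _merge_replicate_table_sketch(schema, table=None):
    if table is None:
        # One-argument form: map the schema name only.
        for (src_db, _), (dst_db, _) in _RULES.items():
            if src_db == schema:
                return dst_db
        return schema
    # Two-argument form: map (schema, table) to the replay target, falling
    # back to the source pair when no rule matches.
    return _RULES.get((schema, table), (schema, table))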
def analysis_stop_event(info, init_binlog_file_name):
    """Parse a STOP_EVENT and checkpoint the position before the binlog file switches."""
    loging.info("Parse log time : %s switching binlog file Position id %s"
                % (info["Date"], str(info["Log position"])))
    print("%sParse log time : %s switching binlog file Position id %s"
          % (update_datetime(), info["Date"], str(info["Log position"])))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
def analysis_update_rows_event(info, init_binlog_file_name, table_map=None, hi_table_map=None):
    """Parse an UPDATE_ROWS_EVENT and replay it as UPDATE statements."""
    if exclude_merge_db_table(
            str(hi_table_map).replace('`', '').split('.')[0],
            str(hi_table_map).replace('`', '').split('.')[1]) is False:
        loging.warning("Ignoring table not covered by the rules: %s" % hi_table_map)
        return True
    values = eval(info["row_values"])["Values"]
    if len(values) > 1:
        for v in values:
            # before_values identify the row (WHERE); after_values carry the
            # new data (SET).
            where_values = update_before_values(v["before_values"], table_map)
            set_values = update_after_values(v["after_values"], table_map)
            analysis_sql = "update %s set %s where %s " % (
                table_map,
                set_values.replace("'None'", 'Null'),
                where_values.replace("= 'None'", ' is null '))
            loging.debug("Query : %s " % analysis_sql)
            if write_db is True:
                mysql.my_sql(analysis_sql)
        # update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
        loging.info("Parse log time : %s Position id %s"
                    % (info["Date"], str(info["Log position"])))
    else:
        v = values[0]
        where_values = update_before_values(v["before_values"], table_map)
        set_values = update_after_values(v["after_values"], table_map)
        analysis_sql = "update %s set %s where %s " % (
            table_map,
            set_values.replace("'None'", 'Null'),
            where_values.replace("= 'None'", ' is null '))
        loging.info("Parse log time : %s Position id %s "
                    % (info["Date"], str(info["Log position"])))
        loging.debug("Query : %s " % analysis_sql)
        if write_db is True:
            mysql.my_sql(analysis_sql)
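
# update_before_values / update_after_values are project helpers not shown in
# this section. The replace() calls above only make sense if they render
# "col = 'value'" pairs, so here is a minimal sketch under that assumption
# (the joiners are guesses: ', ' for the SET clause, ' and ' for WHERE):
def _render_assignments_sketch(row, joiner):
    return joiner.join("%s = '%s'" % (col, val) for col, val in row.items())

# _render_assignments_sketch({"id": 1, "name": None}, " and ")
# -> "id = '1' and name = 'None'"   (the 'None' is later rewritten to "is null")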
def analysis_rotate_event(info):
    """Parse a ROTATE_EVENT: record the next binlog file name and its start position."""
    print("%sGot file %s Position id %s "
          % (update_datetime(), info["Next binlog file"], info["Position"]))
    loging.info("Got file %s Position id %s "
                % (info["Next binlog file"], info["Position"]))
    init_binlog_file_name = "%s\n" % info["Next binlog file"]
    update_binlog_pos(pos_id=info["Position"], binlog_file=init_binlog_file_name)
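
# update_binlog_pos is this project's checkpoint writer; every event handler
# calls it so that a restart can resume from the last applied position. Its
# storage format is not shown in this section -- a minimal sketch assuming a
# two-line state file (binlog file name, then position):
def _update_binlog_pos_sketch(pos_id, binlog_file, state_path='binlog.pos'):
    with open(state_path, 'w') as f:
        f.write("%s\n%s\n" % (str(binlog_file).strip(), pos_id))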
def shutdown_program():
    """Request shutdown by removing this host's pid file."""
    hostname = socket.gethostname()
    if os.path.exists('%s.pid' % hostname):
        os.remove('%s.pid' % hostname)
        loging.info("Starting shutdown...")
    else:
        print('%s%s.pid file does not exist' % (update_datetime(), hostname))
        loging.warning('%s.pid file does not exist' % hostname)
def safety_shutdown():
    """When the pid file disappears, flush any pending batch INSERT and exit."""
    if not os.path.exists('%s.pid' % hostname):
        print("%s%s.pid not detected, exiting normally" % (update_datetime(), hostname))
        obj = batch_sql.analysis_sql
        if len(obj) != 0:
            # Strip the trailing comma from the accumulated multi-row INSERT
            # before the final commit.
            analysis_sql = obj[:obj.rindex(',')]
            if len(analysis_sql) > 0 and write_db:
                loging.debug("Query : %s " % analysis_sql)
                mysql.my_sql(analysis_sql)
        loging.info("%s.pid not detected, System shutdown" % hostname)
        sys.exit()
def analysis_xid_event(info, init_binlog_file_name):
    """Parse an XID_EVENT (transaction commit) and checkpoint the position."""
    if 'row_values' in info:
        loging.debug("Parse log time : %s Position id %s Transaction ID : %s "
                     % (info["Date"], str(info["Log position"]),
                        eval(info["row_values"])["Transaction ID"]))
    else:
        loging.debug("Parse log time : %s Position id %s"
                     % (info["Date"], str(info["Log position"])))
    loging.info("Parse xid event : %s Position id %s"
                % (info["Date"], str(info["Log position"])))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
def analysis_delete_rows_event(info, init_binlog_file_name, hi_table_map=None, table_map=None):
    """Parse a DELETE_ROWS_EVENT and replay it as DELETE statements."""
    if exclude_merge_db_table(
            str(hi_table_map).replace('`', '').split('.')[0],
            str(hi_table_map).replace('`', '').split('.')[1]) is False:
        loging.warning("Ignoring table not covered by the rules: %s" % hi_table_map)
        return True
    row_values = eval(info["row_values"])
    values = row_values["Values"]

    def resolve_table():
        # Resolve the DELETE target, applying the mapping rules when enabled.
        if merge_db_table is False:
            return row_values["info"]["Table"]
        rows = row_values["info"]["Table"]
        row = str(rows).split('.')
        dt = merge_replicate_table(row[0], row[1])
        loging.info("Rule-mapped table change %s ---> %s.%s" % (rows, dt[0], dt[1]))
        return '.'.join(dt)

    if len(values) > 1:
        for v in values:
            rows_info = resolve_table()
            analysis_sql = "delete from %s where %s" % (
                rows_info,
                delete_rows_values(v["values"], table_map).replace("= 'None'", ' is null '))
            loging.debug("Query : %s " % analysis_sql)
            if write_db is True:
                mysql.my_sql(analysis_sql)
        # update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
        loging.info("Parse log time : %s Position id %s "
                    % (info["Date"], str(info["Log position"])))
    else:
        rows_info = resolve_table()
        analysis_sql = "delete from %s where %s" % (
            rows_info,
            delete_rows_values(values[0]["values"], table_map).replace("= 'None'", ' is null '))
        loging.info("Parse log time : %s Position id %s"
                    % (info["Date"], str(info["Log position"])))
        loging.debug("Query : %s " % analysis_sql)
        if write_db is True:
            mysql.my_sql(analysis_sql)
def batch_analysis_insert_binlog(self, info, init_binlog_file_name, table_map):
    """Accumulate WRITE_ROWS events into one multi-row INSERT and flush in batches."""
    analysis_sqls = ''
    values = eval(info["row_values"])["Values"]
    # Position of the latest event folded into the current batch.
    self.log_position = str(info["Log position"])
    if len(values) > 1:
        rows = insert_key_values(values[0]["values"], table_map)
        if len(self.analysis_sql) == 0:
            # First statement of a new batch: emit the INSERT header once.
            init_sql = "insert into %s (%s) VALUES \n" % (table_map, rows[0])
        else:
            init_sql = ''
        for v in values:
            rows = insert_key_values(v["values"], table_map)
            sql_values = "(%s), \n" % (rows[1].replace("'None'", 'Null'))
            analysis_sqls += sql_values
            self.count_num += 1
        read_time_position = str(info["Log position"])
        loging.debug("Parse log time : %s Position id %s" % (info["Date"], read_time_position))
        loging.info("Batch insert id : %s:%d-%s"
                    % (self.server_uuid, self.batch_number_count, read_time_position))
        self.analysis_sql += init_sql + analysis_sqls
        self.db_table_map = table_map
        update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
    else:
        values = eval(info["row_values"])["Values"][0]["values"]
        rows = insert_key_values(values, table_map)
        self.count_num += 1
        if len(self.analysis_sql) == 0:
            self.analysis_sql = "insert into %s (%s) VALUES (%s)," % (
                table_map, rows[0], rows[1].replace("'None'", 'Null'))
        else:
            self.analysis_sql += "(%s), \n" % rows[1].replace("'None'", 'Null')
        read_time_position = str(info["Log position"])
        loging.debug("Parse log time : %s Position id %s " % (info["Date"], read_time_position))
        loging.info("Batch insert id : %s:%d-%s"
                    % (self.server_uuid, self.batch_number_count, read_time_position))
        self.db_table_map = table_map
        update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
    if write_db is True and self.count_num >= batch_number:
        # Batch is full: strip the trailing comma and commit in one statement.
        self.analysis_sql = self.analysis_sql[:self.analysis_sql.rindex(',')]
        loging.debug("Query : %s " % self.analysis_sql)
        mysql.my_sql(self.analysis_sql)
        loging.info("Batch insert id : %s:%d-(%s-%s) committed"
                    % (self.server_uuid, self.batch_number_count,
                       self.log_position, str(info["Log position"])))
        self.batch_number_count += 1
        self.analysis_sql = ''
        self.count_num = 0
    return self.count_num
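
# insert_key_values is defined elsewhere in this project; the indexing above
# (rows[0] for the column list, rows[1] for the value list) implies it returns
# a 2-tuple of comma-joined strings. A minimal sketch under that assumption:
def _insert_key_values_sketch(row, table_map=None):
    cols = ', '.join('`%s`' % c for c in row.keys())
    vals = ', '.join("'%s'" % v for v in row.values())
    return cols, vals

# _insert_key_values_sketch({"id": 1, "name": "bob"})
# -> ("`id`, `name`", "'1', 'bob'")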
def cntrast_insert_class_tab(self, info, table_map):
    """Flush the pending batch INSERT when the next event cannot be folded into
    it: the target table changed, a non-insert row event arrived, or a query
    other than BEGIN was seen. Returns False after a flush, True when an
    update/delete arrives with nothing pending."""
    class_type = info['class']
    if class_type == "WriteRowsEvent" and self.count_num > 0 and table_map != self.db_table_map:
        # Still inserting, but into a different table: commit what we have first.
        self.db_table_map = table_map
        self.analysis_sql = self.analysis_sql[:self.analysis_sql.rindex(',')]
        if write_db:
            loging.debug("Query : %s " % self.analysis_sql)
            mysql.my_sql(self.analysis_sql)
        loging.info("Batch insert id : %s:%d-(%s-%s) committed"
                    % (self.server_uuid, self.batch_number_count,
                       self.log_position, str(info["Log position"])))
        self.batch_number_count += 1
        self.analysis_sql = ''
        self.count_num = 0
        return False
    elif class_type in ("UpdateRowsEvent", "DeleteRowsEvent"):
        if len(self.analysis_sql) == 0:
            return True
        self.analysis_sql = self.analysis_sql[:self.analysis_sql.rindex(',')]
        if write_db:
            loging.debug("Query : %s " % self.analysis_sql)
            mysql.my_sql(self.analysis_sql)
        loging.info("Batch insert id : %s:%d-(%s-%s) committed"
                    % (self.server_uuid, self.batch_number_count,
                       self.log_position, str(info["Log position"])))
        self.batch_number_count += 1
        self.analysis_sql = ''
        self.count_num = 0
        return False
    elif class_type == "QueryEvent":
        row_values = eval(info["row_values"])
        if row_values["Query"] != "BEGIN" and self.count_num > 0:
            self.analysis_sql = self.analysis_sql[:self.analysis_sql.rindex(',')]
            if write_db:
                loging.debug("Query : %s " % self.analysis_sql)
                mysql.my_sql(self.analysis_sql)
            loging.info("Batch insert id : %s:%d-(%s-%s) committed"
                        % (self.server_uuid, self.batch_number_count,
                           self.log_position, str(info["Log position"])))
            self.batch_number_count += 1
            self.analysis_sql = ''
            self.count_num = 0
            return False
def analysis_query_event(info, init_binlog_file_name):
    """Parse a QUERY_EVENT: switch the current database and replay DDL when enabled."""
    row_values = eval(info["row_values"])
    schema = row_values["Schema"]
    loging.debug("Parse log time : %s Position id %s current Schema : [%s] Query : %s "
                 % (info["Date"], str(info["Log position"]), schema, row_values["Query"]))
    loging.info("Parse event time : %s Position id %s"
                % (info["Date"], str(info["Log position"])))
    # Default to the source schema so merge_schema is always defined below.
    merge_schema = schema
    if len(schema) != 0:
        loging.debug('switch database : use %s ' % schema)
        if merge_db_table is True:
            merge_schema = merge_replicate_table(schema)
            loging.info("Rule-mapped database change %s ---> %s " % (schema, merge_schema))
        if write_db is True:
            if schema != merge_schema:
                # The rules rewrote the schema: switch to the mapped database.
                if merge_db_table:
                    if merge_schema in only_schemas:
                        mysql.my_sql('use %s' % merge_schema)
                    else:
                        loging.info("skip execute [use %s]" % merge_schema)
                else:
                    mysql.my_sql('use %s' % schema)
            else:
                if merge_db_table:
                    if schema in only_schemas:
                        mysql.my_sql('use %s' % schema)
                    else:
                        loging.info("skip execute [use %s]" % schema)
                else:
                    mysql.my_sql('use %s' % schema)
    else:
        if write_db is True:
            if only_schemas is None:
                if "create database" not in str(row_values["Query"]).lower():
                    mysql.my_sql('use %s' % schema)
            else:
                if schema in only_schemas:
                    mysql.my_sql('use %s' % schema)
                else:
                    loging.info("skip execute [use %s]" % schema)
    if row_values["Query"] == "BEGIN":
        loging.debug("skip sql begin transaction")
    else:
        if write_ddl is True:
            if merge_db_table:
                map_database = merge_table_rule["database"]
                for d in map_database:
                    for k in d:
                        if merge_schema in d[k]:
                            loging.info("Replicating DDL --> %s" % row_values["Query"])
                            mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                            mysql.my_sql(row_values["Query"])
                        else:
                            loging.info("skip DDL sql: %s " % row_values["Query"])
                            break
            else:
                if only_schemas is None:
                    loging.info("Replicating DDL --> %s" % row_values["Query"])
                    mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                    mysql.my_sql(row_values["Query"])
                else:
                    if schema in only_schemas or len(schema) == 0:
                        loging.info("Replicating DDL --> %s" % row_values["Query"])
                        mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                        mysql.my_sql(row_values["Query"])
                    else:
                        loging.info("skip DDL sql: %s " % row_values["Query"])
        else:
            loging.warning("DDL statements are not replicated (write_ddl disabled)")
    update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
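
# merge_table_rule comes from this project's configuration. The loop above
# (for d in merge_table_rule["database"]: for k in d: if merge_schema in d[k])
# implies that "database" is a list of single-key dicts whose values are lists
# of schema names. A hypothetical config illustrating that assumed shape:
# merge_table_rule = {
#     "database": [
#         {"target_db_a": ["source_db_1", "source_db_2"]},  # hypothetical names
#         {"target_db_b": ["source_db_3"]},
#     ],
# }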
def analysis_gtid_event(info, init_binlog_file_name):
    """Parse a GTID_EVENT and checkpoint the position."""
    loging.info("Parse log time : %s Position id %s GTID_NEXT : %s "
                % (info["Date"], str(info["Log position"]),
                   eval(info["row_values"])["GTID_NEXT"]))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
def analysis_format_description_event(info):
    """Parse a FORMAT_DESCRIPTION_EVENT: the first event of every binlog file."""
    print("%sbinlog write start time: %s Position id %s "
          % (update_datetime(), info["Date"], str(info["Log position"])))
    loging.info("binlog write start time: %s Position id %s "
                % (info["Date"], str(info["Log position"])))
def judgeprocess(processname):
    """Return the pid of the first process whose command line contains
    processname, or False if no such process exists."""
    for pid in psutil.pids():
        try:
            cmdlines = psutil.Process(pid).cmdline()
        except Exception:
            # The process may have exited, or we lack permission to inspect it.
            continue
        for cmdline in cmdlines:
            if processname in cmdline:
                return pid
    # Only report failure after scanning every pid.
    return False


# Shutdown complete
if __name__ == "__main__":
    print("%sStarting shutdown..." % update_datetime())
    shutdown_program()
    time.sleep(3)
    process_id = judgeprocess('startup.py')
    if process_id is not False:
        psutil.Process(process_id).kill()
        print("%sShutdown complete" % update_datetime())
        loging.info("Shutdown complete")
    else:
        print("%sProgram already stopped on its own, please check manually" % update_datetime())
        loging.info("Program already stopped on its own, please check manually")
def my_sql(self, sql):
    """Execute one statement with keepalive, lock-wait retry, duplicate-statement
    skipping, and zero-rows-affected diagnostics."""
    current_time = int(time.time())
    if current_time - self.startup_time >= 1200:
        # More than 20 minutes since the last check: ping the server and
        # rebuild the cursor if the connection has gone away.
        self.startup_time = int(time.time())
        try:
            self.conn.ping()
        except Exception:
            self.cur = self.conn.cursor()
        data = self.cur.execute(sql)
        self.old_sql = sql
    else:
        try:
            if self.old_sql == sql:
                # The exact same statement was just executed: skip the repeat.
                loging.debug("skip sentence : %s " % sql)
                data = 0
            else:
                data = self.cur.execute(sql)
                self.old_sql = sql
        except Exception as er:
            error = er.args
            if 1205 in error:
                # Error 1205: lock wait timeout -- retry once after 5 seconds.
                loging.error(er.args)
                loging.warning("Retry after 5 seconds , execute sql %s :" % sql)
                time.sleep(5)
                try:
                    data = self.cur.execute(sql)
                    self.old_sql = sql
                except Exception as e:
                    loging.error(e.args)
                    loging.critical("--->> %s " % sql)
                    sys.exit("SQL execution error: %s" % e)
            else:
                loging.critical("SQL execution error: %s" % er)
                loging.critical("--->> %s " % sql)
                sys.exit("SQL execution error: %s" % er)
    if data == 0:
        if sql[:3] == 'use':
            loging.debug(sql)
        elif sql[:6] in ("insert", "update", "delete"):
            # Zero rows affected is reported with the matching MySQL replication
            # error: 1062 (HA_ERR_FOUND_DUPP_KEY) for inserts, 1032
            # (HA_ERR_KEY_NOT_FOUND) for updates and deletes.
            if sql[:6] == "insert":
                error_code, handler_error = '1062', 'HA_ERR_FOUND_DUPP_KEY'
            else:
                error_code, handler_error = '1032', 'HA_ERR_KEY_NOT_FOUND'
            msg = ("SQL affected %d rows Error_code: %s; handler error %s; ----->> %s "
                   % (data, error_code, handler_error, sql))
            loging.error(msg)
            if skip_err_code is None:
                print("%s%s" % (update_datetime(), msg))
                sys.exit("SQL thread exited abnormally! Check the detailed log")
            elif error_code in skip_err_code or 'all' in skip_err_code:
                print("%s%s" % (update_datetime(), msg))
        else:
            loging.info(sql)
    else:
        loging.info("SQL affected %d rows" % data)
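
# The self.conn / self.cur pair used above is created elsewhere in this
# project. A minimal sketch of a compatible setup, assuming PyMySQL (all
# connection parameters below are placeholders). Note that PyMySQL's
# Connection.ping() reconnects by default (reconnect=True), while
# ping(reconnect=False) raises instead -- the latter is the behavior the
# except branch above appears to expect.
import pymysql

def _make_connection_sketch():
    conn = pymysql.connect(host='127.0.0.1', port=3306, user='repl',
                           password='***', autocommit=True)
    return conn, conn.cursor()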