Example #1
def analysis_stop_event(info, init_binlog_file_name):
    loging.info("解析日志时间 : %s 切换binlog file Position id %s" %
                (info["Date"], str(info["Log position"])))
    print("%s解析日志时间 : %s 切换binlog file Position id %s" %
          (update_datetime(), info["Date"], str(info["Log position"])))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
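
Every handler in these examples checkpoints its progress through update_binlog_pos, whose implementation is not shown. A minimal sketch, assuming the position is persisted to a local checkpoint file (the file name and format here are hypothetical):

BINLOG_POS_FILE = "binlog_position.ckpt"  # hypothetical checkpoint file

def update_binlog_pos(pos_id, binlog_file):
    # Persist the last processed binlog file name and position so a
    # restarted replicator can resume from this checkpoint. Example #2
    # passes the file name with a trailing newline, hence the strip().
    with open(BINLOG_POS_FILE, "w") as f:
        f.write("%s\n%s\n" % (binlog_file.strip(), pos_id))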
Example #2
def analysis_rotate_event(info):
    print("%s获取文件 %s Position id %s " %
          (update_datetime(), info["Next binlog file"], info["Position"]))
    loging.info("获取文件 %s Position id %s " %
                (info["Next binlog file"], info["Position"]))
    init_binlog_file_name = "%s\n" % info["Next binlog file"]
    update_binlog_pos(pos_id=info["Position"],
                      binlog_file=init_binlog_file_name)
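
The keys used here ("Date", "Log position", "Next binlog file", "Position") match the lines printed by python-mysql-replication's BinLogEvent.dump(), which suggests the info dict is parsed from that output. A possible parser, shown only as an assumption about the missing glue code:

def dump_to_info(dump_text):
    # Turn the "Key: value" lines of a captured dump() into a dict
    # with keys such as "Date" and "Log position".
    info = {}
    for line in dump_text.splitlines():
        if ": " in line:
            key, value = line.split(": ", 1)
            info[key.strip()] = value.strip()
    return info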
Example #3
def analysis_xid_event(info, init_binlog_file_name):
    if 'row_values' in info:
        loging.debug("Parsed log time : %s, position id %s, transaction ID : %s " %
                     (info["Date"], str(info["Log position"]),
                      eval(info["row_values"])["Transaction ID"]))
    else:
        loging.debug("Parsed log time : %s, position id %s" %
                     (info["Date"], str(info["Log position"])))
    loging.info("Parsed XID event : %s, position id %s" %
                (info["Date"], str(info["Log position"])))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
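
info["row_values"] is deserialized with eval() throughout these examples. If the string is a plain Python literal (a dict repr), ast.literal_eval is a safer drop-in that rejects arbitrary expressions; a sketch of that substitution, not the project's own code:

import ast

def parse_row_values(raw):
    # Accepts only Python literals (dicts, lists, numbers, strings),
    # unlike eval(), which would execute any expression in raw.
    return ast.literal_eval(raw)

# e.g. parse_row_values(info["row_values"])["Transaction ID"]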
Example #4
def batch_analysis_insert_binlog(self, info, init_binlog_file_name, table_map):
    analysis_sqls = ''
    values = eval(info["row_values"])["Values"]
    if self.count_num == 0:
        # Remember the position at which the current batch started;
        # it is reported when the batch is committed below.
        self.log_position = str(info["Log position"])
    if len(values) > 1:
        rows = insert_key_values(values[0]["values"], table_map)
        if len(self.analysis_sql) == 0:
            init_sql = "insert into %s (%s) VALUES \n" % (table_map, rows[0])
        else:
            init_sql = ''
        for v in values:
            rows = insert_key_values(v["values"], table_map)
            sql_values = "(%s), \n" % (rows[1].replace("'None'", 'Null'))
            analysis_sqls += sql_values
            self.count_num += 1
        read_time_position = str(info["Log position"])
        loging.debug("Parsed log time : %s, position id %s" % (info["Date"], read_time_position))
        loging.info("Batch insert parse id : %s:%d-%s" % (self.server_uuid, self.batch_number_count, read_time_position))
        self.analysis_sql += init_sql + analysis_sqls
        self.db_table_map = table_map
        # Rows are not written per event; they are flushed in batches below.
        update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
    else:
        values = eval(info["row_values"])["Values"][0]["values"]
        rows = insert_key_values(values, table_map)
        self.count_num += 1
        if len(self.analysis_sql) == 0:
            self.analysis_sql = "insert into %s (%s) VALUES (%s)," % (table_map, rows[0], rows[1].replace("'None'", 'Null'))
        else:
            self.analysis_sql += "(%s), \n" % rows[1].replace("'None'", 'Null')
        read_time_position = str(info["Log position"])
        loging.debug("Parsed log time : %s, position id %s " % (info["Date"], read_time_position))
        loging.info("Batch insert parse id : %s:%d-%s" % (self.server_uuid, self.batch_number_count, read_time_position))
        self.db_table_map = table_map
        update_binlog_pos(pos_id=str(info["Log position"]), binlog_file=init_binlog_file_name)
    if write_db is True and self.count_num >= batch_number:
        # Trim the trailing comma, then flush the accumulated multi-row insert.
        self.analysis_sql = self.analysis_sql[:self.analysis_sql.rindex(',')]
        loging.debug("Query : %s " % self.analysis_sql)
        mysql.my_sql(self.analysis_sql)
        loging.info("Batch insert parse id : %s:%d-(%s-%s) committed" %
                    (self.server_uuid, self.batch_number_count,
                     self.log_position, str(info["Log position"])))
        self.batch_number_count += 1
        self.analysis_sql = ''
        self.count_num = 0
    return self.count_num
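
insert_key_values is not shown in any of these examples; from its call sites it must return a (columns, values) pair built from one row's values dict. A rough sketch under that assumption (hypothetical helper, not the project's implementation):

def insert_key_values(row_values, table_map):
    # Build the column list and quoted value list for one row, e.g.
    # {"id": 1, "name": None} -> ("id, name", "'1', 'None'").
    # The caller later rewrites "'None'" to Null, which matches how
    # "'%s'" % None renders a missing value.
    columns = ", ".join(row_values.keys())
    values = ", ".join("'%s'" % v for v in row_values.values())
    return columns, values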
Example #5
def analysis_query_event(info, init_binlog_file_name):
    row_values = eval(info["row_values"])
    schema = row_values["Schema"]
    loging.debug(
        "Parsed log time : %s, position id %s, current schema : [%s], query : %s " %
        (info["Date"], str(info["Log position"]), schema, row_values["Query"]))
    loging.info("Parsed event time : %s, position id %s" %
                (info["Date"], str(info["Log position"])))
    if len(schema) != 0:
        loging.debug('switch database : use %s ' % schema)
        if merge_db_table is True:
            merge_db = merge_replicate_table(schema)
            loging.info("Schema remapped by merge rule: %s ---> %s " % (schema, merge_db))
            # merge_schema is also used by the DDL branch below, so
            # compute it regardless of write_db.
            merge_schema = merge_db
            if write_db is True:
                if merge_schema in only_schemas:
                    mysql.my_sql('use %s' % merge_schema)
                else:
                    loging.info("skip execute [use %s]" % merge_schema)
        else:
            if write_db is True:
                if only_schemas is None:
                    if "create database" not in str(
                            row_values["Query"]).lower():
                        mysql.my_sql('use %s' % schema)
                else:
                    if schema in only_schemas:
                        mysql.my_sql('use %s' % schema)
                    else:
                        loging.info("skip execute [use %s]" % schema)
    if row_values["Query"] == "BEGIN":
        loging.debug("skip sql begin transaction")
    else:
        if write_ddl is True:
            if merge_db_table:
                map_database = merge_table_rule["database"]
                for d in map_database:
                    for k in d:
                        if merge_schema in d[k]:
                            loging.info("Replicating DDL --> %s" % row_values["Query"])
                            mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                            mysql.my_sql(row_values["Query"])
                        else:
                            loging.info("skip DDL sql: %s " %
                                        row_values["Query"])
                            break
            else:
                if only_schemas is None:
                    loging.info("Replicating DDL --> %s" % row_values["Query"])
                    mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                    mysql.my_sql(row_values["Query"])
                elif schema in only_schemas or len(schema) == 0:
                    loging.info("Replicating DDL --> %s" % row_values["Query"])
                    mysql.my_sql("/*!40014 SET FOREIGN_KEY_CHECKS=0*/")
                    mysql.my_sql(row_values["Query"])
                else:
                    loging.info("skip DDL sql: %s " %
                                row_values["Query"])
        else:
            loging.warning("DDL statements are not supported when write_ddl is disabled")
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
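
merge_replicate_table and merge_table_rule come from outside these snippets; the usage above only shows that a source schema maps to a merged target schema and that merge_table_rule["database"] is a list of dicts whose values are lists checked against the merged schema. One shape that would satisfy both call sites, purely illustrative:

# Hypothetical rule: each entry maps a source schema to the target
# schema(s) it is merged into.
merge_table_rule = {
    "database": [
        {"shard_db_1": ["merged_db"]},
        {"shard_db_2": ["merged_db"]},
    ]
}

def merge_replicate_table(schema):
    # Return the merged target for a source schema, or the schema
    # itself when no rule matches.
    for rule in merge_table_rule["database"]:
        if schema in rule:
            return rule[schema][0]
    return schema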
Example #6
def analysis_gtid_event(info, init_binlog_file_name):
    loging.info("解析日志时间 : %s Position id %s GTID_NEXT : %s " %
                (info["Date"], str(info["Log position"]),
                 eval(info["row_values"])["GTID_NEXT"]))
    update_binlog_pos(pos_id=str(info["Log position"]),
                      binlog_file=init_binlog_file_name)
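
None of the examples show their caller, but together the handlers suggest an event loop that reads the binlog stream and dispatches on event type. A minimal sketch using python-mysql-replication (an assumption; make_info, the connection settings, and init_binlog_file_name are hypothetical):

from pymysqlreplication import BinLogStreamReader
from pymysqlreplication.event import (GtidEvent, QueryEvent,
                                      RotateEvent, XidEvent)

stream = BinLogStreamReader(
    connection_settings={"host": "127.0.0.1", "port": 3306,
                         "user": "repl", "passwd": "secret"},
    server_id=101, blocking=True)

init_binlog_file_name = "mysql-bin.000001\n"  # hypothetical starting file

for event in stream:
    info = make_info(event)  # hypothetical: event fields -> info dict
    if isinstance(event, RotateEvent):
        analysis_rotate_event(info)
    elif isinstance(event, GtidEvent):
        analysis_gtid_event(info, init_binlog_file_name)
    elif isinstance(event, QueryEvent):
        analysis_query_event(info, init_binlog_file_name)
    elif isinstance(event, XidEvent):
        analysis_xid_event(info, init_binlog_file_name)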