def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger):
    """Set up the merge of one source database.

    Opens a connection to *source_db*, prepares the session (see
    prepare_db) and builds a fresh schema map of the source database.

    :param destination_db_map: pre-built schema map of the destination db
    :param source_db: connection-settings dict for the merged (source) db;
        must contain a 'db' key holding the schema name
    :param destination_db: connection-settings dict for the destination db
    :param config: merge configuration; increment_step is read here
    :param counter: ordinal of this source db among the merged ones
        (presumably multiplied by increment_step to offset keys — confirm
        against the enclosing class's _increment_value property)
    :param logger: logger exposing log() and a writable qs attribute
    """
    self._destination_db_map = destination_db_map
    self._increment_step = config.increment_step
    self._source_db = source_db
    self._destination_db = destination_db
    self._config = config
    self._counter = counter
    self._logger = logger
    # Connection and cursor stay open for the object's lifetime.
    self._conn = create_connection(self._source_db)
    self._cursor = self._conn.cursor()
    self.prepare_db()
    self._logger.log("Processing database '%s'..." % self._source_db['db'])
    # Indexes may be named differently in each database, therefore we need
    # to remap them instead of reusing another database's map.
    self._logger.log(" -> Re-mapping database")
    self._source_mapper = Mapper(self._conn, source_db['db'], config,
                                 MiniLogger(), verbose=False)
    db_map = self._source_mapper.map_db()
    self._db_map = db_map
def merge(self):
    """Run the six-step merge of the source db into the destination,
    committing everything in a single transaction."""
    log = self._logger.log
    self._conn.begin()
    log(" ")
    log("Processing database '%s'..." % self._source_db['db'])
    log(" -> 1/6 Executing preprocess_queries (specified in config)")
    self.execute_preprocess_queries()
    log(" -> 2/6 Re-mapping database")
    self._source_mapper = Mapper(self._conn, self._source_db['db'],
                                 self._config, MiniLogger(), verbose=False)
    self._db_map = self._source_mapper.map_db()
    # The main DB keeps its original keys; only the other source dbs
    # get their PKs/FKs shifted.
    is_main_db = self._source_db['db'] == self._config.main_db
    log(" -> 3/6 Incrementing PKs")
    if not is_main_db:
        self.increment_pks()
    log(" -> 4/6 Incrementing FKs")
    if not is_main_db:
        self.increment_fks()
    log(" -> 5/6 Copying data to the destination db")
    self.copy_data_to_target()
    log(" -> 6/6 Committing changes")
    self._conn.commit()
    log("----------------------------------------")
def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger):
    """Prepare the merge of *source_db*: connect, remap the schema and
    re-apply the FK mapping (recovers from a previously broken run)."""
    # Plain attribute copies first; nothing below reads them until the
    # connection is opened.
    self._source_db = source_db
    self._destination_db = destination_db
    self._destination_db_map = destination_db_map
    self._config = config
    self._counter = counter
    self._logger = logger
    self._increment_step = config.increment_step
    self._orphaned_rows_update_values = config.orphaned_rows_update_values
    # Open the working connection and configure the session.
    self._conn = create_connection(self._source_db)
    self._cursor = self._conn.cursor()
    self.prepare_db()
    self._logger.log("Processing database '%s'..." % self._source_db['db'])
    # Index names can differ per database, so the schema is always
    # remapped from scratch rather than copied from another database.
    self._logger.log(" -> 1/2 Re-mapping database")
    self._source_mapper = Mapper(self._conn, source_db['db'],
                                 MiniLogger(), verbose=False)
    schema_map = self._source_mapper.map_db()
    self._logger.log(" -> 2/2 Re-applying FKs mapping to current database schema - in case execution broke before")
    map_fks(schema_map, True)
    self._db_map = schema_map
def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger):
    """Set up the merge of one source database.

    Opens a connection to *source_db*, prepares the session (see
    prepare_db), remaps the source schema and re-applies the FK mapping.

    :param destination_db_map: pre-built schema map of the destination db
    :param source_db: connection-settings dict for the merged (source) db;
        must contain a "db" key holding the schema name
    :param destination_db: connection-settings dict for the destination db
    :param config: merge configuration; increment_step and
        orphaned_rows_update_values are read here
    :param counter: ordinal of this source db among the merged ones
    :param logger: logger exposing log() and a writable qs attribute
    """
    self._destination_db_map = destination_db_map
    self._increment_step = config.increment_step
    self._orphaned_rows_update_values = config.orphaned_rows_update_values
    self._source_db = source_db
    self._destination_db = destination_db
    self._config = config
    self._counter = counter
    self._logger = logger
    # Connection and cursor stay open for the object's lifetime.
    self._conn = create_connection(self._source_db)
    self._cursor = self._conn.cursor()
    self.prepare_db()
    self._logger.log("Processing database '%s'..." % self._source_db["db"])
    # Indexes may be named differently in each database, therefore we need
    # to remap them instead of reusing another database's map.
    self._logger.log(" -> 1/2 Re-mapping database")
    self._source_mapper = Mapper(self._conn, source_db["db"], MiniLogger(), verbose=False)
    db_map = self._source_mapper.map_db()
    self._logger.log(" -> 2/2 Re-applying FKs mapping to current database schema - in case execution broke before")
    # NOTE(review): a sibling variant of this constructor passes True here;
    # confirm which map_fks() flag value is intended for this code path.
    map_fks(db_map, False)
    self._db_map = db_map
# Abort early: merging zero databases is a configuration error.
if len(config.merged_dbs) == 0:
    print "You must specify at least one database to merge"
    sys.exit()

# Prepare logger

#####################################################################
# STEP 1 - map database schema, relations and indexes
print "STEP 1. Initial mapping of DB schema"
print " -> 1.1 First merged db"
# Only the first merged db is mapped here — presumably all merged dbs
# share the same schema; TODO confirm.
mapped_db = config.merged_dbs[0]
conn = create_connection(mapped_db, config.common_data)
mapper = Mapper(conn, mapped_db['db'], MiniLogger())
db_map = mapper.map_db()
conn.close()
print " -> 1.2 Destination db"
# The destination db gets its own independent map (index names may differ).
conn = create_connection(config.destination_db, config.common_data)
mapper = Mapper(conn, config.destination_db['db'], MiniLogger())
destination_db_map = mapper.map_db()
conn.close()
class Merger(object):
    """Merges a single source MySQL database into the destination database.

    One instance handles one source db: it connects, remaps the schema,
    shifts PKs/FKs by a per-database offset and copies the rows over to
    the destination, all inside one transaction (see merge()).
    """

    # Connection/cursor to the source database (opened in __init__,
    # closed in __del__).
    _conn = None
    _cursor = None
    # Mapper instance and the schema map it produced for the source db.
    _source_mapper = None
    _db_map = None
    # Pre-built schema map of the destination database.
    _destination_db_map = None
    _config = None
    _logger = None
    # Ordinal of this source db among the merged ones.
    _counter = 0
    _destination_db = None
    _source_db = None
    _increment_step = 0
    # Key offset applied to this database: counter * increment_step.
    _increment_value = property(
        lambda self: self._counter * self._increment_step)

    def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger):
        """Connect to *source_db*, prepare the session and map its schema."""
        self._destination_db_map = destination_db_map
        self._increment_step = config.increment_step
        self._source_db = source_db
        self._destination_db = destination_db
        self._config = config
        self._counter = counter
        self._logger = logger
        self._conn = create_connection(self._source_db)
        self._cursor = self._conn.cursor()
        self.prepare_db()
        self._logger.log("Processing database '%s'..." % self._source_db['db'])
        # Indexes may be named differently in each database, therefore we need
        # to remap them instead of reusing another database's map.
        self._logger.log(" -> Re-mapping database")
        self._source_mapper = Mapper(self._conn, source_db['db'], config,
                                     MiniLogger(), verbose=False)
        db_map = self._source_mapper.map_db()
        self._db_map = db_map

    def prepare_db(self):
        """Set the session charset and escalate MySQL warnings to errors.

        The filterwarnings call is process-wide: any MySQLdb.Warning raised
        after this point becomes an exception.
        """
        cur = self._cursor
        self._logger.qs = "set names utf8"
        cur.execute(self._logger.qs)
        warnings.filterwarnings('error', category=MySQLdb.Warning)

    def __del__(self):
        # Best-effort cleanup; attributes may be None if __init__ failed early.
        if self._cursor:
            self._cursor.close()
        if self._conn:
            self._conn.close()

    def merge(self):
        """Run the six-step merge in a single transaction.

        The main db (config.main_db) keeps its original keys; all other
        source dbs get their PKs/FKs shifted before copying.
        """
        self._conn.begin()
        self._logger.log(" ")
        self._logger.log("Processing database '%s'..."
                         % self._source_db['db'])
        self._logger.log(
            " -> 1/6 Executing preprocess_queries (specified in config)")
        self.execute_preprocess_queries()
        self._logger.log(" -> 2/6 Re-mapping database")
        self._source_mapper = Mapper(self._conn, self._source_db['db'],
                                     self._config, MiniLogger(), verbose=False)
        db_map = self._source_mapper.map_db()
        self._db_map = db_map
        self._logger.log(" -> 3/6 Incrementing PKs")
        # Do not touch the main DB
        if (self._source_db['db'] != self._config.main_db):
            self.increment_pks()
        self._logger.log(" -> 4/6 Incrementing FKs")
        # Do not touch the main DB
        if (self._source_db['db'] != self._config.main_db):
            self.increment_fks()
        self._logger.log(" -> 5/6 Copying data to the destination db")
        self.copy_data_to_target()
        self._logger.log(" -> 6/6 Committing changes")
        self._conn.commit()
        self._logger.log("----------------------------------------")

    def execute_preprocess_queries(self):
        """Run the user-supplied config.preprocess_queries on the source db.

        Any failure is routed through handle_exception with a hint to fix
        the config.
        """
        cur = self._cursor
        # NOTE(review): reaches into MySQLdb cursor internals to suppress
        # per-query warnings for this cursor — confirm against the installed
        # driver version.
        cur._defer_warnings = True
        for q in self._config.preprocess_queries:
            try:
                self._logger.qs = q
                cur.execute(self._logger.qs)
            except Exception, e:
                handle_exception(
                    "There was an error while executing preprocess_queries\nPlease fix your config and try again",
                    e, self._conn)
class Merger(object): _conn = None _cursor = None _source_mapper = None _db_map = None _destination_db_map = None _config = None _logger = None _counter = 0 _destination_db = None _source_db = None _orphaned_rows_update_values = {} _increment_step = 0 _increment_value = property(lambda self: self._counter * self._increment_step) def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger): self._destination_db_map = destination_db_map self._increment_step = config.increment_step self._orphaned_rows_update_values = config.orphaned_rows_update_values self._source_db = source_db self._destination_db = destination_db self._config = config self._counter = counter self._logger = logger self._conn = create_connection(self._source_db) self._cursor = self._conn.cursor() self.prepare_db() self._logger.log("Processing database '%s'..." % self._source_db['db']) # Indexes may be named differently in each database, therefore we need # to remap them self._logger.log(" -> 1/2 Re-mapping database") self._source_mapper = Mapper(self._conn, source_db['db'], MiniLogger(), verbose=False) db_map = self._source_mapper.map_db() self._logger.log(" -> 2/2 Re-applying FKs mapping to current database schema - in case execution broke before") map_fks(db_map, True) self._db_map = db_map # Remove from the map tables that are missing in destination db #for table in lists_diff(self._destination_db, self.get_overlapping_tables()): # del self._destination_db[table] def prepare_db(self): cur = self._cursor self._logger.qs = "set names UTF8MB4" cur.execute(self._logger.qs) warnings.filterwarnings('error', category=MySQLdb.Warning) def _fk_checks(self, enable): self._logger.qs = "set foreign_key_checks=%d" % (enable) self._cursor.execute(self._logger.qs) def __del__(self): if self._cursor: self._cursor.close() if self._conn: self._conn.close() def merge(self): self._conn.begin() self._logger.log(" ") self._logger.log("Processing database '%s'..." 
% self._source_db['db']) self._fk_checks(False) self._logger.log(" -> 1/9 Executing preprocess_queries (specified in config)") self.execute_preprocess_queries() self._logger.log(" -> 2/9 Converting tables to InnoDb") self.convert_tables_to_innodb() self._logger.log(" -> 3/9 Converting FKs to UPDATE CASCADE") self.convert_fks_to_update_cascade() self._logger.log(" -> 4/9 Converting mapped FKs to real FKs") self.convert_mapped_fks_to_real_fks() self._logger.log(" -> 5/9 Nulling orphaned FKs") self.null_orphaned_fks() self._fk_checks(True) self._logger.log(" -> 6/9 Incrementing PKs") self.increment_pks() self._logger.log(" -> 7/9 Mapping pk in case of uniques conflict") self.map_pks_to_target_on_unique_conflict() self._fk_checks(False) self._logger.log(" -> 8/9 Copying data to the destination db") self.copy_data_to_target() self._fk_checks(True) self._logger.log(" -> 9/9 Decrementing pks") self.rollback_pks() self._logger.log(" -> 10/9 Committing changes") self._conn.commit() self._logger.log("----------------------------------------") def execute_preprocess_queries(self): cur = self._cursor for q in self._config.preprocess_queries: try: self._logger.qs = q cur.execute(self._logger.qs) #except _mysql_exceptions.OperationalError,e: except Exception, e: handle_exception( "There was an error while executing preprocess_queries\nPlease fix your config and try again", e, self._conn)
# Abort early: merging zero databases is a configuration error.
if len(config.merged_dbs) == 0:
    print "You must specify at least one database to merge"
    sys.exit()

# Prepare logger

#####################################################################
# STEP 1 - map database schema, relations and indexes
print "STEP 1. Initial mapping of DB schema"
print " -> 1.1 First merged db"
# Only the first merged db is mapped here — presumably all merged dbs
# share the same schema; TODO confirm.
mapped_db = config.merged_dbs[0]
conn = create_connection(mapped_db, config.common_data)
mapper = Mapper(conn, mapped_db['db'], config, MiniLogger())
db_map = mapper.map_db()
conn.close()
print " -> 1.2 Destination db"
# The destination db gets its own map; its preprocess queries run (and are
# committed) before the schema is mapped.
conn = create_connection(config.destination_db, config.common_data)
mapper = Mapper(conn, config.destination_db['db'], config, MiniLogger())
mapper.execute_preprocess_queries_target()
destination_db_map = mapper.map_db()
conn.commit()
conn.close()
class Merger(object):
    """Merges a single source MySQL database into the destination database.

    One instance handles one source db: it connects, remaps the schema,
    normalizes tables/FKs, shifts PKs by a per-database offset and copies
    the rows to the destination, all inside one transaction (see merge()).
    """

    # Connection/cursor to the source database (opened in __init__,
    # closed in __del__).
    _conn = None
    _cursor = None
    # Mapper instance and the schema map it produced for the source db.
    _source_mapper = None
    _db_map = None
    # Pre-built schema map of the destination database.
    _destination_db_map = None
    _config = None
    _logger = None
    # Ordinal of this source db among the merged ones.
    _counter = 0
    _destination_db = None
    _source_db = None
    # NOTE(review): class-level mutable default, shared until __init__
    # replaces it per instance.
    _orphaned_rows_update_values = {}
    _increment_step = 0
    # Key offset applied to this database: counter * increment_step.
    _increment_value = property(lambda self: self._counter * self._increment_step)

    def __init__(self, destination_db_map, source_db, destination_db, config, counter, logger):
        """Connect to *source_db*, prepare the session, map the schema and
        re-apply the FK mapping (recovers from a previously broken run)."""
        self._destination_db_map = destination_db_map
        self._increment_step = config.increment_step
        self._orphaned_rows_update_values = config.orphaned_rows_update_values
        self._source_db = source_db
        self._destination_db = destination_db
        self._config = config
        self._counter = counter
        self._logger = logger
        self._conn = create_connection(self._source_db)
        self._cursor = self._conn.cursor()
        self.prepare_db()
        self._logger.log("Processing database '%s'..." % self._source_db["db"])
        # Indexes may be named differently in each database, therefore we need
        # to remap them instead of reusing another database's map.
        self._logger.log(" -> 1/2 Re-mapping database")
        self._source_mapper = Mapper(self._conn, source_db["db"], MiniLogger(), verbose=False)
        db_map = self._source_mapper.map_db()
        self._logger.log(" -> 2/2 Re-applying FKs mapping to current database schema - in case execution broke before")
        # NOTE(review): a sibling variant passes True here; confirm which
        # map_fks() flag value is intended for this code path.
        map_fks(db_map, False)
        self._db_map = db_map
        # Remove from the map tables that are missing in destination db
        # for table in lists_diff(self._destination_db, self.get_overlapping_tables()):
        #     del self._destination_db[table]

    def prepare_db(self):
        """Set the session charset and escalate MySQL warnings to errors.

        The filterwarnings call is process-wide: any MySQLdb.Warning raised
        after this point becomes an exception.
        """
        cur = self._cursor
        self._logger.qs = "set names utf8"
        cur.execute(self._logger.qs)
        warnings.filterwarnings("error", category=MySQLdb.Warning)

    def _fk_checks(self, enable):
        """Toggle MySQL foreign_key_checks for this session (enable: 0/1)."""
        self._logger.qs = "set foreign_key_checks=%d" % (enable)
        self._cursor.execute(self._logger.qs)

    def __del__(self):
        # Best-effort cleanup; attributes may be None if __init__ failed early.
        if self._cursor:
            self._cursor.close()
        if self._conn:
            self._conn.close()

    def merge(self):
        """Run the ten-step merge in a single transaction.

        FK checks are disabled around the schema-rewriting and copy phases
        and re-enabled in between so that increment/rollback run validated.
        NOTE(review): the progress labels count "N/9" across ten steps and
        end with "10/9" — the denominator looks off by one.
        """
        self._conn.begin()
        self._logger.log(" ")
        self._logger.log("Processing database '%s'..."
                         % self._source_db["db"])
        self._fk_checks(False)
        self._logger.log(" -> 1/9 Executing preprocess_queries (specified in config)")
        self.execute_preprocess_queries()
        self._logger.log(" -> 2/9 Converting tables to InnoDb")
        self.convert_tables_to_innodb()
        self._logger.log(" -> 3/9 Converting FKs to UPDATE CASCADE")
        self.convert_fks_to_update_cascade()
        self._logger.log(" -> 4/9 Converting mapped FKs to real FKs")
        self.convert_mapped_fks_to_real_fks()
        self._logger.log(" -> 5/9 Nulling orphaned FKs")
        self.null_orphaned_fks()
        self._fk_checks(True)
        self._logger.log(" -> 6/9 Incrementing PKs")
        self.increment_pks()
        self._logger.log(" -> 7/9 Mapping pk in case of uniques conflict")
        self.map_pks_to_target_on_unique_conflict()
        self._fk_checks(False)
        self._logger.log(" -> 8/9 Copying data to the destination db")
        self.copy_data_to_target()
        self._fk_checks(True)
        self._logger.log(" -> 9/9 Decrementing pks")
        self.rollback_pks()
        self._logger.log(" -> 10/9 Committing changes")
        self._conn.commit()
        self._logger.log("----------------------------------------")

    def execute_preprocess_queries(self):
        """Run the user-supplied config.preprocess_queries on the source db.

        Any failure is routed through handle_exception with a hint to fix
        the config.
        """
        cur = self._cursor
        for q in self._config.preprocess_queries:
            try:
                self._logger.qs = q
                cur.execute(self._logger.qs)
            # except _mysql_exceptions.OperationalError,e:
            except Exception, e:
                handle_exception(
                    "There was an error while executing preprocess_queries\nPlease fix your config and try again",
                    e,
                    self._conn,
                )