def page_activated(self, advancing):
    """Register the data-copy tasks when this wizard page is entered.

    Tasks are only (re)built when the wizard is advancing forward and at
    least one table was selected for transfer.  When a live data copy is
    requested, source/target passwords are resolved (prompting the user
    if needed) and a DataMigrator is created to drive the copy.
    """
    if advancing and self.main.plan.state.dataBulkTransferParams["tableList"]:
        options = self.main.plan.state.dataBulkTransferParams
        copy_script = options.get("GenerateCopyScript", None)
        bulk_copy_script = options.get("GenerateBulkCopyScript", None)

        self.add_task(self._prepare_copy, "Prepare information for data copy",
                      "Prepare Information for Data Copy")
        # PEP 8: compare against the None singleton with `is not None`, not `!=`.
        if copy_script is not None:
            self._copy_script_task = self.add_task(self._create_copy_script,
                                                   "Create shell script for data copy",
                                                   "Create Shell Script for Data Copy")
        if bulk_copy_script is not None:
            self._bulk_copy_script_task = self.add_task(self._create_bulk_copy_script,
                                                        "Create shell script for bulk data copy",
                                                        "Create Shell Script for Bulk Data Copy")
        if options.get("LiveDataCopy", False) or options.get("GenerateDumpScript", False):
            self._migrate_task1 = self.add_threaded_task(self._count_rows,
                                                         "Determine number of rows to copy",
                                                         "Determine number of rows to copy")
            self._migrate_task2 = self.add_threaded_task(self._migrate_data,
                                                         "Copy data to target RDBMS",
                                                         "Copy data to target RDBMS")
        self._migrating_data = False
        self._progress_per_table = {}

        if options.get("LiveDataCopy", False):
            source_password = self.main.plan.migrationSource.password
            if source_password is None:
                source_password = request_password(self.main.plan.migrationSource.connection)
            target_password = self.main.plan.migrationTarget.password
            if target_password is None:
                # Reuse the source password when both connections point at the
                # same host with the same user name; otherwise prompt below.
                if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                    if self.main.plan.migrationTarget.connection.parameterValues['userName'] == self.main.plan.migrationSource.connection.parameterValues['userName']:
                        target_password = source_password
            if target_password is None:
                target_password = request_password(self.main.plan.migrationTarget.connection)
        else:
            # Script generation only: no live connection is made, so no
            # passwords are needed.
            source_password = None
            target_password = None

        self._transferer = DataMigrator(self, self.main.plan.state.dataBulkTransferParams,
                                        self.main.plan.migrationSource.connection, source_password,
                                        self.main.plan.migrationTarget.connection, target_password)
        self._transferer.copytable_path = self.main.plan.wbcopytables_path_bin

    WizardProgressPage.page_activated(self, advancing)
def page_activated(self, advancing):
    """Re-enable or disable the already-registered copy tasks on page entry.

    This variant assumes the task objects (_copy_script_task,
    _migrate_task1/2) were created earlier and only toggles them based on
    the current bulk-transfer options, then rebuilds the DataMigrator.
    """
    if advancing:
        options = self.main.plan.state.dataBulkTransferParams
        copy_script = options.get("GenerateCopyScript", None)
        # PEP 8: use `is not None` rather than `!= None` for the singleton test.
        self._copy_script_task.set_enabled(copy_script is not None)

        # Collapse the duplicated True/False branches: both tasks share the
        # same enablement condition.
        run_migration = bool(options.get("LiveDataCopy", False) or
                             options.get("GenerateDumpScript", False))
        self._migrate_task1.set_enabled(run_migration)
        self._migrate_task2.set_enabled(run_migration)

        self._migrating_data = False
        self._progress_per_table = {}

        if options.get("LiveDataCopy", False):
            source_password = self.main.plan.migrationSource.password
            if source_password is None:
                source_password = request_password(self.main.plan.migrationSource.connection)
            target_password = self.main.plan.migrationTarget.password
            if target_password is None:
                # Reuse the source password when host and user name match.
                if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                    if self.main.plan.migrationTarget.connection.parameterValues['userName'] == self.main.plan.migrationSource.connection.parameterValues['userName']:
                        target_password = source_password
            if target_password is None:
                target_password = request_password(self.main.plan.migrationTarget.connection)
        else:
            # Script generation only: no live connection, no passwords needed.
            source_password = None
            target_password = None

        self._transferer = DataMigrator(self, self.main.plan.state.dataBulkTransferParams,
                                        self.main.plan.migrationSource.connection, source_password,
                                        self.main.plan.migrationTarget.connection, target_password)
        self._transferer.copytable_path = self.main.plan.wbcopytables_path

    WizardProgressPage.page_activated(self, advancing)
def page_activated(self, advancing):
    """Register the data-copy tasks when this wizard page is entered.

    Builds the task list (prepare, optional script generation, optional
    live copy), resolves the source/target passwords for a live copy
    (prompting the user if necessary) and creates the DataMigrator.
    """
    if advancing:
        options = self.main.plan.state.dataBulkTransferParams
        copy_script = options.get("GenerateCopyScript", None)
        bulk_copy_script = options.get("GenerateBulkCopyScript", None)

        self.add_task(self._prepare_copy, "Prepare information for data copy")
        # PEP 8: compare against the None singleton with `is not None`, not `!=`.
        if copy_script is not None:
            self._copy_script_task = self.add_task(self._create_copy_script,
                                                   "Create shell script for data copy")
        if bulk_copy_script is not None:
            self._bulk_copy_script_task = self.add_task(self._create_bulk_copy_script,
                                                        "Create shell script for bulk data copy")
        if options.get("LiveDataCopy", False) or options.get("GenerateDumpScript", False):
            self._migrate_task1 = self.add_threaded_task(self._count_rows,
                                                         "Determine number of rows to copy")
            self._migrate_task2 = self.add_threaded_task(self._migrate_data,
                                                         "Copy data to target RDBMS")
        self._migrating_data = False
        self._progress_per_table = {}

        if options.get("LiveDataCopy", False):
            source_password = self.main.plan.migrationSource.password
            if source_password is None:
                source_password = request_password(self.main.plan.migrationSource.connection)
            target_password = self.main.plan.migrationTarget.password
            if target_password is None:
                # Reuse the source password when both connections target the
                # same host with the same user name; otherwise prompt below.
                if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                    if self.main.plan.migrationTarget.connection.parameterValues["userName"] == self.main.plan.migrationSource.connection.parameterValues["userName"]:
                        target_password = source_password
            if target_password is None:
                target_password = request_password(self.main.plan.migrationTarget.connection)
        else:
            # Script generation only: no live connection, no passwords needed.
            source_password = None
            target_password = None

        self._transferer = DataMigrator(self, self.main.plan.state.dataBulkTransferParams,
                                        self.main.plan.migrationSource.connection, source_password,
                                        self.main.plan.migrationTarget.connection, target_password)
        self._transferer.copytable_path = self.main.plan.wbcopytables_path

    WizardProgressPage.page_activated(self, advancing)
def page_activated(self, advancing):
    """Re-enable or disable the already-registered copy tasks on page entry.

    Assumes the task objects (_copy_script_task, _migrate_task1/2) already
    exist; toggles them from the current bulk-transfer options, resolves
    passwords for a live copy and rebuilds the DataMigrator.
    """
    if advancing:
        options = self.main.plan.state.dataBulkTransferParams
        copy_script = options.get("GenerateCopyScript", None)
        # PEP 8: use `is not None` rather than `!= None` for the singleton test.
        self._copy_script_task.set_enabled(copy_script is not None)

        # Collapse the duplicated True/False branches: both tasks share the
        # same enablement condition.
        run_migration = bool(options.get("LiveDataCopy", False) or
                             options.get("GenerateDumpScript", False))
        self._migrate_task1.set_enabled(run_migration)
        self._migrate_task2.set_enabled(run_migration)

        self._migrating_data = False
        self._progress_per_table = {}

        if options.get("LiveDataCopy", False):
            source_password = self.main.plan.migrationSource.password
            if source_password is None:
                source_password = request_password(self.main.plan.migrationSource.connection)
            target_password = self.main.plan.migrationTarget.password
            if target_password is None:
                # Reuse the source password when host and user name match.
                if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                    if self.main.plan.migrationTarget.connection.parameterValues['userName'] == self.main.plan.migrationSource.connection.parameterValues['userName']:
                        target_password = source_password
            if target_password is None:
                target_password = request_password(self.main.plan.migrationTarget.connection)
        else:
            # Script generation only: no live connection, no passwords needed.
            source_password = None
            target_password = None

        self._transferer = DataMigrator(self, self.main.plan.state.dataBulkTransferParams,
                                        self.main.plan.migrationSource.connection, source_password,
                                        self.main.plan.migrationTarget.connection, target_password)
        self._transferer.copytable_path = self.main.plan.wbcopytables_path

    WizardProgressPage.page_activated(self, advancing)
class TransferMainView(WizardProgressPage):
    """Wizard page that performs the bulk table-data transfer.

    Builds a task list (prepare, optional script generation, optional live
    copy), drives DataMigrator, logs per-table results into the plan's
    dataTransferLog, and offers a [Retry] button to resume failed copies.
    """

    def __init__(self, main):
        """Create the page UI and register it with the wizard as "DataMigration"."""
        WizardProgressPage.__init__(self, main, "Bulk Data Transfer", use_private_message_handling=True)

        self._autostart = True
        self._resume = False

        # Retry button lives next to the detail label; hidden until a
        # resumable failure occurs (see tasks_failed / show_retry_button).
        self.retry_button = mforms.newButton()
        self.retry_button.set_text('Retry')
        self.retry_button.add_clicked_callback(self.go_retry)

        self.retry_box = mforms.newBox(True)
        # Re-parent the detail label from the page content into the retry row.
        self.content.remove(self._detail_label)
        self.retry_box.add(self._detail_label, True, True)
        self.retry_box.add(self.retry_button, False, True)
        self.content.add(self.retry_box, False, False)
        self.retry_button.show(False)

        self.main.add_wizard_page(self, "DataMigration", "Bulk Data Transfer")

        # Tables that already copied fully; skipped by _prepare_copy on retry.
        self._tables_to_exclude = list()

    def page_activated(self, advancing):
        """Register the copy tasks and create the DataMigrator on page entry.

        Only rebuilds state when advancing forward.  For a live copy the
        source/target passwords are resolved, prompting the user if needed.
        """
        if advancing:
            options = self.main.plan.state.dataBulkTransferParams
            copy_script = options.get("GenerateCopyScript", None)
            bulk_copy_script = options.get("GenerateBulkCopyScript", None)
            self.add_task(self._prepare_copy, "Prepare information for data copy")
            # NOTE(review): `!= None` should be `is not None` per PEP 8.
            if copy_script != None:
                self._copy_script_task = self.add_task(self._create_copy_script, "Create shell script for data copy")
            if bulk_copy_script != None:
                self._bulk_copy_script_task = self.add_task(self._create_bulk_copy_script, "Create shell script for bulk data copy")
            if options.get("LiveDataCopy", False) or options.get("GenerateDumpScript", False):
                self._migrate_task1 = self.add_threaded_task(self._count_rows, "Determine number of rows to copy")
                self._migrate_task2 = self.add_threaded_task(self._migrate_data, "Copy data to target RDBMS")
            self._migrating_data = False
            self._progress_per_table = {}

            if options.get("LiveDataCopy", False):
                source_password = self.main.plan.migrationSource.password
                if source_password is None:
                    source_password = request_password(self.main.plan.migrationSource.connection)
                target_password = self.main.plan.migrationTarget.password
                if target_password is None:
                    # Reuse the source password when both connections point at
                    # the same host with the same user name.
                    if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                        if self.main.plan.migrationTarget.connection.parameterValues['userName'] == self.main.plan.migrationSource.connection.parameterValues['userName']:
                            target_password = source_password
                if target_password is None:
                    target_password = request_password(self.main.plan.migrationTarget.connection)
            else:
                # Script generation only: no live connection, no passwords.
                source_password = None
                target_password = None

            self._transferer = DataMigrator(self, self.main.plan.state.dataBulkTransferParams,
                                            self.main.plan.migrationSource.connection, source_password,
                                            self.main.plan.migrationTarget.connection, target_password)
            self._transferer.copytable_path = self.main.plan.wbcopytables_path_bin

        WizardProgressPage.page_activated(self, advancing)

    def go_back(self):
        """Discard the task list and reset the page before navigating back."""
        self.clear_tasks()
        self.reset(True)
        WizardProgressPage.go_back(self)

    def update_status(self):
        """Delegate status refresh to the base progress page."""
        return WizardProgressPage.update_status(self)

    def _prepare_copy(self):
        # create work list
        # Builds self._working_set: a dict keyed by "<schema>.<table>" with
        # quoted source/target names, primary-key column lists and the SELECT
        # expression used by the copy helper.  Tables in
        # self._tables_to_exclude (already copied on a previous run) are skipped.
        source_catalog = self.main.plan.migrationSource.catalog
        tables = self.main.plan.state.dataBulkTransferParams["tableList"]
        # doesSupportCatalogs: >0 catalogs, 0 schemata only, <0 neither.
        has_catalogs = self.main.plan.migrationSource.connection.driver.owner.doesSupportCatalogs > 0
        has_schema = self.main.plan.migrationSource.connection.driver.owner.doesSupportCatalogs >= 0

        source_db_module = self.main.plan.migrationSource.module_db()
        target_db_module = self.main.plan.migrationTarget.module_db()

        self._working_set = {}
        for table in tables:
            # find the source table
            stable = None
            for sschema in source_catalog.schemata:
                if sschema.name == table.owner.oldName:
                    for t in sschema.tables:
                        if t.name == table.oldName:
                            stable = t
                            break
                    break
            if not stable:
                self.send_error("Source table for %s (%s) not found, skipping...\n" % (table.name, table.oldName))
                continue

            if table.name in self._tables_to_exclude:
                continue

            if has_catalogs:
                schema_name = source_db_module.quoteIdentifier(stable.owner.owner.name)
                if stable.oldName:
                    # oldName already comes pre-quoted from the reveng stage
                    table_name = stable.oldName
                else:
                    table_name = source_db_module.quoteIdentifier(stable.owner.name) + "." + source_db_module.quoteIdentifier(stable.name)
            else:
                if has_schema:
                    schema_name = source_db_module.quoteIdentifier(stable.owner.name)
                else:
                    schema_name = ''
                table_name = source_db_module.quoteIdentifier(stable.name)
            targ_schema_name = target_db_module.quoteIdentifier(table.owner.name)
            targ_table_name = target_db_module.quoteIdentifier(table.name)

            self._working_set[schema_name + "." + table_name] = {
                "table": table,
                "source_schema": schema_name,
                "source_table": table_name,
                "target_schema": targ_schema_name,
                "target_table": targ_table_name,
                "target_table_object": table}
            select_expression = []
            source_pk_list = []
            target_pk_list = []
            for column in table.columns:
                # Generated columns are not copied.
                if column.generated:
                    continue
                if table.isPrimaryKeyColumn(column):
                    source_pk_list.append(source_db_module.quoteIdentifier(column.oldName))
                    target_pk_list.append(target_db_module.quoteIdentifier(column.name))
                # A per-column cast expression may have been attached during
                # migration; "?" is the placeholder for the column reference.
                cast = table.customData.get("columnTypeCastExpression:%s" % column.name, None)
                if cast:
                    select_expression.append(cast.replace("?", source_db_module.quoteIdentifier(column.oldName)))
                else:
                    select_expression.append(source_db_module.quoteIdentifier(column.oldName))

            # "-" marks "no primary key" for the copy helper.
            self._working_set[schema_name + "." + table_name]["source_primary_key"] = ",".join(source_pk_list) if len(source_pk_list) > 0 else "-"
            self._working_set[schema_name + "." + table_name]["target_primary_key"] = ",".join(target_pk_list) if len(target_pk_list) > 0 else "-"
            self._working_set[schema_name + "." + table_name]["select_expression"] = ", ".join(select_expression)
            # source_db_module = self.main.plan.migrationSource.module_db()
            # source_table = source_db_module.fullyQualifiedObjectName(stable)

    def _create_copy_script(self):
        """Write a standalone batch (win32) or sh script that re-runs the copy.

        The script invokes wbcopytables with the current connection arguments
        and the table list built by _prepare_copy; passwords are masked and
        must be filled in by the user.
        """
        path = self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"]
        debug_table_copy = self.main.plan.state.dataBulkTransferParams["DebugTableCopy"]
        truncate_target_tables = self.main.plan.state.dataBulkTransferParams["TruncateTargetTables"]
        worker_count = self.main.plan.state.dataBulkTransferParams["workerCount"]

        f = open(path, "w+")
        if sys.platform == "win32":
            def cmt(s):
                return "REM " + s + "\n"
        else:
            # Make the generated sh script executable (Python 2 octal literal).
            os.chmod(path, 0700)

            def cmt(s):
                return "# " + s + "\n"
            f.write("#!/bin/sh\n")

        f.write(cmt("Workbench Table Data copy script"))
        f.write(cmt("Workbench Version: %s" % Version.fromgrt(grt.root.wb.info.version)))
        f.write(cmt(""))
        f.write(cmt("Execute this to copy table data from a source RDBMS to MySQL."))
        f.write(cmt("Edit the options below to customize it. You will need to provide passwords, at least."))
        f.write(cmt(""))
        f.write(cmt("Source DB: %s (%s)" % (self.main.plan.migrationSource.connection.hostIdentifier,
                                            self.main.plan.migrationSource.connection.driver.owner.caption)))
        f.write(cmt("Target DB: %s" % self.main.plan.migrationTarget.connection.hostIdentifier))
        f.write("\n\n")

        if sys.platform == "win32":
            f.write("@ECHO OFF\n")
            f.write("REM Source and target DB passwords\n")
            f.write("set arg_source_password=\n")
            f.write("set arg_target_password=\n")
            f.write("""
IF [%arg_source_password%] == [] (
    IF [%arg_target_password%] == [] (
        ECHO WARNING: Both source and target RDBMSes passwords are empty. You should edit this file to set them.
    )
)
""")
            f.write("set arg_worker_count=%d\n" % worker_count)
            f.write("REM Uncomment the following options according to your needs\n")
            f.write("\n")
            f.write("REM Whether target tables should be truncated before copy\n")
            f.write(("" if truncate_target_tables else "REM ") + "set arg_truncate_target=--truncate-target\n")
            #f.write("REM Copy tables incrementally. Useful for updating table contents after an initial migration\n")
            #f.write("REM set arg_incremental_copy=--incremental-copy\n")
            f.write("REM Enable debugging output\n")
            f.write(("" if debug_table_copy else "REM ") + "set arg_debug_output=--log-level=debug3\n")
            f.write("\n\n")
            f.write("REM Creation of file with table definitions for copytable\n\n")

            # Creates a temporary file name with the tables to be migrated
            filename = '"%TMP%\wb_tables_to_migrate.txt"'
            f.write("set table_file=%s\n" % filename)
            f.write("TYPE NUL > %s\n" % filename)
            for table in self._working_set.values():
                # One tab-separated line per table, consumed via --table-file.
                fields = []
                fields.append(table["source_schema"])
                fields.append(table["source_table"])
                fields.append(table["target_schema"])
                fields.append(table["target_table"])
                fields.append(table["source_primary_key"].replace("'", r"\'"))
                fields.append(table["target_primary_key"].replace("'", r"\'"))
                fields.append(table["select_expression"].replace("'", r"\'"))

                line = "ECHO %s >> %s" % ("\t".join(fields), filename)
                f.write(line + "\n")

            f.write("\n\n")
            f.write(self.main.plan.wbcopytables_path)
            for arg in self._transferer.helper_basic_arglist(True):
                f.write(' %s' % arg)
            f.write(' --source-password="******" --target-password="******" --table-file="%table_file%"')
            f.write(' --thread-count=%arg_worker_count% %arg_truncate_target% %arg_debug_output%')
            f.write("\n\n")
            f.write("REM Removes the file with the table definitions\n")
            f.write("DEL %s\n" % filename)
        else:
            f.write("# Source and target DB passwords\n")
            f.write("arg_source_password=\n")
            f.write("arg_target_password=\n")
            f.write("""
if [ -z "$arg_source_password" ] && [ -z "$arg_target_password" ] ;
then
    echo WARNING: Both source and target RDBMSes passwords are empty. You should edit this file to set them.
fi
""")
            f.write("arg_worker_count=%d\n" % worker_count)
            f.write("# Uncomment the following options according to your needs\n")
            f.write("\n")
            f.write("# Whether target tables should be truncated before copy\n")
            f.write(("" if truncate_target_tables else "# ") + "arg_truncate_target=--truncate-target\n")
            #f.write("# Copy tables incrementally. Useful for updating table contents after an initial migration\n")
            #f.write("#arg_incremental_copy=--incremental-copy\n")
            f.write("# Enable debugging output\n")
            f.write(("" if debug_table_copy else "# ") + "arg_debug_output=--log-level=debug3\n")
            f.write("\n")
            f.write(self.main.plan.wbcopytables_path)
            for arg in self._transferer.helper_basic_arglist(True):
                f.write(' %s' % arg)
            f.write(' --source-password="******" --target-password="******"')
            f.write(' --thread-count=$arg_worker_count $arg_truncate_target $arg_debug_output')
            for table in self._working_set.values():
                # NOTE(review): "\'" is just "'" in Python, so these replace()
                # calls are no-ops — the win32 branch uses r"\'" instead.
                # Looks like an escaping bug for quotes inside the single-quoted
                # shell arguments; confirm against wbcopytables before changing.
                opt = "--table '%s' '%s' '%s' '%s' '%s' '%s' '%s'" % (table["source_schema"], table["source_table"],
                                                                     table["target_schema"], table["target_table"],
                                                                     table["source_primary_key"].replace("'", "\'"),
                                                                     table["target_primary_key"].replace("'", "\'"),
                                                                     table["select_expression"].replace("'", "\'"))
                f.write(" " + opt)
            f.write("\n\n")
        f.close()

        self.send_info("Table copy script written to %s" % path)

    def _create_bulk_copy_script(self):
        """Generate the bulk (dump/load) copy script via DataCopyFactory."""
        script_path = self.main.plan.state.dataBulkTransferParams["GenerateBulkCopyScript"]

        conn_args = self._transferer.helper_connections_arglist()
        if conn_args['source_rdbms'] == 'mssql':
            conn_args['source_instance'] = self.main.plan.migrationSource.get_source_instance()
        source_os = self.main.plan.migrationSource.get_os()
        target_os = self.main.plan.migrationTarget.get_os()

        script = DataCopyFactory(source_os, target_os, conn_args['source_rdbms'])
        script.generate(self._working_set.values(), conn_args, script_path)

    def _count_rows(self):
        """Threaded task: store per-table row counts into the working set and log them."""
        self.send_info("Counting number of rows in tables...")
        total = self._transferer.count_table_rows(self._working_set)

        self.send_info("%i total rows in %i tables need to be copied:" % (total, len(self._working_set)))
        for task in self._working_set.values():
            self.send_info("- %s.%s: %s" % (task["source_schema"], task["source_table"], task.get("row_count", "error")))

    def _migrate_data(self):
        # update the label with the number of rows to copy here, since this is in the main thread
        total = 0
        table_count = len(self._working_set)
        for task in self._working_set.values():
            total += task.get("row_count", 0)
            self.create_transfer_log(task["target_table_object"])

        self.send_info("") # newline

        if self._working_set:
            thread_count = self.main.plan.state.dataBulkTransferParams.get("workerCount", 2)
            self.send_info("Migrating data...")
            # Suppress textual progress logging while the copy threads run.
            self._log_progress_text = False
            self._migrating_data = True
            try:
                succeeded_tasks = self._transferer.migrate_data(thread_count, self._working_set)
            finally:
                self._log_progress_text = True
                self._migrating_data = False

            self.send_info("") # newline
            self.send_info("Data copy results:")
            fully_copied = 0
            # Rebuilt here: fully-copied tables are excluded on a retry run.
            self._tables_to_exclude = list()
            self._count_of_failed_tables = 0
            for task in self._working_set.values():
                info = succeeded_tasks.get(task["target_schema"] + "." + task["target_table"], None)
                row_count = task.get("row_count", 0)
                if info:
                    ok, count = info
                else:
                    count = 0
                    ok = False
                if ok and count == row_count:
                    fully_copied = fully_copied + 1
                    target_table = "%s.%s" % (task["target_schema"], task["target_table"])
                    message = "Succeeded : copied %s of %s rows from %s.%s" % (count, row_count, task["source_schema"], task["source_table"])
                    self.add_log_entry(0, target_table, message)
                    self.send_info("- %s.%s has succeeded (%s of %s rows copied)" % (task["target_schema"], task["target_table"], count, row_count))
                    self._tables_to_exclude.append(task["target_table"])
                else:
                    self.send_info("- %s.%s has FAILED (%s of %s rows copied)" % (task["target_schema"], task["target_table"], count, row_count))
                    self._count_of_failed_tables = self._count_of_failed_tables + 1

            self.send_info("%i tables of %i were fully copied" % (fully_copied, table_count))

            if self._transferer.interrupted:
                raise grt.UserInterrupt("Canceled by user")

            if self._resume:
                self.send_info("Click [Retry] to retry copying remaining data from tables")
        else:
            self.send_info("Nothing to be done")

    def _verify_copy(self):
        """Placeholder verification step: currently only logs a message."""
        self.send_info("Checking if number of rows copied to target tables matches source tables...")

    def create_transfer_log(self, target_table):
        """Append a GrtLogObject for target_table to the plan's dataTransferLog."""
        log = grt.classes.GrtLogObject()
        log.logObject = target_table

        target_db_module = self.main.plan.migrationTarget.module_db()
        logSchema = target_db_module.quoteIdentifier(log.logObject.owner.name)
        logTable = target_db_module.quoteIdentifier(log.logObject.name)
        log.name = "%s.%s" % (logSchema, logTable)
        log.logObject = target_table

        self.main.plan.state.dataTransferLog.append(log)

    def get_log_object(self, target_table):
        """Return the transfer-log entry whose name matches target_table, or None."""
        for log in self.main.plan.state.dataTransferLog:
            if target_table == log.name:
                return log

    def add_log_entry(self, type, target_table, message):
        """Append a GrtLogEntry of the given type/message to target_table's log."""
        logObject = self.get_log_object(target_table)

        entry = grt.classes.GrtLogEntry()
        entry.entryType = type
        entry.name = message

        logObject.entries.append(entry)

    def tasks_finished(self):
        """All tasks done: hide the retry button."""
        self.show_retry_button(False)

    def tasks_failed(self, canceled):
        """Show the retry UI when a resumable copy failed; hide it otherwise."""
        if self._resume:
            self.show_retry_button(True)
            mforms.Utilities.show_message("Copying Tables",
                                          "Table data copy failed for %i tables. Please review the logs for details.\nIf you'd like to retry copying from the last successful point, click [Retry]." % self._count_of_failed_tables,
                                          "OK", "", "")
        else:
            self.show_retry_button(False)

    def go_retry(self):
        """Retry button handler: reset the page and restart the task list."""
        self._resume = False
        self.retry_button.show(False)
        self.reset()
        self.start()

    def show_retry_button(self, _show):
        """Toggle the retry button; Next is enabled only while retry is hidden."""
        self.retry_button.show(bool(_show))
        self.next_button.set_enabled(not _show)

    def _update_resume_status(self, _resume):
        # Set by the transfer machinery to indicate a resumable interruption.
        self._resume = _resume
class TransferMainView(WizardProgressPage): def __init__(self, main): WizardProgressPage.__init__(self, main, "Bulk Data Transfer", use_private_message_handling=True) self._autostart = True self._resume = False self.retry_button = mforms.newButton() self.retry_button.set_text("Retry") self.retry_button.add_clicked_callback(self.go_retry) self.retry_box = mforms.newBox(True) self.content.remove(self._detail_label) self.retry_box.add(self._detail_label, True, True) self.retry_box.add(self.retry_button, False, True) self.content.add(self.retry_box, False, False) self.retry_button.show(False) self.main.add_wizard_page(self, "DataMigration", "Bulk Data Transfer") self._tables_to_exclude = list() def page_activated(self, advancing): if advancing: options = self.main.plan.state.dataBulkTransferParams copy_script = options.get("GenerateCopyScript", None) bulk_copy_script = options.get("GenerateBulkCopyScript", None) self.add_task(self._prepare_copy, "Prepare information for data copy") if copy_script != None: self._copy_script_task = self.add_task(self._create_copy_script, "Create shell script for data copy") if bulk_copy_script != None: self._bulk_copy_script_task = self.add_task( self._create_bulk_copy_script, "Create shell script for bulk data copy" ) if options.get("LiveDataCopy", False) or options.get("GenerateDumpScript", False): self._migrate_task1 = self.add_threaded_task(self._count_rows, "Determine number of rows to copy") self._migrate_task2 = self.add_threaded_task(self._migrate_data, "Copy data to target RDBMS") self._migrating_data = False self._progress_per_table = {} if options.get("LiveDataCopy", False): source_password = self.main.plan.migrationSource.password if source_password is None: source_password = request_password(self.main.plan.migrationSource.connection) target_password = self.main.plan.migrationTarget.password if target_password is None: if ( self.main.plan.migrationTarget.connection.hostIdentifier == 
self.main.plan.migrationSource.connection.hostIdentifier ): if ( self.main.plan.migrationTarget.connection.parameterValues["userName"] == self.main.plan.migrationSource.connection.parameterValues["userName"] ): target_password = source_password if target_password is None: target_password = request_password(self.main.plan.migrationTarget.connection) else: source_password = None target_password = None self._transferer = DataMigrator( self, self.main.plan.state.dataBulkTransferParams, self.main.plan.migrationSource.connection, source_password, self.main.plan.migrationTarget.connection, target_password, ) self._transferer.copytable_path = self.main.plan.wbcopytables_path WizardProgressPage.page_activated(self, advancing) def go_back(self): self.clear_tasks() self.reset(True) WizardProgressPage.go_back(self) def update_status(self): return WizardProgressPage.update_status(self) def _prepare_copy(self): # create work list source_catalog = self.main.plan.migrationSource.catalog tables = self.main.plan.state.dataBulkTransferParams["tableList"] has_catalogs = self.main.plan.migrationSource.connection.driver.owner.doesSupportCatalogs > 0 has_schema = self.main.plan.migrationSource.connection.driver.owner.doesSupportCatalogs >= 0 source_db_module = self.main.plan.migrationSource.module_db() target_db_module = self.main.plan.migrationTarget.module_db() self._working_set = {} for table in tables: # find the source table stable = None for sschema in source_catalog.schemata: if sschema.name == table.owner.oldName: for t in sschema.tables: if t.name == table.oldName: stable = t break break if not stable: self.send_error("Source table for %s (%s) not found, skipping...\n" % (table.name, table.oldName)) continue if table.name in self._tables_to_exclude: continue if has_catalogs: schema_name = source_db_module.quoteIdentifier(stable.owner.owner.name) if stable.oldName: # oldName already comes pre-quoted from the reveng stage table_name = stable.oldName else: table_name = ( 
source_db_module.quoteIdentifier(stable.owner.name) + "." + source_db_module.quoteIdentifier(stable.name) ) else: if has_schema: schema_name = source_db_module.quoteIdentifier(stable.owner.name) else: schema_name = "" table_name = source_db_module.quoteIdentifier(stable.name) targ_schema_name = target_db_module.quoteIdentifier(table.owner.name) targ_table_name = target_db_module.quoteIdentifier(table.name) self._working_set[schema_name + "." + table_name] = { "table": table, "source_schema": schema_name, "source_table": table_name, "target_schema": targ_schema_name, "target_table": targ_table_name, "target_table_object": table, } select_expression = [] source_pk_list = [] target_pk_list = [] for column in table.columns: if column.generated: continue if table.isPrimaryKeyColumn(column): source_pk_list.append(source_db_module.quoteIdentifier(column.oldName)) target_pk_list.append(target_db_module.quoteIdentifier(column.name)) cast = table.customData.get("columnTypeCastExpression:%s" % column.name, None) if cast: select_expression.append(cast.replace("?", source_db_module.quoteIdentifier(column.oldName))) else: select_expression.append(source_db_module.quoteIdentifier(column.oldName)) self._working_set[schema_name + "." + table_name]["source_primary_key"] = ( ",".join(source_pk_list) if len(source_pk_list) > 0 else "-" ) self._working_set[schema_name + "." + table_name]["target_primary_key"] = ( ",".join(target_pk_list) if len(target_pk_list) > 0 else "-" ) self._working_set[schema_name + "." 
+ table_name]["select_expression"] = ", ".join(select_expression) # source_db_module = self.main.plan.migrationSource.module_db() # source_table = source_db_module.fullyQualifiedObjectName(stable) def _create_copy_script(self): path = self.main.plan.state.dataBulkTransferParams["GenerateCopyScript"] debug_table_copy = self.main.plan.state.dataBulkTransferParams["DebugTableCopy"] truncate_target_tables = self.main.plan.state.dataBulkTransferParams["TruncateTargetTables"] worker_count = self.main.plan.state.dataBulkTransferParams["workerCount"] f = open(path, "w+") if sys.platform == "win32": def cmt(s): return "REM " + s + "\n" else: os.chmod(path, 0700) def cmt(s): return "# " + s + "\n" f.write("#!/bin/sh\n") f.write(cmt("Workbench Table Data copy script")) f.write(cmt("Workbench Version: %s" % Version.fromgrt(grt.root.wb.info.version))) f.write(cmt("")) f.write(cmt("Execute this to copy table data from a source RDBMS to MySQL.")) f.write(cmt("Edit the options below to customize it. You will need to provide passwords, at least.")) f.write(cmt("")) f.write( cmt( "Source DB: %s (%s)" % ( self.main.plan.migrationSource.connection.hostIdentifier, self.main.plan.migrationSource.connection.driver.owner.caption, ) ) ) f.write(cmt("Target DB: %s" % self.main.plan.migrationTarget.connection.hostIdentifier)) f.write("\n\n") if sys.platform == "win32": f.write("@ECHO OFF\n") f.write("REM Source and target DB passwords\n") f.write("set arg_source_password=\n") f.write("set arg_target_password=\n") f.write( """ IF [%arg_source_password%] == [] ( IF [%arg_target_password%] == [] ( ECHO WARNING: Both source and target RDBMSes passwords are empty. You should edit this file to set them. 
) ) """ ) f.write("set arg_worker_count=%d\n" % worker_count) f.write("REM Uncomment the following options according to your needs\n") f.write("\n") f.write("REM Whether target tables should be truncated before copy\n") f.write(("" if truncate_target_tables else "REM ") + "set arg_truncate_target=--truncate-target\n") # f.write("REM Copy tables incrementally. Useful for updating table contents after an initial migration\n") # f.write("REM set arg_incremental_copy=--incremental-copy\n") f.write("REM Enable debugging output\n") f.write(("" if debug_table_copy else "REM ") + "set arg_debug_output=--log-level=debug3\n") f.write("\n\n") f.write("REM Creation of file with table definitions for copytable\n\n") # Creates a temporary file name with the tables to be migrated filename = '"%TMP%\wb_tables_to_migrate.txt"' f.write("set table_file=%s\n" % filename) f.write("TYPE NUL > %s\n" % filename) for table in self._working_set.values(): fields = [] fields.append(table["source_schema"]) fields.append(table["source_table"]) fields.append(table["target_schema"]) fields.append(table["target_table"]) fields.append(table["source_primary_key"].replace("'", r"\'")) fields.append(table["target_primary_key"].replace("'", r"\'")) fields.append(table["select_expression"].replace("'", r"\'")) line = "ECHO %s >> %s" % ("\t".join(fields), filename) f.write(line + "\n") f.write("\n\n") f.write(self.main.plan.wbcopytables_path) for arg in self._transferer.helper_basic_arglist(True): f.write(" %s" % arg) f.write( ' --source-password="******" --target-password="******" --table-file="%table_file%"' ) f.write(" --thread-count=%arg_worker_count% %arg_truncate_target% %arg_debug_output%") f.write("\n\n") f.write("REM Removes the file with the table definitions\n") f.write("DEL %s\n" % filename) else: f.write("# Source and target DB passwords\n") f.write("arg_source_password=\n") f.write("arg_target_password=\n") f.write( """ if [ -z "$arg_source_password" ] && [ -z "$arg_target_password" ] ; 
then echo WARNING: Both source and target RDBMSes passwords are empty. You should edit this file to set them. fi """ ) f.write("arg_worker_count=%d\n" % worker_count) f.write("# Uncomment the following options according to your needs\n") f.write("\n") f.write("# Whether target tables should be truncated before copy\n") f.write(("" if truncate_target_tables else "# ") + "arg_truncate_target=--truncate-target\n") # f.write("# Copy tables incrementally. Useful for updating table contents after an initial migration\n") # f.write("#arg_incremental_copy=--incremental-copy\n") f.write("# Enable debugging output\n") f.write(("" if debug_table_copy else "# ") + "arg_debug_output=--log-level=debug3\n") f.write("\n") f.write(self.main.plan.wbcopytables_path) for arg in self._transferer.helper_basic_arglist(True): f.write(" %s" % arg) f.write(' --source-password="******" --target-password="******"') f.write(" --thread-count=$arg_worker_count $arg_truncate_target $arg_debug_output") for table in self._working_set.values(): opt = "--table '%s' '%s' '%s' '%s' '%s' '%s' '%s'" % ( table["source_schema"], table["source_table"], table["target_schema"], table["target_table"], table["source_primary_key"].replace("'", "'"), table["target_primary_key"].replace("'", "'"), table["select_expression"].replace("'", "'"), ) f.write(" " + opt) f.write("\n\n") f.close() self.send_info("Table copy script written to %s" % path) def _create_bulk_copy_script(self): script_path = self.main.plan.state.dataBulkTransferParams["GenerateBulkCopyScript"] conn_args = self._transferer.helper_connections_arglist() if conn_args["source_rdbms"] == "mssql": conn_args["source_instance"] = self.main.plan.migrationSource.get_source_instance() source_os = self.main.plan.migrationSource.get_os() target_os = self.main.plan.migrationTarget.get_os() script = DataCopyFactory(source_os, target_os, conn_args["source_rdbms"]) script.generate(self._working_set.values(), conn_args, script_path) def _count_rows(self): 
# (_count_rows body, continued from the def above) Ask the transferer to
# count source rows, then log a per-table summary; a table whose count
# failed shows "error" instead of a number.
self.send_info("Counting number of rows in tables...")
total = self._transferer.count_table_rows(self._working_set)
self.send_info("%i total rows in %i tables need to be copied:" % (total, len(self._working_set)))
for task in self._working_set.values():
    self.send_info(
        "- %s.%s: %s"
        % (task["source_schema"], task["source_table"], task.get("row_count", "error"))
    )

def _migrate_data(self):
    """Run the actual data copy over the working set and log per-table results.

    Records failed tables in self._count_of_failed_tables and fully copied
    tables in self._tables_to_exclude -- apparently consumed by the
    retry/resume path (see tasks_failed/go_retry); confirm with callers.
    """
    # update the label with the number of rows to copy here, since this is in the main thread
    total = 0
    table_count = len(self._working_set)
    for task in self._working_set.values():
        total += task.get("row_count", 0)
        self.create_transfer_log(task["target_table_object"])
    self.send_info("")  # newline
    if self._working_set:
        thread_count = self.main.plan.state.dataBulkTransferParams.get("workerCount", 2)
        self.send_info("Migrating data...")
        # Suppress per-line progress text while the threaded copy runs;
        # restored in the finally block even if migrate_data raises.
        self._log_progress_text = False
        self._migrating_data = True
        try:
            succeeded_tasks = self._transferer.migrate_data(thread_count, self._working_set)
        finally:
            self._log_progress_text = True
            self._migrating_data = False
        self.send_info("")  # newline
        self.send_info("Data copy results:")
        fully_copied = 0
        self._tables_to_exclude = list()
        self._count_of_failed_tables = 0
        for task in self._working_set.values():
            # succeeded_tasks is keyed by the target "schema.table" name.
            info = succeeded_tasks.get(task["target_schema"] + "." + task["target_table"], None)
            row_count = task.get("row_count", 0)
            if info:
                ok, count = info
            else:
                count = 0
                ok = False
            if ok and count == row_count:
                fully_copied = fully_copied + 1
                target_table = "%s.%s" % (task["target_schema"], task["target_table"])
                message = "Succeeded : copied %s of %s rows from %s.%s" % (
                    count,
                    row_count,
                    task["source_schema"],
                    task["source_table"],
                )
                self.add_log_entry(0, target_table, message)
                self.send_info(
                    "- %s.%s has succeeded (%s of %s rows copied)"
                    % (task["target_schema"], task["target_table"], count, row_count)
                )
                self._tables_to_exclude.append(task["target_table"])
            else:
                self.send_info(
                    "- %s.%s has FAILED (%s of %s rows copied)"
                    % (task["target_schema"], task["target_table"], count, row_count)
                )
                self._count_of_failed_tables = self._count_of_failed_tables + 1
        self.send_info("%i tables of %i were fully copied" % (fully_copied, table_count))
        if self._transferer.interrupted:
            raise grt.UserInterrupt("Canceled by user")
        if self._resume:
            self.send_info("Click [Retry] to retry copying remaining data from tables")
    else:
        self.send_info("Nothing to be done")

def _verify_copy(self):
    # NOTE(review): only announces the check; no row-count comparison is
    # actually performed in this method.
    self.send_info("Checking if number of rows copied to target tables matches source tables...")

def create_transfer_log(self, target_table):
    """Create a GRT log object for target_table and register it in the plan
    state's dataTransferLog, named by quoted "schema.table"."""
    log = grt.classes.GrtLogObject()
    log.logObject = target_table
    target_db_module = self.main.plan.migrationTarget.module_db()
    logSchema = target_db_module.quoteIdentifier(log.logObject.owner.name)
    logTable = target_db_module.quoteIdentifier(log.logObject.name)
    log.name = "%s.%s" % (logSchema, logTable)
    # NOTE(review): redundant -- logObject was already set to target_table above.
    log.logObject = target_table
    self.main.plan.state.dataTransferLog.append(log)

def get_log_object(self, target_table):
    """Return the transfer-log object whose name equals target_table, or None."""
    for log in self.main.plan.state.dataTransferLog:
        if target_table == log.name:
            return log

def add_log_entry(self, type, target_table, message):
    """Append an entry (entryType=type, name=message) to target_table's log.

    NOTE(review): parameter name `type` shadows the builtin.
    """
    logObject = self.get_log_object(target_table)
    entry = grt.classes.GrtLogEntry()
    entry.entryType = type
    entry.name = message
    logObject.entries.append(entry)

def tasks_finished(self):
    # All tasks completed: no retry needed, hide the button.
    self.show_retry_button(False)

def tasks_failed(self, canceled):
    """Failure hook: offer [Retry] when a resume point exists (self._resume)."""
    if self._resume:
        self.show_retry_button(True)
        mforms.Utilities.show_message(
            "Copying Tables",
            "Table data copy failed for %i tables. Please review the logs for details.\nIf you'd like to retry copying from the last successful point, click [Retry]."
            % self._count_of_failed_tables,
            "OK",
            "",
            "",
        )
    else:
        self.show_retry_button(False)

def go_retry(self):
    """[Retry] button handler: clear the resume flag and restart the task list."""
    self._resume = False
    self.retry_button.show(False)
    self.reset()
    self.start()

def show_retry_button(self, _show):
    # Retry and Next are mutually exclusive in the wizard footer.
    self.retry_button.show(bool(_show))
    self.next_button.set_enabled(not _show)

def _update_resume_status(self, _resume):
    # Remember whether a retry/resume point is available for tasks_failed().
    self._resume = _resume
class TransferMainView(WizardProgressPage):
    """Wizard page that performs the bulk data transfer to the target RDBMS.

    NOTE(review): this class largely duplicates the methods earlier in this
    file (which additionally carry retry/resume support and configurable
    truncate/debug options); it looks like an older revision kept alongside
    the newer one -- confirm which is actually registered with the wizard.
    """

    def __init__(self, main):
        WizardProgressPage.__init__(self, main, "Bulk Data Transfer", use_private_message_handling=True)
        self.main.add_wizard_page(self, "DATA MIGRATION", "Bulk Data Transfer")
        # Tasks execute in registration order; threaded tasks run off the UI thread.
        self.add_task(self._prepare_copy, "Prepare information for data copy")
        self._copy_script_task = self.add_task(
            self._create_copy_script, "Create shell script for data copy")
        self._migrate_task1 = self.add_threaded_task(
            self._count_rows, "Determine number of rows to copy")
        self._migrate_task2 = self.add_threaded_task(
            self._count_rows if False else self._count_rows, "") if False else self.add_threaded_task(
            self._migrate_data, "Copy data to target RDBMS")

    def page_activated(self, advancing):
        """Enable/disable tasks from the bulk-transfer options and construct the
        DataMigrator (resolving source/target passwords) before delegating to
        the base class implementation."""
        if advancing:
            options = self.main.plan.state.dataBulkTransferParams
            copy_script = options.get("GenerateCopyScript", None)
            # NOTE(review): `!= None` -- idiomatic form is `is not None`.
            self._copy_script_task.set_enabled(copy_script != None)
            # Row counting and copying only make sense for a live copy or
            # when a dump script is requested.
            if options.get("LiveDataCopy", False) or options.get(
                    "GenerateDumpScript", False):
                self._migrate_task1.set_enabled(True)
                self._migrate_task2.set_enabled(True)
            else:
                self._migrate_task1.set_enabled(False)
                self._migrate_task2.set_enabled(False)
            self._migrating_data = False
            self._progress_per_table = {}
            if options.get("LiveDataCopy", False):
                source_password = self.main.plan.migrationSource.password
                if source_password is None:
                    source_password = request_password(
                        self.main.plan.migrationSource.connection)
                target_password = self.main.plan.migrationTarget.password
                if target_password is None:
                    # Reuse the source password when host and user name match.
                    if self.main.plan.migrationTarget.connection.hostIdentifier == self.main.plan.migrationSource.connection.hostIdentifier:
                        if self.main.plan.migrationTarget.connection.parameterValues[
                                'userName'] == self.main.plan.migrationSource.connection.parameterValues[
                                'userName']:
                            target_password = source_password
                if target_password is None:
                    target_password = request_password(
                        self.main.plan.migrationTarget.connection)
            else:
                # Script-only mode: no live connections, so no passwords needed.
                source_password = None
                target_password = None
            self._transferer = DataMigrator(
                self, self.main.plan.state.dataBulkTransferParams,
                self.main.plan.migrationSource.connection, source_password,
                self.main.plan.migrationTarget.connection, target_password)
            self._transferer.copytable_path = self.main.plan.wbcopytables_path
        WizardProgressPage.page_activated(self, advancing)

    def go_back(self):
        # Reset progress state before navigating back.
        self.reset(True)
        WizardProgressPage.go_back(self)

    def update_status(self):
        return WizardProgressPage.update_status(self)

    def _prepare_copy(self):
        """Build self._working_set: one dict entry per table to copy, keyed by
        the quoted source "schema.table" name, including the SELECT expression
        used to read the source rows."""
        # create work list
        source_catalog = self.main.plan.migrationSource.catalog
        tables = self.main.plan.state.dataBulkTransferParams["tableList"]
        has_catalogs = self.main.plan.migrationSource.connection.driver.owner.doesSupportCatalogs
        source_db_module = self.main.plan.migrationSource.module_db()
        target_db_module = self.main.plan.migrationTarget.module_db()
        self._working_set = {}
        for table in tables:
            # find the source table by matching the pre-migration (old) names
            stable = None
            for sschema in source_catalog.schemata:
                if sschema.name == table.owner.oldName:
                    for t in sschema.tables:
                        if t.name == table.oldName:
                            stable = t
                            break
                    break
            if not stable:
                self.send_error(
                    "Source table for %s (%s) not found, skipping...\n" %
                    (table.name, table.oldName))
                continue
            if has_catalogs:
                # Catalog-capable source: the "schema" slot holds the catalog
                # and the table name may need an owning-schema prefix.
                schema_name = source_db_module.quoteIdentifier(
                    stable.owner.owner.name)
                if stable.oldName:
                    # oldName already comes pre-quoted from the reveng stage
                    table_name = stable.oldName
                else:
                    table_name = source_db_module.quoteIdentifier(
                        stable.owner.name
                    ) + "." + source_db_module.quoteIdentifier(stable.name)
            else:
                schema_name = source_db_module.quoteIdentifier(
                    stable.owner.name)
                table_name = source_db_module.quoteIdentifier(stable.name)
            targ_schema_name = target_db_module.quoteIdentifier(
                table.owner.name)
            targ_table_name = target_db_module.quoteIdentifier(table.name)
            self._working_set[schema_name + "." + table_name] = {
                "table": table,
                "source_schema": schema_name,
                "source_table": table_name,
                "target_schema": targ_schema_name,
                "target_table": targ_table_name,
                "target_table_object": table
            }
            # Column list for the SELECT, applying any per-column cast
            # expression ("?" is replaced by the quoted column name).
            select_expression = []
            for column in table.columns:
                cast = table.customData.get(
                    "columnTypeCastExpression:%s" % column.name, None)
                if cast:
                    select_expression.append(
                        cast.replace(
                            "?", source_db_module.quoteIdentifier(column.name)))
                else:
                    select_expression.append(
                        source_db_module.quoteIdentifier(column.name))
            self._working_set[schema_name + "." + table_name]["select_expression"] = ", ".join(
                select_expression)
            # source_db_module = self.main.plan.migrationSource.module_db()
            # source_table = source_db_module.fullyQualifiedObjectName(stable)

    def _create_copy_script(self):
        """Write a batch (Windows) or shell (Unix) script that invokes
        wbcopytables to copy every table in the working set."""
        path = self.main.plan.state.dataBulkTransferParams[
            "GenerateCopyScript"]
        f = open(path, "w+")
        if sys.platform == "win32":
            def cmt(s):
                return "REM " + s + "\n"
        else:
            # NOTE(review): 0700 is Python-2 octal syntax (0o700 in Python 3).
            os.chmod(path, 0700)
            def cmt(s):
                return "# " + s + "\n"
            f.write("#!/bin/sh\n")
        f.write(cmt("Workbench Table Data copy script"))
        f.write(cmt(""))
        f.write(
            cmt("Execute this to copy table data from a source RDBMS to MySQL."
                ))
        f.write(
            cmt("Edit the options below to customize it. You will need to provide passwords, at least."
                ))
        f.write(cmt(""))
        f.write(
            cmt("Source DB: %s (%s)" %
                (self.main.plan.migrationSource.connection.hostIdentifier, self
                 .main.plan.migrationSource.connection.driver.owner.caption)))
        f.write(
            cmt("Target DB: %s" %
                self.main.plan.migrationTarget.connection.hostIdentifier))
        f.write("\n\n")
        if sys.platform == "win32":
            f.write("REM Source and target DB passwords\n")
            f.write("set arg_source_password=\n")
            f.write("set arg_target_password=\n")
            f.write(
                "REM Uncomment the following options according to your needs\n"
            )
            f.write("\n")
            f.write(
                "REM Whether target tables should be truncated before copy\n")
            f.write("REM set arg_truncate_target=--truncate-target\n")
            #f.write("REM Copy tables incrementally. Useful for updating table contents after an initial migration\n")
            #f.write("REM set arg_incremental_copy=--incremental-copy\n")
            f.write("REM Enable debugging output\n")
            f.write("REM set arg_debug_output=--log-level=debug3\n")
            f.write("\n\n")
            f.write(
                "REM Creation of file with table definitions for copytable\n\n"
            )
            # Creates a temporary file name with the tables to be migrated
            filename = "wb_tables_to_migrate.txt"
            f.write("set table_file=%s\n" % filename)
            f.write("TYPE NUL > %s\n" % filename)
            for table in self._working_set.values():
                fields = []
                fields.append(table["source_schema"])
                fields.append(table["source_table"])
                fields.append(table["target_schema"])
                fields.append(table["target_table"])
                # NOTE(review): "\'" equals "'", so this replace is a no-op;
                # the newer variant of this method uses r"\'" -- confirm intent.
                fields.append(table["select_expression"].replace("'", "\'"))
                line = "ECHO %s >> %s" % ("\t".join(fields), filename)
                f.write(line + "\n")
            f.write("\n\n")
            f.write(self.main.plan.wbcopytables_path)
            for arg in self._transferer.helper_basic_arglist():
                # Quote arguments containing separators or spaces.
                if ';' in arg or ' ' in arg:
                    f.write(" '%s'" % arg)
                else:
                    f.write(' %s' % arg)
            f.write(
                ' --source-password="******" --target-password="******" --table-file="%table_file%" %arg_truncate_target% %arg_debug_output%'
            )
            f.write("\n\n")
            # NOTE(review): "tabla" is a typo in the generated script comment.
            f.write("REM Removes the file with the tabla definitions\n")
            f.write("DEL %s\n" % filename)
        else:
            f.write("# Source and target DB passwords\n")
            f.write("arg_source_password=\n")
            f.write("arg_target_password=\n")
            f.write(
                "# Uncomment the following options according to your needs\n")
            f.write("\n")
            f.write(
                "# Whether target tables should be truncated before copy\n\n")
            f.write("#arg_truncate_target=--truncate-target\n")
            #f.write("# Copy tables incrementally. Useful for updating table contents after an initial migration\n")
            #f.write("#arg_incremental_copy=--incremental-copy\n")
            f.write("# Enable debugging output\n")
            f.write("#arg_debug_output=--log-level=debug3\n")
            f.write("\n")
            f.write(self.main.plan.wbcopytables_path)
            for arg in self._transferer.helper_basic_arglist():
                if ';' in arg or ' ' in arg:
                    f.write(" '%s'" % arg)
                else:
                    f.write(' %s' % arg)
            f.write(
                ' --source-password="******" --target-password="******" $arg_truncate_target $arg_debug_output'
            )
            for table in self._working_set.values():
                # NOTE(review): "\'" equals "'" -- no-op replace, see above.
                opt = "--table '%s' '%s' '%s' '%s' '%s'" % (
                    table["source_schema"], table["source_table"],
                    table["target_schema"], table["target_table"],
                    table["select_expression"].replace("'", "\'"))
                f.write(" " + opt)
        f.write("\n\n")
        f.close()
        self.send_info("Table copy script written to %s" % path)

    def _count_rows(self):
        """Count source rows per table (threaded task) and log a summary;
        tables whose count failed show "error" instead of a number."""
        self.send_info("Counting number of rows in tables...")
        total = self._transferer.count_table_rows(self._working_set)
        self.send_info("%i total rows in %i tables need to be copied:" %
                       (total, len(self._working_set)))
        for task in self._working_set.values():
            self.send_info("- %s.%s: %s" %
                           (task["source_schema"], task["source_table"],
                            task.get("row_count", "error")))

    def _migrate_data(self):
        """Copy the working set to the target RDBMS and log per-table results."""
        # update the label with the number of rows to copy here, since this is in the main thread
        total = 0
        table_count = len(self._working_set)
        for task in self._working_set.values():
            total += task.get("row_count", 0)
            self.create_transfer_log(task["target_table_object"])
        self.send_info("")  # newline
        if self._working_set:
            thread_count = self.main.plan.state.dataBulkTransferParams.get(
                "workerCount", 2)
            self.send_info("Migrating data...")
            # Suppress per-line progress text during the threaded copy;
            # restored in the finally block even if migrate_data raises.
            self._log_progress_text = False
            self._migrating_data = True
            try:
                succeeded_tasks = self._transferer.migrate_data(
                    thread_count, self._working_set)
            finally:
                self._log_progress_text = True
                self._migrating_data = False
            self.send_info("")  # newline
            self.send_info("Data copy results:")
            fully_copied = 0
            for task in self._working_set.values():
                # Results are keyed by the target "schema.table" name.
                info = succeeded_tasks.get(
                    task["target_schema"] + "." + task["target_table"], None)
                row_count = task.get("row_count", 0)
                if info:
                    ok, count = info
                else:
                    count = 0
                    ok = False
                if ok and count == row_count:
                    fully_copied = fully_copied + 1
                    target_table = "%s.%s" % (task["target_schema"],
                                              task["target_table"])
                    message = "Succeeded : copied %s of %s rows from %s.%s" % (
                        count, row_count, task["source_schema"],
                        task["source_table"])
                    self.add_log_entry(0, target_table, message)
                    self.send_info(
                        "- %s.%s has succeeded (%s of %s rows copied)" %
                        (task["target_schema"], task["target_table"], count,
                         row_count))
                else:
                    self.send_info(
                        "- %s.%s has FAILED (%s of %s rows copied)" %
                        (task["target_schema"], task["target_table"], count,
                         row_count))
            self.send_info("%i tables of %i were fully copied" %
                           (fully_copied, table_count))
            if self._transferer.interrupted:
                raise grt.UserInterrupt("Canceled by user")
        else:
            self.send_info("Nothing to be done")

    def _verify_copy(self):
        # NOTE(review): only announces the check; no row-count comparison is
        # actually performed in this method.
        self.send_info(
            "Checking if number of rows copied to target tables matches source tables..."
        )

    def create_transfer_log(self, target_table):
        """Create a GRT log object for target_table and register it in the
        plan state's dataTransferLog, named by quoted "schema.table"."""
        log = grt.classes.GrtLogObject()
        log.logObject = target_table
        target_db_module = self.main.plan.migrationTarget.module_db()
        logSchema = target_db_module.quoteIdentifier(log.logObject.owner.name)
        logTable = target_db_module.quoteIdentifier(log.logObject.name)
        log.name = "%s.%s" % (logSchema, logTable)
        # NOTE(review): redundant -- logObject was already set above.
        log.logObject = target_table
        self.main.plan.state.dataTransferLog.append(log)

    def get_log_object(self, target_table):
        """Return the transfer-log object whose name equals target_table, or None."""
        for log in self.main.plan.state.dataTransferLog:
            if target_table == log.name:
                return log

    def add_log_entry(self, type, target_table, message):
        """Append an entry (entryType=type, name=message) to target_table's log.

        NOTE(review): parameter name `type` shadows the builtin.
        """
        logObject = self.get_log_object(target_table)
        entry = grt.classes.GrtLogEntry()
        entry.entryType = type
        entry.name = message
        logObject.entries.append(entry)