def setUp(self):
    settings.read_properties("pydbcopy.conf")

    self.source_host = MySQLHost(settings.source_host, settings.source_user, \
                                 settings.source_password, settings.source_database)
    self.dest_host = MySQLHost(settings.target_host, settings.target_user, \
                               settings.target_password, settings.target_database)

    #
    # Just in case, let's tear down an old or canceled run...
    #
    self.tearDown()

    #
    # Bring up the fixture
    #
    c = self.source_host.conn.cursor()
    c.execute("SET AUTOCOMMIT=1")

    c.execute("create table if not exists tmp_pydbcopy_test ( id integer primary key, test_string varchar(50) )")
    c.execute("insert into tmp_pydbcopy_test (id,test_string) values (1,'test')")

    c.execute("create table if not exists tmp_hashed_pydbcopy_test ( id integer primary key, test_string varchar(50), fieldHash varchar(50) )")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (1,'test','123')")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (2,'test1','234')")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (3,'test2','345')")

    c.execute("create table if not exists tmp_pydbcopy_modified_test ( id integer primary key, test_string varchar(50), lastModifiedDate timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP )")
    c.execute("insert into tmp_pydbcopy_modified_test (id,test_string,lastModifiedDate) values (1,'test','2010-11-23 05:00:00')")

    c.close()
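# The setUp above calls self.tearDown() before building the fixture, but the
# tearDown itself is not part of this excerpt. A minimal sketch of what such a
# cleanup might look like, assuming it only needs to drop the three fixture
# tables on the source host (the DROP statements below are an assumption, not
# the project's actual teardown code):
def tearDown(self):
    c = self.source_host.conn.cursor()
    # Drop the fixture tables created in setUp; "if exists" makes this safe
    # even when a previous run never got as far as creating them.
    for table in ("tmp_pydbcopy_test", "tmp_hashed_pydbcopy_test", "tmp_pydbcopy_modified_test"):
        c.execute("drop table if exists %s" % table)
    c.close()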
def setUp(self):
    settings.read_properties("pydbcopy.conf")

    self.source_host = MySQLHost(settings.source_host, settings.source_user, \
                                 settings.source_password, settings.source_database)
    self.dest_host = MySQLHost(settings.target_host, settings.target_user, \
                               settings.target_password, settings.target_database)

    #
    # Bring up the fixture
    #
    c = self.source_host.conn.cursor()
    c.execute("SET AUTOCOMMIT=1")

    # Create tmp_pydbcopy_test table and a single row
    c.execute("create table if not exists tmp_pydbcopy_test ( id integer primary key, test_string varchar(50) )")
    c.execute("insert into tmp_pydbcopy_test (id,test_string) values (1,'test')")

    # Create tmp_hashed_pydbcopy_test table and three rows
    c.execute("create table if not exists tmp_hashed_pydbcopy_test ( id integer primary key, test_string varchar(50), fieldHash varchar(50) )")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (1,'test','123')")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (2,'test1','234')")
    c.execute("insert into tmp_hashed_pydbcopy_test (id,test_string,fieldHash) values (3,'test2','345')")

    # Create tmp_pydbcopy_modified_test table and a single row
    c.execute("create table if not exists tmp_pydbcopy_modified_test ( id integer primary key, test_string varchar(50), lastModifiedDate timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP )")
    c.execute("insert into tmp_pydbcopy_modified_test (id,test_string,lastModifiedDate) values (1,'test','2010-11-23 05:00:00')")
    c.close()

    # Mirror the modified-date table on the target with a row whose
    # lastModifiedDate is one day older than the source's copy.
    c = self.dest_host.conn.cursor()
    c.execute("SET AUTOCOMMIT=1")
    c.execute("create table if not exists tmp_pydbcopy_modified_test ( id integer primary key, test_string varchar(50), lastModifiedDate timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP )")
    c.execute("insert into tmp_pydbcopy_modified_test (id,test_string,lastModifiedDate) values (1,'test','2010-11-22 05:00:00')")
    c.close()
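# This second fixture creates tmp_pydbcopy_modified_test on both the source and
# the target, so a matching cleanup would have to touch both connections. A
# short sketch under the same assumptions as the tearDown sketch above
# (hypothetical, not the project's actual code):
def tearDown(self):
    for host in (self.source_host, self.dest_host):
        c = host.conn.cursor()
        # Only tmp_pydbcopy_modified_test exists on the target, but dropping
        # all three fixture tables with "if exists" is harmless on either host.
        for table in ("tmp_pydbcopy_test", "tmp_hashed_pydbcopy_test", "tmp_pydbcopy_modified_test"):
            c.execute("drop table if exists %s" % table)
        c.close()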
def main(argv=None):
    """
    This is the main routine for pydbcopy. Pydbcopy copies a set of tables
    from one database (possibly remote) to a local database. If possible an
    incremental copy is performed (see the perform_incremental_copy routine
    below). If an incremental copy cannot be performed a full copy is
    performed (see the perform_full_copy routine below).

    This routine reads settings from a prop file and then overrides any
    properties therein with command line args. See the usage output for a
    description of the arguments.

    This program is multi-process by default but can be switched into single
    process mode for debugging (see the --debug option in the usage). Pydbcopy
    will use a number of processes equal to the number of cpus detected on the
    system minus one.
    """
    if argv is None:
        argv = sys.argv

    parser = get_option_parser()
    options = parser.parse_args(argv[1:])[0]

    if options.properties:
        settings.read_properties(options.properties)

    # Command line options override properties from the prop file
    if options.source_host is not None:
        settings.source_host = options.source_host
    if options.source_user is not None:
        settings.source_user = options.source_user
    if options.source_password is not None:
        settings.source_password = options.source_password
    if options.source_database is not None:
        settings.source_database = options.source_database
    if options.target_host is not None:
        settings.target_host = options.target_host
    if options.target_user is not None:
        settings.target_user = options.target_user
    if options.target_password is not None:
        settings.target_password = options.target_password
    if options.target_database is not None:
        settings.target_database = options.target_database
    if options.scp_user is not None:
        settings.scp_user = options.scp_user
    if options.dump_dir is not None:
        settings.dump_dir = options.dump_dir
    if options.verify_threshold is not None:
        settings.verify_threshold = options.verify_threshold
    if options.force_full is not None:
        settings.force_full = options.force_full
    if options.no_last_mod_check is not None:
        settings.no_last_mod_check = options.no_last_mod_check
    if options.debug is not None:
        settings.debug = options.debug
    if options.tables is not None:
        settings.tables = options.tables.split()
    if options.tables_to_skip_verification is not None:
        settings.tables_to_skip_verification = options.tables_to_skip_verification.split()
    if options.num_processes is not None and options.num_processes != '':
        settings.num_processes = int(options.num_processes)

    if settings.num_processes == 0:
        settings.num_processes = multiprocessing.cpu_count() - 1

    settings.verbosity = 0
    if options.verbose is not None and options.verbose is True:
        settings.verbosity = 1

    # check that we have a list of tables to copy
    if len(settings.tables) == 0:
        sys.stderr.write("Error: No tables specified.\n")
        return 1

    # check that user specified dump dir is readable/writable by all
    if not os.path.isdir(settings.dump_dir):
        os.makedirs(settings.dump_dir)

    if os.stat(settings.dump_dir)[stat.ST_MODE] != 0777:
        try:
            os.chmod(settings.dump_dir, 0777)
        except OSError:
            sys.stderr.write("Warning: unable to chmod 777 on '%s'. "
                             "Pydbcopy may not be able to clean up after itself!\n" \
                             % settings.dump_dir)

    if not os.access(settings.dump_dir, os.F_OK | os.R_OK | os.W_OK):
        sys.stderr.write("Error: unable to find or create a writable dump dir at '%s'\n" \
                         % settings.dump_dir)
        return 1

    # Configure the logging
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG if settings.verbosity else logging.INFO)
    formatter = logging.Formatter("%(asctime)s - %(processName)s - %(levelname)s - %(message)s")
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    logger.setLevel(logging.DEBUG if settings.verbosity else logging.INFO)

    if not settings.debug and settings.num_processes > 1:
        pool = multiprocessing.Pool(settings.num_processes)
        result_list = pool.map(verify_and_copy_table, settings.tables, 1)
    else:
        result_list = map(verify_and_copy_table, settings.tables)

    failed_tables = set()
    invalid_tables = set()
    skipped_tables = set()
    copied_tables = set()

    for result, table in map(None, result_list, settings.tables):
        if result == 1:
            skipped_tables.add(table)
        elif result == -1:
            invalid_tables.add(table)
        elif result < -1:
            failed_tables.add(table)
        else:
            copied_tables.add(table)

    logger.info('Summary for copy from source database %s on %s to target database %s on %s:' % \
                (settings.source_database, settings.source_host,
                 settings.target_database, settings.target_host))
    logger.info('--------------------------------------')
    if len(skipped_tables) > 0:
        logger.info(' Skipped: %s' % ', '.join(skipped_tables))
    if len(copied_tables) > 0:
        logger.info(' Copied: %s' % ', '.join(copied_tables))
    if len(invalid_tables) > 0:
        logger.error('Invalid: %s' % ', '.join(invalid_tables))
    if len(failed_tables) > 0:
        logger.error(' Failed: %s' % ', '.join(failed_tables))
    logger.info('--------------------------------------')

    if len(invalid_tables) > 0 or len(failed_tables) > 0:
        return -1

    return 0
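# main() returns a value intended as a process exit status: 0 when every table
# was copied or skipped, 1 for usage or setup problems, and -1 when any table
# failed or was invalid. A minimal entry-point sketch that propagates that
# status (an assumption; the script's actual bootstrap is not shown in this
# excerpt):
if __name__ == "__main__":
    sys.exit(main())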