def get_schema_img(self, db_name):
    """Return the schema image (DDL lines) for db_name."""
    server_values = parse_connection(server_connection)
    query_options = {
        'skip_data': True,
        'skip_grants': True,
        'skip_create': True,
        'rpl_mode': None,
        'quiet': True,
    }
    db_list = [db_name]
    # Capture the schema DDL that export_databases writes to sys.stdout.
    with capture() as dbschema:
        dbexport.export_databases(server_values, db_list, sys.stdout,
                                  query_options)
    db_schema = dbschema.getvalue().splitlines(True)
    return db_schema
def get_schema_img(self, db_name):
    """Variant of get_schema_img that also suppresses GTID statements."""
    server_values = parse_connection(server_connection)
    query_options = {
        'skip_data': True,
        'skip_grants': True,
        'skip_create': True,
        'rpl_mode': None,
        'skip_gtid': True,   # additionally omit GTID statements
        'quiet': True,
    }
    db_list = [db_name]
    # Capture the schema DDL that export_databases writes to sys.stdout.
    with capture() as dbschema:
        dbexport.export_databases(server_values, db_list, sys.stdout,
                                  query_options)
    db_schema = dbschema.getvalue().splitlines(True)
    return db_schema
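# capture() is assumed to be a context manager that swaps sys.stdout for an
# in-memory buffer so the export text can be read back with getvalue().
# A minimal sketch of such a helper (an assumption, not the project's own
# implementation; shown in Python 3 form):
import sys
from contextlib import contextmanager
from io import StringIO

@contextmanager
def capture():
    old_stdout = sys.stdout      # remember the real stdout
    buf = StringIO()             # buffer that receives all writes
    sys.stdout = buf
    try:
        yield buf                # bound to 'dbschema' in the methods above
    finally:
        sys.stdout = old_stdout  # always restore, even on error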
}

# Parse server connection values
try:
    server_values = parse_connection(opt.server)
except:
    parser.error("Server connection values invalid or cannot be parsed.")

# Build list of databases to copy
db_list = []
for db in args:
    db_list.append(db)

try:
    # record start time
    if opt.verbosity >= 3:
        start_test = time.time()

    # Export all databases specified
    export_databases(server_values, db_list, options)

    # record elapsed time
    if opt.verbosity >= 3:
        print_elapsed_time(start_test)
except UtilError, e:
    print "ERROR:", e.errmsg
    exit(1)

exit()
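# print_elapsed_time is a MySQL Utilities helper; a minimal sketch of a
# wall-clock timer with the same calling convention (an assumption, the
# real helper's output format may differ):
import time

def print_elapsed_time(start_time):
    """Print the seconds elapsed since start_time (a time.time() value)."""
    print("Time: %.2f sec" % (time.time() - start_time))

start_test = time.time()
time.sleep(0.1)                 # stand-in for the export work
print_elapsed_time(start_test)  # prints e.g. "Time: 0.10 sec"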
        # Get list of temporary files with the exported data.
        tmp_files_list = res.get()
        workers_pool.join()

        # Merge resulting temp files (if generated).
        for tmp_filename in tmp_files_list:
            if tmp_filename:
                tmp_file = open(tmp_filename, 'r')
                shutil.copyfileobj(tmp_file, output_file)
                tmp_file.close()
                os.remove(tmp_filename)
    else:
        # Export all specified databases (no database level concurrency).
        # Note: on POSIX systems multiprocessing is applied at the table
        # level (not database).
        export_databases(server_values, db_list, output_file, options)

    if output_filename is None:
        # Dump the export output to the stdout.
        output_file.seek(0)
        shutil.copyfileobj(output_file, sys.stdout)
        output_file.close()
        os.remove(output_file.name)

    # record elapsed time
    if opt.verbosity >= 3:
        sys.stdout.flush()
        print_elapsed_time(start_export_time)
except UtilError:
    _, err, _ = sys.exc_info()
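# The workers_pool/res pair consumed above is assumed to come from a
# multiprocessing.Pool whose workers each export one database into a private
# temp file and return its name, letting the parent merge the pieces in a
# stable order. A runnable sketch under that assumption (export_one_db and
# the sample database names are hypothetical stand-ins):
import multiprocessing
import tempfile

def export_one_db(db_name):
    """Hypothetical worker: dump one database to a temp file, return its name."""
    tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.sql', delete=False)
    # The real worker would run the export here; a marker line stands in.
    tmp.write('-- export of %s\n' % db_name)
    tmp.close()
    return tmp.name

if __name__ == '__main__':
    db_list = ['employees', 'sakila']       # example databases
    workers_pool = multiprocessing.Pool(processes=2)
    res = workers_pool.map_async(export_one_db, db_list)
    workers_pool.close()                    # no more tasks will be submitted
    tmp_files_list = res.get()              # temp filenames, in db_list order
    workers_pool.join()
    print(tmp_files_list)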
parser.error("Server connection values invalid: %s." % err) except UtilError: _, err, _ = sys.exc_info() parser.error("Server connection values invalid: %s." % err.errmsg) # Build list of databases to copy db_list = [] for db in args: # Remove backtick quotes (handled later) db = remove_backtick_quoting(db) if is_quoted_with_backticks(db) else db db_list.append(db) try: # record start time if opt.verbosity >= 3: start_test = time.time() # Export all databases specified export_databases(server_values, db_list, options) # record elapsed time if opt.verbosity >= 3: print_elapsed_time(start_test) except UtilError: _, e, _ = sys.exc_info() print("ERROR: %s" % e.errmsg) sys.exit(1) sys.exit()