def unpack(datasource, destination, include_tables=(), exclude_tables=(), report_progress=False, list_contents=False):
    """Unpack a MySQL tar or xbstream based datasource

    :param datasource: file object stream to unpack
    :param destination: directory to unpack archive contents to
    :param include_tables: sequence of tables to include (db.tablename)
    :param exclude_tables: sequences of tables to exclude (db.tablename)
    :param report_progress: boolean flag whether to report progress
                            reading stream to stderr
    :param list_contents: when True, print member paths instead of
                          extracting them
    :raises: UnpackError on error
    """
    is_excluded = inclusion_exclusion_filter(include_tables, exclude_tables)
    with compression.decompressed(datasource, report_progress) as decompressed:
        # Rewrap the underlying fd as a plain binary stream; closefd=False
        # leaves the context manager responsible for closing the fd.
        raw = io.open(decompressed.fileno(), 'rb', closefd=False)
        for member in load_unpacker(raw):
            if member.name is not None and is_excluded(member.name):
                debug("# Skipping: %s" % member.path)
            elif list_contents:
                # A member may arrive in multiple chunks; report only the
                # first chunk so each path is listed exactly once.
                if member.chunk == 0:
                    print(member.path.decode('utf-8'))
            else:
                member.extract(destination)
def unpack(datasource, destination, include_tables=(), exclude_tables=(), report_progress=False, list_contents=False):
    """Unpack a MySQL tar or xbstream based datasource

    :param datasource: file object stream to unpack
    :param destination: directory to unpack archive contents to
    :param include_tables: sequence of tables to include (db.tablename)
    :param exclude_tables: sequences of tables to exclude (db.tablename)
    :param report_progress: boolean flag whether to report progress
                            reading stream to stderr
    :param list_contents: when True, print member paths instead of
                          extracting them
    :raises: UnpackError on error
    """
    # NOTE(review): this redefines unpack() declared earlier in the file;
    # only this later definition is visible to importers.  Confirm the
    # earlier variant (which rewraps the fd via io.open) is intentionally
    # superseded.
    excluded = inclusion_exclusion_filter(include_tables, exclude_tables)
    with compression.decompressed(datasource, report_progress) as archive:
        for item in load_unpacker(archive):
            # Filtered members are skipped entirely (neither listed nor
            # extracted).
            if item.name is not None and excluded(item.name):
                debug("# Skipping: %s" % item.path)
                continue
            if not list_contents:
                item.extract(destination)
            elif item.chunk == 0:
                # Only the first chunk of a member is printed, so each
                # archive path appears once in the listing.
                print(item.path.decode('utf-8'))
def sieve(options):
    """Filter, transform and rewrite a mysqldump stream per *options*.

    :param options: sieve options object describing input stream, output
                    format/destination and section include/exclude flags
    :returns: mapping of section name -> count of sections written
    """
    if options.output_format == 'directory':
        pycompat.makedirs(options.directory, exist_ok=True)

    # Translate the boolean knobs into excluded dump sections up front.
    if not options.table_schema:
        for section_name in ('tablestructure', 'view_temporary', 'view'):
            options.exclude_section(section_name)
    if not options.table_data:
        options.exclude_section('tabledata')
    # These flags are tri-state: only an explicit False excludes the
    # section; None/unspecified leaves it in.
    for flag, section_name in ((options.routines, 'routines'),
                               (options.events, 'events'),
                               (options.triggers, 'triggers')):
        if flag is False:
            options.exclude_section(section_name)

    with compression.decompressed(options.input_stream) as stream:
        sections = parser.DumpParser(stream=stream)
        should_drop = filters.SectionFilter(options)
        rewrite = transform.SectionTransform(options)
        emit = writers.load(options, context=rewrite)
        stats = collections.defaultdict(int)
        for section in sections:
            if should_drop(section):
                continue
            stats[section.name] += 1
            rewrite(section)
            emit(section)
    return stats
def unpack_tarball_distribution(stream, destdir, report_progress):
    """Unpack a MySQL tar distribution in a directory

    This method filters several items from the tarball:

    - static libraries from ./lib/
    - *_embedded and mysqld-debug from ./bin/
    - ./mysql-test
    - ./sql-bench

    :param stream: stream of bytes from which the tarball data can be read
    :param destdir: destination directory files should be unpacked to
    :param report_progress: boolean flag whether to report decompression
                            progress
    """
    debug(" # unpacking tarball stream=%r destination=%r", stream, destdir)
    total_size = 0
    extracted_size = 0
    # NOTE(review): assumes ``stream`` is a urllib-style response object
    # exposing info()['content-length'] — confirm against callers.
    sizehint = int(stream.info()['content-length'])
    with compression.decompressed(stream, report_progress=report_progress, sizehint=sizehint, filetype='.gz') as stream:
        # python 2.6's tarfile does not support the context manager
        # protocol, so contextlib.closing is used to guarantee close().
        # 'r|*' reads the archive as a non-seekable stream with
        # transparent compression detection.
        with contextlib.closing(tarfile.open(None, 'r|*', fileobj=stream)) as tar:
            for tarinfo in tar:
                total_size += tarinfo.size
                # only regular files and symlinks are extracted
                if not (tarinfo.isreg() or tarinfo.issym()):
                    continue
                # strip the leading top-level directory (e.g.
                # mysql-x.y.z-linux.../) from the member path
                name = os.path.normpath(tarinfo.name).partition(os.sep)[2]
                # first path component after the stripped prefix
                name0 = name.partition(os.sep)[0]
                # keep bin/ (minus *_embedded and mysqld-debug), lib/
                # (minus static *.a libraries) and share/ as-is
                if (name0 == 'bin' and not name.endswith('_embedded') and not name.endswith('mysqld-debug')) or \
                        (name0 == 'lib' and not name.endswith('.a')) or \
                        name0 == 'share':
                    tarinfo.name = name
                elif name0 == 'scripts':
                    # flatten scripts/* into bin/
                    tarinfo.name = os.path.join('bin', os.path.basename(name))
                elif name in ('COPYING', 'README', 'INSTALL-BINARY', 'docs/ChangeLog'):
                    # collect top-level docs under docs.mysql/
                    tarinfo.name = os.path.join('docs.mysql', os.path.basename(name))
                else:
                    # everything else (mysql-test, sql-bench, ...) is dropped
                    debug(" # Filtering: %s", name)
                    continue
                # reset the user to something sane
                tarinfo.uname = 'mysql'
                tarinfo.gname = 'mysql'
                tarinfo.uid = 0
                tarinfo.gid = 0
                # finally extract the element
                debug(" # Extracting: %s", name)
                # tarfile fails to overwrite an existing symlink, so remove
                # any stale one first; see http://bugs.python.org/issue12800
                if tarinfo.issym():
                    dest_path = os.path.join(destdir, name)
                    try:
                        os.unlink(dest_path)
                    except OSError as exc:
                        # a missing link is fine; anything else is a real error
                        if exc.errno != errno.ENOENT:
                            raise
                tar.extract(tarinfo, destdir)
                extracted_size += tarinfo.size
    debug(" # Uncompressed tarball size: %s Extracted size: %s",
          fmt.filesize(total_size), fmt.filesize(extracted_size))