Example #1
    def test_single_fileobj_tar_gz_no_simplify_archives(self):
        source = os.path.join(self.temp_dir, 'source_dir')
        os.mkdir(source)
        self.write_string_to_file('testing', os.path.join(source, 'filename'))
        self.do_upload([('source.tar.gz', tar_gzip_directory(source))],
                       simplify_archives=False)
        self.assertEqual(['filename'], os.listdir(self.bundle_location))
        self.check_file_contains_string(os.path.join(self.bundle_location, 'filename'), 'testing')
Example #2
    def test_tar_empty(self):
        dir = tempfile.mkdtemp()
        self.addCleanup(lambda: remove_path(dir))
        temp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: remove_path(temp_dir))

        output_dir = os.path.join(temp_dir, 'output')
        un_tar_directory(tar_gzip_directory(dir), output_dir, 'gz')
        self.assertEqual(os.listdir(output_dir), [])
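The empty-directory round trip above generalizes. A minimal sketch of the same round trip on a non-empty directory, assuming tar_gzip_directory and un_tar_directory are importable from the same module these tests exercise:

import os
import tempfile

# Round-trip sketch; assumes the same helpers used in the test above.
src = tempfile.mkdtemp()
with open(os.path.join(src, 'hello.txt'), 'w') as f:
    f.write('hello')

out = os.path.join(tempfile.mkdtemp(), 'output')
un_tar_directory(tar_gzip_directory(src), out, 'gz')
assert os.listdir(out) == ['hello.txt']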
Example #3
    def test_single_local_tar_gz_path_simplify_archives(self):
        source_dir = os.path.join(self.temp_dir, 'source_dir')
        os.mkdir(source_dir)
        self.write_string_to_file('testing', os.path.join(source_dir, 'filename'))
        source = os.path.join(self.temp_dir, 'source.tar.gz')
        with open(source, 'wb') as f:
            f.write(tar_gzip_directory(source_dir).read())
        self.do_upload([source], simplify_archives=True)
        self.assertTrue(os.path.exists(source))
        self.check_file_contains_string(self.bundle_location, 'testing')
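With simplify_archives=True, an uploaded archive that unpacks to a single file is "simplified": the bundle becomes that file's contents directly, which is why this test reads self.bundle_location itself rather than a 'filename' entry inside it. Contrast Example #1, where simplify_archives=False keeps the extracted directory layout.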
Example #4
    def test_tar_has_files(self):
        dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'files')
        temp_dir = tempfile.mkdtemp()
        self.addCleanup(lambda: remove_path(temp_dir))

        output_dir = os.path.join(temp_dir, 'output')
        un_tar_directory(tar_gzip_directory(dir, False, ['f2'], ['f1', 'b.txt']), output_dir, 'gz')
        output_dir_entries = os.listdir(output_dir)
        self.assertIn('dir1', output_dir_entries)
        self.assertIn('a.txt', output_dir_entries)
        self.assertNotIn('b.txt', output_dir_entries)
        self.assertTrue(os.path.exists(os.path.join(output_dir, 'dir1', 'f1')))
        self.assertFalse(os.path.exists(os.path.join(output_dir, 'dir1', 'f2')))
        self.assertTrue(os.path.islink(os.path.join(output_dir, 'a-symlink.txt')))
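Judging by the assertions, the two positional lists behave differently: 'f2' is excluded even though it sits under dir1, while 'f1' survives and only the top-level 'b.txt' is dropped. Other examples in this collection pass keywords named exclude_patterns and exclude_names, so a keyword form of the call above would plausibly read as follows; the mapping of the two lists onto these parameters is an inference from the assertions, not confirmed by the source:

# Hedged keyword form of the call above; the positional-to-keyword mapping
# is inferred from the test's assertions.
un_tar_directory(
    tar_gzip_directory(
        dir,
        follow_symlinks=False,
        exclude_patterns=['f2'],        # glob patterns, apparently matched anywhere in the tree
        exclude_names=['f1', 'b.txt'],  # names, apparently matched at the top level only
    ),
    output_dir,
    'gz',
)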
Example #5
    def stream_tarred_gzipped_directory(self, uuid, path):
        """
        Returns a file-like object containing a tarred and gzipped archive
        of the given directory.
        """
        if self._is_available_locally(uuid):
            directory_path = self._get_target_path(uuid, path)
            return file_util.tar_gzip_directory(directory_path)
        else:
            worker = self._worker_model.get_bundle_worker(uuid)
            response_socket_id = self._worker_model.allocate_socket(
                worker['user_id'], worker['worker_id']
            )
            try:
                read_args = {'type': 'stream_directory'}
                self._send_read_message(worker, response_socket_id, uuid, path, read_args)
                fileobj = self._get_read_response_stream(response_socket_id)
                return Deallocating(fileobj, self._worker_model, response_socket_id)
            except Exception:
                self._worker_model.deallocate_socket(response_socket_id)
                raise
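Whichever branch runs, the caller receives a file-like object and is responsible for closing it; in the remote case the Deallocating wrapper presumably releases the socket on close. A consumption sketch in which manager, the bundle UUID, and the output path are hypothetical:

import shutil
from contextlib import closing

bundle_uuid = '0x123abc'  # hypothetical bundle UUID
# 'manager' stands in for whatever object exposes the method above.
with closing(manager.stream_tarred_gzipped_directory(bundle_uuid, '')) as fileobj:
    with open('/tmp/bundle.tar.gz', 'wb') as out:
        shutil.copyfileobj(fileobj, out)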
Example #6
    def stream_thread(final_path):
        with closing(tar_gzip_directory(final_path, exclude_names=exclude_names)) as fileobj:
            reply_fn(None, {}, fileobj)
Example #7
def pack_files_for_upload(
    sources, should_unpack, follow_symlinks, exclude_patterns=None, force_compression=False
):
    """
    Create a single flat tarfile containing all the sources.
    Caller is responsible for closing the returned fileobj.

    Note: It may be possible to achieve additional speed gains on certain
    cases if we disable compression when tar-ing directories. But for now,
    force_compression only affects the case of single, uncompressed files.

    :param sources: list of paths to files to pack
    :param should_unpack: will unpack archives iff True
    :param follow_symlinks: if True, symlinked sources are resolved;
                            if False, a symlinked source raises UsageError
    :param exclude_patterns: list of glob patterns for files to ignore, or
                             None to include all files
    :param force_compression: True to always use compression
    :return: dict with {
        'fileobj': <file object of archive>,
        'filename': <name of archive file>,
        'filesize': <size of archive in bytes, or None if unknown>,
        'should_unpack': <True iff archive should be unpacked at server>,
        'should_simplify': <True iff directory should be 'simplified' at server>
        }
    """
    exclude_patterns = exclude_patterns or []

    def resolve_source(source):
        # Resolve symlink if desired
        resolved_source = source
        if follow_symlinks:
            resolved_source = os.path.realpath(source)
            if not os.path.exists(resolved_source):
                raise UsageError('Broken symlink')
        elif os.path.islink(source):
            raise UsageError('Not following symlinks.')
        return resolved_source

    # Materialize the map so len() and indexing below work under Python 3
    sources = list(map(resolve_source, sources))

    # For efficiency, return single files and directories directly
    if len(sources) == 1:
        source = sources[0]
        filename = os.path.basename(source)
        if os.path.isdir(source):
            archived = tar_gzip_directory(
                source, follow_symlinks=follow_symlinks, exclude_patterns=exclude_patterns
            )
            return {
                'fileobj': archived,
                'filename': filename + '.tar.gz',
                'filesize': None,
                'should_unpack': True,
                'should_simplify': False,
            }
        elif path_is_archive(source):
            return {
                'fileobj': open(source, mode='rb'),
                'filename': filename,
                'filesize': os.path.getsize(source),
                'should_unpack': should_unpack,
                'should_simplify': True,
            }
        elif force_compression:
            return {
                'fileobj': gzip_file(source),
                'filename': filename + '.gz',
                'filesize': None,
                'should_unpack': True,
                'should_simplify': False,
            }
        else:
            return {
                'fileobj': open(source, mode='rb'),
                'filename': filename,
                'filesize': os.path.getsize(source),
                'should_unpack': False,
                'should_simplify': False,
            }

    # Build archive file incrementally from all sources
    # TODO: For further optimization, could either use a temporary named pipe
    # or a wrapper around a TemporaryFile to write the tarfile concurrently
    # while the REST client reads and sends it to the server. At the moment,
    # we wait until the tarfile is fully written before rewinding and passing
    # the file to the client to be sent to the server.
    scratch_dir = tempfile.mkdtemp()
    archive_fileobj = tempfile.SpooledTemporaryFile()
    # The archive's internal name is irrelevant since we write into fileobj
    archive = tarfile.open(name='we', mode='w:gz', fileobj=archive_fileobj)

    def exclude_filter(tarinfo):
        # tarfile.TarFile.add() lost its 'exclude' argument in Python 3.7;
        # the 'filter' callable returns None to skip a member.
        basefn = os.path.basename(tarinfo.name)
        if any(fnmatch(basefn, p) for p in exclude_patterns):
            return None
        return tarinfo

    for source in sources:
        if should_unpack and path_is_archive(source):
            # Unpack archive into scratch space
            dest_basename = strip_archive_ext(os.path.basename(source))
            dest_path = os.path.join(scratch_dir, dest_basename)
            unpack(get_archive_ext(source), source, dest_path)

            # Add file or directory to archive
            archive.add(dest_path, arcname=dest_basename, recursive=True)
        else:
            # Add file to archive, or add files recursively if directory
            archive.add(
                source, arcname=os.path.basename(source), recursive=True, filter=exclude_filter
            )

    # Clean up, rewind archive file, and return it
    archive.close()
    shutil.rmtree(scratch_dir)
    filesize = archive_fileobj.tell()
    archive_fileobj.seek(0)
    return {
        'fileobj': archive_fileobj,
        'filename': 'contents.tar.gz',
        'filesize': filesize,
        'should_unpack': True,
        'should_simplify': False,
    }
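The documented return contract is enough to sketch a caller. In the sketch below, upload_blob is a hypothetical stand-in for the REST client and the source paths are illustrative; the only firm requirement from the docstring is that the caller closes the returned fileobj:

# Hedged caller sketch; upload_blob and the source paths are hypothetical.
packed = pack_files_for_upload(
    ['notes.txt', 'results/'], should_unpack=False, follow_symlinks=True
)
try:
    upload_blob(
        fileobj=packed['fileobj'],
        filename=packed['filename'],
        filesize=packed['filesize'],  # None when the size is unknown
        unpack_at_server=packed['should_unpack'],
        simplify_at_server=packed['should_simplify'],
    )
finally:
    packed['fileobj'].close()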