Code Example #1
    def stream(self, fileobj, callback=None, sleeptime=5):
        """
        Stream the bundle to the fileobj.

        This method is a blocking I/O operation.
        The ``fileobj`` should be a file-like object opened in 'wb' mode.
        A callback may be provided via the optional ``callback`` keyword
        argument; it is invoked periodically with the current percentage
        of completion.
        """
        notifythread = None
        if callable(callback):
            self._save_total_size()
            notifythread = self._setup_notify_thread(callback, sleeptime)

        tarfile = taropen(None, 'w|', fileobj)
        for file_data in self.file_data:
            tarinfo, fileobj = self._tarinfo_from_file_data(file_data)
            tarfile.addfile(tarinfo, fileobj)
            self.md_obj.append(
                self._build_file_info(file_data, fileobj.hashdigest()))
        md_txt = bytes(metadata_encode(self.md_obj), 'utf8')
        md_fd = BytesIO(md_txt)  # the encoded metadata is bytes, so it needs a binary buffer
        md_tinfo = TarInfo('metadata.txt')
        md_tinfo.size = len(md_txt)
        tarfile.addfile(md_tinfo, md_fd)
        tarfile.close()
        self._complete = True

        if callable(callback):
            notifythread.join()
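
A minimal usage sketch for the method above (the Bundler name, its constructor, and file_data are assumptions for illustration, not part of the original snippet):

    def print_progress(percent):
        print('bundle %d%% complete' % percent)

    bundler = Bundler(file_data)  # hypothetical class that exposes stream()
    with open('bundle.tar', 'wb') as fileobj:
        bundler.stream(fileobj, callback=print_progress, sleeptime=2)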
Code Example #2
File: engine.py (Project: yanyu510/splitgraph)
def copy_to_container(container: "Container", source_path: str,
                      target_path: str) -> None:
    """
    Copy a file into a Docker container

    :param container: Container object
    :param source_path: Source file path
    :param target_path: Target file path (in the container)
    :return:
    """
    # https://github.com/docker/docker-py/issues/1771
    with open(source_path, "rb") as f:
        data = f.read()

    tarinfo = TarInfo(name=os.path.basename(target_path))
    tarinfo.size = len(data)
    tarinfo.mtime = int(time.time())

    stream = BytesIO()
    tar = TarFile(fileobj=stream, mode="w")
    tar.addfile(tarinfo, BytesIO(data))
    tar.close()

    stream.seek(0)
    container.put_archive(path=os.path.dirname(target_path),
                          data=stream.read())
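
A possible call site, assuming the Docker SDK for Python (the docker package) and a running container; the container name and file paths are placeholders:

    import docker

    client = docker.from_env()
    container = client.containers.get("my-running-container")
    copy_to_container(container, "./config.ini", "/app/config.ini")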
Code Example #3
    def _dump_files(self, tar):
        """
        Dump all uploaded media to the archive.
        """

        # Loop through all models and find FileFields
        for model in apps.get_models():

            # Get the name of all file fields in the model
            field_names = []
            for field in model._meta.fields:
                if isinstance(field, models.FileField):
                    field_names.append(field.name)

            # If any were found, loop through each row
            if len(field_names):
                for row in model.objects.all():
                    for field_name in field_names:
                        field = getattr(row, field_name)
                        if field:
                            field.open()
                            info = TarInfo(field.name)
                            info.size = field.size
                            tar.addfile(info, field)
                            field.close()
Code Example #4
File: files.py (Project: simon987/hexlib)
def add_buf_to_tar(tar: TarFile, filename: str, buf: BytesIO):
    buf.flush()
    buf.seek(0)

    info = TarInfo(name=filename)
    info.size = len(buf.getvalue())
    tar.addfile(info, buf)
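
A short standard-library-only usage sketch for the helper above (the archive and member names are placeholders):

    from io import BytesIO
    from tarfile import TarFile

    buf = BytesIO()
    buf.write(b"hello, tar")

    with TarFile.open("out.tar", mode="w") as tar:
        add_buf_to_tar(tar, "greeting.txt", buf)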
Code Example #5
def sanitize_tarinfo(tarinfo: tarfile.TarInfo):
    path = Path(tarinfo.name)

    # Ban absolute paths
    if path.is_absolute():
        return False

    # Ban paths containing .. that would go outside
    try:
        base = Path("fake_path")
        base.joinpath(path).relative_to(base)
    except ValueError as e:
        logger.warning(f"Unsafe path {path}")
        return False

    # Ban links that would point somewhere outside
    if tarinfo.islnk() or tarinfo.issym():
        try:
            base = Path("fake_path")
            link = path.parent / tarinfo.linkname
            base.joinpath(link).relative_to(base)
        except ValueError as e:
            logger.warning(f"Unsafe path {path}")
            return False

    return True
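
One way such a check might be applied before extracting an untrusted archive (a sketch, not taken from the project the function comes from):

    import tarfile

    with tarfile.open("untrusted.tar") as tar:
        safe_members = [m for m in tar.getmembers() if sanitize_tarinfo(m)]
        tar.extractall(path="dest", members=safe_members)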
Code Example #6
    def test_09__verify_version(self, mock_isfile, mock_fumktmp, mock_osbase,
                                mock_fugetdata, mock_furm, mock_versionok):
        """Test09 UdockerTools()._verify_version()."""
        tball = "/home/udocker.tar"
        mock_isfile.return_value = False
        utools = UdockerTools(self.local)
        status = utools._verify_version(tball)
        self.assertTrue(mock_isfile.called)
        self.assertEqual(status, (False, ""))

        tball = "/home/udocker.tar"
        mock_isfile.return_value = True
        mock_fumktmp.return_value = ""
        utools = UdockerTools(self.local)
        status = utools._verify_version(tball)
        self.assertTrue(mock_isfile.called)
        self.assertTrue(mock_fumktmp.called)
        self.assertEqual(status, (False, ""))

        tball = "/home/udocker.tar"
        tinfo1 = TarInfo("udocker_dir/lib/VERSION")
        tinfo2 = TarInfo("a")
        mock_isfile.return_value = True
        mock_fumktmp.return_value = "/home/tmp"
        mock_osbase.return_value = "VERSION"
        mock_fugetdata.return_value = "1.2.7"
        mock_furm.return_value = None
        mock_versionok.return_value = True
        with patch.object(tarfile, 'open', autospec=True) as open_mock:
            open_mock.return_value.getmembers.return_value = [tinfo2, tinfo1]
            open_mock.return_value.extract.return_value = None
            utools = UdockerTools(self.local)
            status = utools._verify_version(tball)
            self.assertEqual(status, (True, "1.2.7"))
            self.assertTrue(mock_furm.called)
Code Example #7
 def add(self, filename, size, fileobj):
     """
     Add the provided file to the archive
     """
     tarinfo = TarInfo(filename)
     tarinfo.size = size
     self._tarfile.addfile(tarinfo, fileobj)
Code Example #8
    def upload(self):
        logger.info("Loading artifacts")

        t = ts()

        tar_data = io.BytesIO()

        with tarfile.open(fileobj=tar_data, mode="w|") as tar:
            for root, _, files in os.walk(self._artifact_directory):
                for af in files:
                    full_path = os.path.join(root, af)

                    relpath = os.path.relpath(full_path, self._artifact_directory)
                    ti = TarInfo(relpath)

                    stat = os.stat(full_path)
                    ti.size = stat.st_size
                    ti.mode = stat.st_mode

                    with open(full_path, "rb") as f:
                        tar.addfile(ti, f)

        res = self._container.put_archive(config.build_dir, tar_data.getvalue())
        if not res:
            raise Exception(f"Error loading artifact: {af}")

        t = ts() - t

        logger.info("Artifacts loaded in %.3fs", t)
Code Example #9
    def stream_regular_file(self, filepath, tarinfo_buf, file_info):
        try:
            file_system = files.get_fs_by_file_path(filepath)
            with file_system.open(filepath, 'rb') as fd:
                f_iter = iter(lambda: fd.read(self.CHUNKSIZE), '')  # pylint: disable=cell-var-from-loop
                try:
                    yield tarinfo_buf
                    chunk = ''
                    for chunk in f_iter:
                        yield chunk
                    if len(chunk) % self.BLOCKSIZE != 0:
                        yield (self.BLOCKSIZE -
                               (len(chunk) % self.BLOCKSIZE)) * b'\0'

                except (IOError, fs.errors.OperationFailed):
                    msg = (
                        "Error happened during sending file content in archive stream, file path: %s, "
                        "container: %s/%s, archive path: %s" % file_info)
                    self.log.critical(msg)
                    self.abort(500, msg)
        except (fs.errors.ResourceNotFound, fs.errors.OperationFailed,
                IOError):
            self.log.critical(
                "Couldn't find the file during creating archive stream: %s, "
                "container: %s/%s, archive path: %s" % file_info)
            tarinfo = TarInfo()
            tarinfo.name = file_info[3] + '.MISSING'
            yield tarinfo.tobuf()
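
The trailing NUL bytes are needed because the tar format stores member data in 512-byte blocks (the BLOCKSIZE above). Expressed in terms of the member's total data size, the required padding is (a sketch; it matches the chunk-based computation above whenever CHUNKSIZE is a multiple of BLOCKSIZE):

    BLOCKSIZE = 512
    padding = (BLOCKSIZE - size % BLOCKSIZE) % BLOCKSIZE  # 0 when size is already block-aligned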
Code Example #10
File: test_xtarfile.py (Project: pombredanne/xtarfile)
    def _test_roundtrip(self, context):
        path = context.given_file()
        content = b'test content'
        filename = 'archived-file.txt'

        with xtarfile_open(path, context.mode('w')) as archive:
            buffer = BytesIO()
            buffer.write(content)
            buffer.seek(0)

            tarinfo = TarInfo()
            tarinfo.size = len(content)
            tarinfo.name = filename

            archive.addfile(tarinfo, buffer)

        with xtarfile_open(path, context.mode('r')) as archive:
            while True:
                member = archive.next()
                if member is None:
                    self.fail('{} not found in archive'.format(filename))
                if member.name == filename:
                    buffer = archive.extractfile(member)
                    actual_content = buffer.read()
                    break

        self.assertEqual(actual_content, content)
Code Example #11
File: tar2xml.py (Project: plison/OpenSubtitles2015)
def _addToArchive(output, filename, archive):
    output.seek(0)
    xmlInfo = TarInfo(filename)
    xmlInfo.size = len(output.getbuffer())
    xmlInfo.mtime = time.time()
    archive.addfile(xmlInfo,output)    
    output.close()
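
A usage sketch for the helper above (names are placeholders; output is assumed to be an in-memory binary buffer holding the serialized XML):

    from io import BytesIO
    from tarfile import TarFile

    archive = TarFile.open("subtitles.tar", mode="w")
    output = BytesIO(b"<document/>")
    _addToArchive(output, "en/0001.xml", archive)
    archive.close()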
Code Example #12
File: __init__.py (Project: nilp0inter/oniontree)
    def put(self, content, filename="", file_hash=""):
        """ Store file information in hashed tree """ 

        if not filename and not file_hash:
            raise ValueError('Filename or FileHash is mandatory')

        if filename:
            # File accessibility
            if not os.path.exists(filename):
                raise IOError('Inaccessible file: %s' % filename)

            # Calc hash
            file_hash = self._get_hash(filename)
        
        if not file_hash:
            raise ValueError('Hash of file is mandatory')

        # Get file path for hash
        path, tarfile, hashed_filename = self._get_path(file_hash)

        # Create file path
        try:
            os.makedirs(path)
        except WindowsError: 
            pass
        except OSError:
            pass

        # Open tarfile
        if self.external_compressor:
            # External compressor is not suited for adding files.
            raise ValueError('An external compressor cannot be used when writing files')

        with TarFile.open(name=os.path.join(path, tarfile), mode='a') as tar:
            with FileLock(os.path.join(path, tarfile)) as lock:
                # Test if file already exists into tarfile
                try:
                    tar.getmember(hashed_filename)
                    raise ValueError('Member already exists')
                except KeyError:
                    pass
                except:
                    raise
            
                data = self.encoder.encode(content)
                if self.internal_compressor:
                    data = self.internal_compressor.compress(data)

                data_file = StringIO(data)

                mtime = time.time() 
                ti = TarInfo(hashed_filename)
                ti.size = data_file.len
                ti.mtime = mtime 

                tar.addfile(tarinfo=ti, fileobj=data_file)

                tar.close()

        return file_hash
Code Example #13
File: context.py (Project: bendavis78/zope)
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.
        """
        if subdir is not None:
            filename = '/'.join((subdir, filename))

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0755) == '-rwxr-xr-x'
                info.mode = 0755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, str):
            stream = StringIO(text)
            info.size = len(text)
        elif isinstance(text, unicode):
            raise ValueError("Unicode text is not supported, even if it only "
                             "contains ascii. Please encode your data")
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator,
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile(info, stream)
Code Example #14
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.
        """
        if subdir is not None:
            filename = '/'.join((subdir, filename))

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0o755) == '-rwxr-xr-x'
                info.mode = 0o755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, six.text_type):
            encoding = self.getEncoding() or 'utf-8'
            text = text.encode(encoding)

        if isinstance(text, six.binary_type):
            stream = BytesIO(text)
            info.size = len(text)
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator,
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile(info, stream)
Code Example #15
    def test_10__install(self, mock_isfile, mock_futil, mock_osbase):
        """Test10 UdockerTools()._install()."""
        tfile = ""
        mock_isfile.return_value = False
        utools = UdockerTools(self.local)
        status = utools._install(tfile)
        self.assertFalse(status)

        tinfo1 = TarInfo("udocker_dir/bin/ls")
        tinfo2 = TarInfo("udocker_dir/lib/lib1")
        tfile = "udocker.tar"
        mock_isfile.return_value = True
        mock_futil.return_value.chmod.return_value = None
        mock_futil.return_value.rchmod.side_effect = [
            None, None, None, None, None, None
        ]
        mock_osbase.side_effect = ["ls", "ls", "lib1", "lib1", "doc", "doc1"]
        self.local.create_repo.return_value = None
        with patch.object(tarfile, 'open', autospec=True) as open_mock:
            open_mock.return_value.getmembers.side_effect = [[tinfo1, tinfo2],
                                                             [tinfo1, tinfo2],
                                                             [tinfo1, tinfo2]]
            open_mock.return_value.extract.side_effect = [None, None]
            utools = UdockerTools(self.local)
            status = utools._install(tfile)
            self.assertTrue(status)
            self.assertTrue(mock_futil.called)
            self.assertTrue(mock_futil.return_value.rchmod.call_count, 4)
Code Example #16
def GetTarInfo(filename, filetype=DIRTYPE, mode=0755):
    """Create information for tar files"""
    tarinfo = TarInfo(path.basename(filename))
    tarinfo.type = filetype
    tarinfo.mode = mode
    tarinfo.mtime = time()
    return tarinfo
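
A brief usage sketch (the archive name is a placeholder; note that the helper keeps only the basename of the path it is given, and its default mode=0755 is Python 2 octal syntax):

    import tarfile

    with tarfile.open("out.tar", "w") as tar:
        tar.addfile(GetTarInfo("build/output"))  # writes a directory entry named "output"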
Code Example #17
    def stream(self, fileobj, callback=None, sleeptime=5):
        """
        Stream the bundle to the fileobj.

        The fileobj should be a file-like object opened in 'wb' mode.
        If a callback is given, the percentage of the bundle written so far
        is passed to it as the first argument.
        """
        notifythread = None
        if callable(callback):
            self._save_total_size()
            notifythread = self._setup_notify_thread(callback, sleeptime)

        tarfile = taropen(None, 'w|', fileobj)
        for file_data in self.file_data:
            tarinfo, fileobj = self._tarinfo_from_file_data(file_data)
            tarfile.addfile(tarinfo, fileobj)
            self.md_obj.append(
                self._build_file_info(file_data, fileobj.hashdigest()))
        md_txt = metadata_encode(self.md_obj)
        md_txt = md_txt if PY2 else bytes(md_txt, 'UTF-8')
        md_fd = StringIO(md_txt) if PY2 else BytesIO(md_txt)  # bytes need a binary buffer on Python 3
        md_tinfo = TarInfo('metadata.txt')
        md_tinfo.size = len(md_txt)
        tarfile.addfile(md_tinfo, md_fd)
        tarfile.close()
        self._complete = True

        if callable(callback):
            notifythread.join()
Code Example #18
 def test_basic(self):
     # writing
     db = TarDB(dirname).open(mode='w')
     data_foo = '123'
     data_bar = 'ABCDEF'
     db.add_record(TarInfo('foo'), data_foo)
     db.add_record(TarInfo('bar'), data_bar)
     db.close()
     #
     files = os.listdir(dirname)
     self.assertEqual(len(files), 3)
     self.assertTrue('catalog' in files)
     self.assertTrue('lock' in files)
     self.assertTrue('db00000.tar' in files)
     # reading
     db = TarDB(dirname).open()
     (info1, data1) = db.get_record(0)
     self.assertEqual(data1, data_foo)
     self.assertEqual(len(data1), info1.size)
     (info2, data2) = db.get_record(1)
     self.assertEqual(data2, data_bar)
     self.assertEqual(len(data2), info2.size)
     # iter
     infos = list(db)
     self.assertEqual(len(infos), 2)
     self.assertEqual(infos[0].name, info1.name)
     self.assertEqual(infos[1].name, info2.name)
     db.close()
     return
Code Example #19
    def test_rawtar(self):
        """Create a normal tar archive and restore it"""
        raw = BytesIO()
        tarfile = TarFile(mode='w', fileobj=raw)

        testdata = rand_str(20) * 5000

        inf = TarInfo("simpletar")
        fileraw = BytesIO()
        fileraw.write(testdata)
        inf.size = len(testdata)
        fileraw.seek(0)

        tarfile.addfile(inf, fileobj=fileraw)
        tarfile.close()

        raw.seek(0)
        data = raw.read()

        cnt = rand_str(20)
        ret = requests.put(self.make_uri("restore", container=cnt), data=data)

        self.assertEqual(ret.status_code, 201)
        meta, stream = self.conn.object_fetch(self.account, cnt, "simpletar")
        self.assertEqual(
            md5("".join(stream)).hexdigest(),
            md5(testdata).hexdigest())
Code Example #20
    def move_certs(self, paths):
        self.log.info("Staging internal ssl certs for %s", self._log_name)
        yield self.pull_image(self.move_certs_image)
        # create the volume
        volume_name = self.format_volume_name(self.certs_volume_name, self)
        # create volume passes even if it already exists
        self.log.info("Creating ssl volume %s for %s", volume_name, self._log_name)
        yield self.docker('create_volume', volume_name)

        # create a tar archive of the internal cert files
        # docker.put_archive takes a tarfile and a running container
        # and unpacks the archive into the container
        nb_paths = {}
        tar_buf = BytesIO()
        archive = TarFile(fileobj=tar_buf, mode='w')
        for key, hub_path in paths.items():
            fname = os.path.basename(hub_path)
            nb_paths[key] = '/certs/' + fname
            with open(hub_path, 'rb') as f:
                content = f.read()
            tarinfo = TarInfo(name=fname)
            tarinfo.size = len(content)
            tarinfo.mtime = os.stat(hub_path).st_mtime
            tarinfo.mode = 0o644
            archive.addfile(tarinfo, BytesIO(content))
        archive.close()
        tar_buf.seek(0)

        # run a container to stage the certs,
        # mounting the volume at /certs/
        host_config = self.client.create_host_config(
            binds={
                volume_name: {"bind": "/certs", "mode": "rw"},
            },
        )
        container = yield self.docker('create_container',
            self.move_certs_image,
            volumes=["/certs"],
            host_config=host_config,
        )

        container_id = container['Id']
        self.log.debug(
            "Container %s is creating ssl certs for %s",
            container_id[:12], self._log_name,
        )
        # start the container
        yield self.docker('start', container_id)
        # stage the archive to the container
        try:
            yield self.docker(
                'put_archive',
                container=container_id,
                path='/certs',
                data=tar_buf,
            )
        finally:
            yield self.docker('remove_container', container_id)
        return nb_paths
Code Example #21
File: test_revision.py (Project: thepwagner/flotilla)
 def generate_tar(entries):
     tar_buf = BytesIO()
     tar_file = TarFile(mode="w", fileobj=tar_buf)
     for path, contents in entries.items():
         tar_info = TarInfo(name=path)
         tar_info.size = len(contents)
         tar_file.addfile(tar_info, fileobj=BytesIO(contents))
     return BytesIO(tar_buf.getvalue())
Code Example #22
File: mksfx.py (Project: nxt3-org/nxt3)
 def file_filter(info: tarfile.TarInfo):
     info.mode = 0o00777 if executable else 0o00666
     info.mtime = 0
     info.type = tarfile.REGTYPE
     info.uid = info.gid = 0
     info.uname = info.gname = "root"
     info.pax_headers = {}
     return info
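
A function with this shape is typically passed as the filter argument of TarFile.add, which calls it once per member and uses the TarInfo it returns (returning None would skip the member). A sketch under that assumption, taking for granted that file_filter and the executable flag it reads are in scope:

    import tarfile

    with tarfile.open("sfx.tar", "w") as tar:
        tar.add("payload.bin", arcname="payload.bin", filter=file_filter)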
Code Example #23
File: onos_stage.py (Project: K-OpenNet/ONOS-SSM)
def addFile(tar, dest, file, file_size):
    if dest not in written_files:
        info = TarInfo(dest)
        info.size = file_size
        info.mtime = now
        info.mode = 0777
        tar.addfile(info, fileobj=file)
        written_files.add(dest)
Code Example #24
def addFile(tar, dest, file, file_size):
    if dest not in written_files:
        info = TarInfo(dest)
        info.size = file_size
        info.mtime = now
        info.mode = 0o777
        tar.addfile(info, fileobj=file)
        written_files.add(dest)
Code Example #25
def compactar(idfiltro):
    m_filtro = Filtro.objects.get(pk=idfiltro)

    if m_filtro.situacao in SITUACOES_EXECUTORES:
        return

    m_filtro.situacao = "6"
    m_filtro.percentual_atual = 0
    m_filtro.save()

    slug_classificador = slugify(m_filtro.nome)

    documentos = m_filtro.documento_set.all()
    qtd_documentos = documentos.count()

    # create the stream file on disk
    nometar = "%s.tar.bz2" % slug_classificador

    numeros_documentos = defaultdict(int)

    with BytesIO() as arquivotar:
        tarfile = TarFile(name=nometar, mode="w", fileobj=arquivotar)

        for contador, documento in enumerate(documentos):
            numero = documento.numero
            numeros_documentos[numero] += 1
            ordem = numeros_documentos[numero]

            with BytesIO() as conteudo_documento:
                conteudo_documento.write(
                    documento.conteudo.encode("latin1", "ignore"))
                conteudo_documento.seek(0)

                if documento.classe_filtro:
                    classe = slugify(documento.classe_filtro.nome)
                else:
                    classe = slugify("Não Identificado")

                if documento.tipo_movimento:
                    tipo = slugify(documento.tipo_movimento.nome)
                else:
                    tipo = "documento"

                tarinfo = TarInfo(name="%s/%s-%s-%s.txt" %
                                  (classe, tipo, numero, ordem))
                tarinfo.size = len(conteudo_documento.getvalue())
                conteudo_documento.seek(0)

                tarfile.addfile(fileobj=conteudo_documento, tarinfo=tarinfo)

            m_filtro.percentual_atual = contador / qtd_documentos * 100
            logger.info("Percentual %s" % m_filtro.percentual_atual)
            m_filtro.save()

        tarfile.close()  # finalize the archive so the end-of-archive blocks are written
        arquivotar.seek(0)
        m_filtro.saida.save(nometar, File(arquivotar))
        m_filtro.situacao = "7"
        m_filtro.save()
Code Example #26
def get_tarinfo(name, buffer: typing.Union[BytesIO, StringIO]):
    tar_info = TarInfo(name=name)
    buffer.seek(0)
    if isinstance(buffer, BytesIO):
        tar_info.size = len(buffer.getbuffer())
    else:
        tar_info.size = len(buffer.getvalue())
    tar_info.mtime = datetime.now().timestamp()
    return tar_info
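
A usage sketch for the helper above (names are placeholders); TarFile.addfile expects a binary file object, so a BytesIO buffer is used here:

    from io import BytesIO
    from tarfile import TarFile

    buf = BytesIO(b'{"ok": true}')
    with TarFile.open("out.tar", mode="w") as tar:
        tar.addfile(get_tarinfo("status.json", buf), buf)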
Code Example #27
 def _dump_meta(self, tar):
     """
     Dump metadata to the archive.
     """
     data = MixedIO()
     dump({'version': __version__}, data)
     info = TarInfo('meta.json')
     info.size = data.rewind()
     tar.addfile(info, data)
Code Example #28
 def _given_resource(self, resource_id: str, name: str, lines: bytes):
     client = self._storage._file_storage._client
     mode = f'w:{Path(resource_id).suffix[1:]}'
     with removing(create_tempfilename(resource_id)) as buffer_path:
         with tarfile_open(buffer_path, mode) as archive:
             tarinfo = TarInfo(name)
             tarinfo.size = len(lines)
             archive.addfile(tarinfo, BytesIO(lines))
         client.upload_object(buffer_path, resource_id)
Code Example #29
File: files.py (Project: simon987/hexlib)
def add_bytes_to_tar(tar: TarFile, filename: str, data: bytes):
    buf = BytesIO()
    buf.write(data)
    buf.flush()
    buf.seek(0)

    info = TarInfo(name=filename)
    info.size = len(data)
    tar.addfile(info, buf)
Code Example #30
 def _given_resource(self, resource_id: str, name: str, lines: bytes):
     client = self._storage._file_storage._client
     buffer = BytesIO()
     with tarfile_open(mode='w:gz', fileobj=buffer) as archive:
         tarinfo = TarInfo(name)
         tarinfo.size = len(lines)
         archive.addfile(tarinfo, BytesIO(lines))
     buffer.seek(0)
     client.upload_object_via_stream(buffer, resource_id)
Code Example #31
    def create_dir(self, path):
        """Create a directory within the tarfile.

        :param path: the path to put the directory at.
        """
        tarinfo = TarInfo(name=path)
        tarinfo.type = DIRTYPE
        tarinfo.mode = 0755
        self._set_defaults(tarinfo)
        self.addfile(tarinfo)
Code Example #32
File: patches.py (Project: kroman0/products)
def writeDataFile( self, filename, text, content_type, subdir=None ):

    """ See IExportContext.
    """
    mod_time = time.time()
    if subdir is not None:
        elements = subdir.split('/')
        parents = filter(None, elements)
        while parents:
            dirname = os.path.join(*parents)
            try:
                self._archive.getmember(dirname+'/')
            except KeyError:
                info = TarInfo(dirname)
                info.size = 0
                info.mode = 509  # i.e. 0o775
                info.mtime = mod_time
                info.type = DIRTYPE
                self._archive.addfile(info, StringIO())
            parents = parents[:-1]

        filename = '/'.join( ( subdir, filename ) )

    stream = StringIO( text )
    info = TarInfo( filename )
    info.size = len( text )
    info.mode = 436  # i.e. 0o664
    info.mtime = mod_time
    self._archive.addfile( info, stream )
Code Example #33
    def write(self, file_name):
        if not self.data or not os.path.isdir(self.data):
            raise Exception('Must set data before building')

        gzfile = GzipFile(file_name, 'w')
        tar = TarFile(fileobj=gzfile, mode='w')

        buff = BytesIO(json.dumps(self.control).encode())
        info = TarInfo(name='./CONTROL')
        info.size = buff.getbuffer().nbytes
        tar.addfile(tarinfo=info, fileobj=buff)

        if self.init is not None:
            buff = BytesIO(self.init.encode())
            info = TarInfo(name='./INIT')
            info.size = buff.getbuffer().nbytes
            tar.addfile(tarinfo=info, fileobj=buff)

        data = BytesIO()
        datatar = TarFile(fileobj=data, mode='w')
        datatar.add(self.data, '/')
        datatar.close()
        data.seek(0)

        info = TarInfo(name='./DATA')
        info.size = data.getbuffer().nbytes
        tar.addfile(tarinfo=info, fileobj=data)

        tar.close()
        gzfile.close()
Code Example #34
File: onos_stage.py (Project: K-OpenNet/ONOS-SSM)
def addString(tar, dest, string):
    if dest not in written_files:
        print dest, string
        info = TarInfo(dest)
        info.size = len(string)
        info.mtime = now
        info.mode = 0777
        file = StringIO(string)
        tar.addfile(info, fileobj=file)
        file.close()
        written_files.add(dest)
Code Example #35
def addBytes(tar, dest, bytes):
    if dest not in written_files:
        # print dest, string
        info = TarInfo(dest)
        info.size = len(bytes)
        info.mtime = now
        info.mode = 0o777
        file = BytesIO(bytes)
        tar.addfile(info, fileobj=file)
        file.close()
        written_files.add(dest)
Code Example #36
File: tarcms.py (Project: yasusii/fooling)
 def create_article(self, data, info=None):
   if not self._mode: raise TarCMS.TarCMSError('not open: %r' % self)
   if info is None:
     info = TarInfo()
   assert isinstance(info, TarInfo)
   aid = '%08x' % self._artdb.nextrecno()
   info.name = aid+info.name
   tid = self._add_corpus(info, data)
   assert aid == tid
   self._artdb.add_record(tid)
   return aid
Code Example #37
File: test_sync.py (Project: OPWEN/opwen-webapp)
    def given_download(self, payload: Dict[str, bytes], compression: str):
        resource_id = '{}.tar.{}'.format(uuid4(), compression)
        download_filename = join(self._content_root, resource_id)

        with self.sync._open(download_filename, 'w') as archive:
            for filename, content in payload.items():
                tarinfo = TarInfo(filename)
                tarinfo.size = len(content)
                archive.addfile(tarinfo, BytesIO(content))

        self.email_server_client_mock.download.return_value = resource_id
Code Example #38
    def create_file_from_string(self, filename, content):
        """Create a file with the contents passed as a string.

        :param filename: the path to put the file at inside the
            tarfile.
        :param content: the content to put in the created file.
        """
        tarinfo = TarInfo(name=filename)
        tarinfo.size = len(content)
        self._set_defaults(tarinfo)
        fileobj = StringIO(content)
        self.addfile(tarinfo, fileobj=fileobj)
Code Example #39
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.
        """
        if subdir is not None:
            filename = '/'.join( ( subdir, filename ) )

        stream = StringIO( text )
        info = TarInfo( filename )
        info.size = len( text )
        info.mtime = time.time()
        self._archive.addfile( info, stream )
Code Example #40
File: archive.py (Project: Adnn/django-archive)
    def _dump_db(self, tar):
        """
        Dump the rows in each model to the archive.
        """

        # Dump the tables to a MixedIO
        data = MixedIO()
        call_command('dumpdata', all=True, format='json', indent=self.attr.get('ARCHIVE_DB_INDENT'),
                                 exclude=self.attr.get('ARCHIVE_EXCLUDE'), stdout=data)
        info = TarInfo(DB_DUMP)
        info.size = data.rewind()
        tar.addfile(info, data)
Code Example #41
 def _addMember(filename, data, modtime):
     from tarfile import DIRTYPE
     parents = filename.split('/')[:-1]
     while parents:
         path = '/'.join(parents) + '/'
         if path not in archive.getnames():
             info = TarInfo(path)
             info.type = DIRTYPE
             info.mtime = modtime
             archive.addfile(info)
         parents.pop()
     _addOneMember(filename, data, modtime)
Code Example #42
File: test_context.py (Project: goschtl/zope)
 def _addMember(path, data, modtime):
     from tarfile import DIRTYPE
     elements = path.split('/')
     parents = filter(None, [elements[x] for x in range(len(elements))])
     for parent in parents:
         info = TarInfo()
         info.name = parent
         info.size = 0
         info.mtime = mod_time
         info.type = DIRTYPE
         archive.addfile(info, StringIO())
     _addOneMember(path, data, modtime)
Code Example #43
File: forge_client.py (Project: 2php/veles)
 def write_package():
     tbs = ForgeClient.UPLOAD_TAR_BUFFER_SIZE
     with TarFile.open(mode="w|gz", fileobj=body, bufsize=tbs,
                       dereference=True) as tar:
         for file in files:
             self.debug("Sending %s", file)
             ti = TarInfo(file)
             fp = os.path.join(self.path, file)
             ti.size = os.path.getsize(fp)
             ti.mode = 0o666
             with open(fp, "rb") as fd:
                 tar.addfile(ti, fileobj=fd)
     body.close()
Code Example #44
File: context.py (Project: c0ns0le/zenoss-4)
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.
        """
        if subdir is not None:
            filename = '/'.join( ( subdir, filename ) )

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0755) == '-rwxr-xr-x'
                info.mode = 0755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, basestring):
            stream = StringIO(text)
            info.size = len(text)
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator, 
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile( info, stream )
Code Example #45
File: context.py (Project: pigaov10/plone4.3)
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.
        """
        if subdir is not None:
            filename = '/'.join( ( subdir, filename ) )

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0755) == '-rwxr-xr-x'
                info.mode = 0755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, str):
            stream = StringIO(text)
            info.size = len(text)
        elif isinstance(text, unicode):
            raise ValueError("Unicode text is not supported, even if it only "
                             "contains ascii. Please encode your data. See "
                             "GS 1.7.0 changes for more")
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator, 
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile( info, stream )
Code Example #46
File: archive.py (Project: Adnn/django-archive)
 def _dump_meta(self, tar):
     """
     Dump metadata to the archive.
     """
     data = MixedIO()
     meta_dict = OrderedDict((
         ('version', __version__),
         ('db_file', DB_DUMP),
         ('media_folder', MEDIA_DIR),
         ('settings', self.attr.settings_dict()),
     ))
     dump(meta_dict, data, indent=2)
     info = TarInfo(META_DUMP)
     info.size = data.rewind()
     tar.addfile(info, data)
Code Example #47
File: tardb.py (Project: yasusii/shaling)
 def test_change_info(self):
   # writing
   db = TarDB(dirname).open('r+')
   data_foo = '123'
   mtime = 12345
   info = TarInfo('foo')
   info.mtime = mtime
   db.add_record(info, data_foo)
   db.close()
   # reading
   db = TarDB(dirname).open('r+')
   info = db[0]
   self.assertEqual(info.mtime, mtime)
   db[0] = info
   db.close()
   return
Code Example #48
File: docker.py (Project: MaddTheSane/qemu)
    def run(self, args, argv):
        # Create a temporary tarball with our whole build context and
        # dockerfile for the update
        tmp = tempfile.NamedTemporaryFile(suffix="dckr.tar.gz")
        tmp_tar = TarFile(fileobj=tmp, mode='w')

        # Add the executable to the tarball, using the current
        # configured binfmt_misc path. If we don't get a path then we
        # only need the support libraries copied
        ff, enabled = _check_binfmt_misc(args.executable)

        if not enabled:
            print("binfmt_misc not enabled, update disabled")
            return 1

        if ff:
            tmp_tar.add(args.executable, arcname=ff)

        # Add any associated libraries
        libs = _get_so_libs(args.executable)
        if libs:
            for l in libs:
                tmp_tar.add(os.path.realpath(l), arcname=l)

        # Create a Docker buildfile
        df = StringIO()
        df.write("FROM %s\n" % args.tag)
        df.write("ADD . /\n")
        df.seek(0)

        df_tar = TarInfo(name="Dockerfile")
        df_tar.size = len(df.buf)  # .buf is a Python 2 StringIO attribute; Python 3 would use len(df.getvalue())
        tmp_tar.addfile(df_tar, fileobj=df)

        tmp_tar.close()

        # reset the file pointers
        tmp.flush()
        tmp.seek(0)

        # Run the build with our tarball context
        dkr = Docker()
        dkr.update_image(args.tag, tmp, quiet=args.quiet)

        return 0
Code Example #49
File: backup.py (Project: qij3/2buntu-Django-Blog)
 def handle(self, *args, **kwargs):
     """
     Process the command.
     """
     tar = TarFile.open(datetime.today().strftime("2buntu-backup-%Y-%m-%d-%H-%M-%S.tar.bz2"), "w:bz2")
     for name in self.DATABASE_MODELS:
         f = MixedIO()
         call_command("dumpdata", name, format="json", stdout=f)
         info = TarInfo("%s.json" % name.split(".")[1])
         info.size = f.rewind()
         tar.addfile(info, f)
     for model in self.IMAGE_MODELS:
         for item in model.objects.all():
             if item.image:
                 info = TarInfo(item.image.name)
                 info.size = item.image.size
                 tar.addfile(info, item.image)
     self.stdout.write("Backup completed.")
Code Example #50
File: archive.py (Project: davinirjr/django-archive)
    def _dump_db(self, tar):
        """
        Dump the rows in each model to the archive.
        """

        # Determine the list of models to exclude
        exclude = getattr(settings, 'ARCHIVE_EXCLUDE', (
            'auth.Permission',
            'contenttypes.ContentType',
            'sessions.Session',
        ))

        # Dump the tables to a MixedIO
        data = MixedIO()
        call_command('dumpdata', all=True, format='json', exclude=exclude, stdout=data)
        info = TarInfo('data.json')
        info.size = data.rewind()
        tar.addfile(info, data)
Code Example #51
File: tardb.py (Project: yasusii/shaling)
 def test_tar_compatibility(self):
   # writing
   db = TarDB(dirname).open('r+')
   data_foo = '123'
   mtime = 12345
   info = TarInfo('foo')
   info.mtime = mtime
   db.add_record(info, data_foo)
   db.close()
   # reading with tarfile
   import tarfile
   tar = tarfile.TarFile(os.path.join(dirname, 'db00000.tar'))
   info = tar.next()
   data = tar.extractfile(info).read()
   self.assertEqual(data, data_foo)
   self.assertEqual(len(data), info.size)
   self.assertEqual(info.mtime, mtime)
   tar.close()
   return
Code Example #52
File: multitraverser.py (Project: ableeb/WebOOT)
    def tarfile(self, format, filename, content_type):
        from .root.histogram import Histogram
        from .combination import Combination
        imgformat = "eps"

        tarred_contents = StringIO()
        with closing(open_tar(mode="w" + format, fileobj=tarred_contents)) as tar:
            for key, context in self.resource_to_render.indexed_contexts:
                if not context_renderable_as(context, imgformat):
                    continue
                name = "/".join(map(str, key))
                content = context.rendered(imgformat).content.body

                info = TarInfo(name=name + "." + imgformat)
                info.size = len(content)
                tar.addfile(tarinfo=info, fileobj=StringIO(content))

        return Response(tarred_contents.getvalue(), content_type=content_type,
                        content_disposition=("Content-Disposition: attachment; filename={0};"
                                             .format(filename)))
Code Example #53
File: tarball.py (Project: seblu/installsystems)
 def add_str(self, name, content, ftype, mode, mtime=None,
             uid=None, gid=None, uname=None, gname=None):
     '''
     Add a string in memory as a file in tarball
     '''
     if isinstance(name, unicode):
         name = name.encode("UTF-8")
     ti = TarInfo(name)
     # set tarinfo attribute
     for v in ("name", "ftype", "mode", "mtime", "uid", "gid", "uname", "gname"):
         if vars()[v] is not None:
             vars(ti)[v] = vars()[v]
     # set mtime to current if not specified
     if mtime is None:
         ti.mtime = int(time())
      # unicode chars are encoded in UTF-8, as the changelog must be in UTF-8
     if isinstance(content, unicode):
         content = content.encode("UTF-8")
     ti.size = len(content) if content is not None else 0
     self.addfile(ti, StringIO(content))
Code Example #54
File: docker.py (Project: Pating/qemu)
    def run(self, args, argv):
        # Create a temporary tarball with our whole build context and
        # dockerfile for the update
        tmp = tempfile.NamedTemporaryFile(suffix="dckr.tar.gz")
        tmp_tar = TarFile(fileobj=tmp, mode='w')

        # Add the executable to the tarball
        bn = os.path.basename(args.executable)
        ff = "/usr/bin/%s" % bn
        tmp_tar.add(args.executable, arcname=ff)

        # Add any associated libraries
        libs = _get_so_libs(args.executable)
        if libs:
            for l in libs:
                tmp_tar.add(os.path.realpath(l), arcname=l)

        # Create a Docker buildfile
        df = StringIO()
        df.write("FROM %s\n" % args.tag)
        df.write("ADD . /\n")
        df.seek(0)

        df_tar = TarInfo(name="Dockerfile")
        df_tar.size = len(df.buf)  # .buf is a Python 2 StringIO attribute; Python 3 would use len(df.getvalue())
        tmp_tar.addfile(df_tar, fileobj=df)

        tmp_tar.close()

        # reset the file pointers
        tmp.flush()
        tmp.seek(0)

        # Run the build with our tarball context
        dkr = Docker()
        dkr.update_image(args.tag, tmp, quiet=args.quiet)

        return 0
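
Most of the snippets above follow the same in-memory pattern: build a TarInfo with an explicit size, pair it with a seekable binary buffer, and hand both to addfile. A condensed, self-contained sketch of that pattern:

    import io
    import tarfile
    import time

    payload = b"example payload"

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        info = tarfile.TarInfo(name="payload.txt")
        info.size = len(payload)      # addfile() trusts this value, so it must match the data exactly
        info.mtime = int(time.time())
        tar.addfile(info, io.BytesIO(payload))

    tar_bytes = buf.getvalue()        # complete archive, ready to write out or upload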