コード例 #1
0
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.

        Add *text* to the tar archive under *filename* (optionally
        prefixed with *subdir*), creating explicit directory members
        for any missing parents first.
        """
        if subdir is not None:
            filename = '%s/%s' % (subdir, filename)

        # Ensure every ancestor directory has a tar member, deepest first.
        components = filename.split('/')[:-1]
        for depth in range(len(components), 0, -1):
            dir_path = '/'.join(components[:depth]) + '/'
            if dir_path not in self._archive.getnames():
                dir_info = TarInfo(dir_path)
                dir_info.type = DIRTYPE
                # tarfile.filemode(0o755) == '-rwxr-xr-x'
                dir_info.mode = 0o755
                dir_info.mtime = time.time()
                self._archive.addfile(dir_info)

        info = TarInfo(filename)
        if isinstance(text, six.text_type):
            # Text must be serialised to bytes before it can be tarred.
            text = text.encode(self.getEncoding() or 'utf-8')

        if isinstance(text, six.binary_type):
            stream = BytesIO(text)
            info.size = len(text)
        else:
            # Assume text behaves like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator
            # (exposes .file and .size), as in the case of ATFile.
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile(info, stream)
コード例 #2
0
ファイル: context.py プロジェクト: c0ns0le/zenoss-4
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.

        Write *text* into the tar archive as *filename* (below *subdir*
        if given), creating a directory member for each missing parent.
        Python 2 code: *text* may be any ``basestring``.
        """
        if subdir is not None:
            filename = '/'.join( ( subdir, filename ) )

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0o755) == '-rwxr-xr-x'
                # 0o755 replaces the legacy literal 0755 (a syntax error
                # on Python 3); the permission value is unchanged.
                info.mode = 0o755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, basestring):
            stream = StringIO(text)
            info.size = len(text)
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator,
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile( info, stream )
コード例 #3
0
ファイル: csvw.py プロジェクト: GSS-Cogs/gss-utils
def run_csvlint(context):
    """Run csvlint (gsscogs/csvlint Docker image) against the context's
    CSV and CSVW metadata streams.

    Returns a ``(response, logs)`` tuple: the container's ``wait()``
    result and its decoded output.
    """
    client = docker.from_env()
    csvlint = client.containers.create(
        'gsscogs/csvlint',
        command=f'csvlint -s /tmp/{context.metadata_filename}')
    archive = BytesIO()
    # Encode up front so the TarInfo size is the UTF-8 *byte* length.
    # The original sized members from tell() on a text stream, which
    # counts characters and corrupts the tar for non-ASCII content.
    context.metadata_io.seek(0)
    metadata_bytes = context.metadata_io.read().encode('utf-8')
    context.csv_io.seek(0)
    csv_bytes = context.csv_io.read().encode('utf-8')
    with TarFile(fileobj=archive, mode='w') as t:
        tis = TarInfo(str(context.metadata_filename))
        tis.size = len(metadata_bytes)
        tis.mtime = time.time()
        t.addfile(tis, BytesIO(metadata_bytes))
        tic = TarInfo(str(context.csv_filename))
        tic.size = len(csv_bytes)
        tic.mtime = time.time()
        t.addfile(tic, BytesIO(csv_bytes))
        if hasattr(context, 'codelists'):
            t.add(Path('features') / 'fixtures' / context.codelists,
                  arcname=context.codelists)
    archive.seek(0)
    csvlint.put_archive('/tmp/', archive)
    csvlint.start()
    response = csvlint.wait()
    # Fetch the logs once; each logs() call is a Docker API round trip.
    logs = csvlint.logs().decode('utf-8')
    sys.stdout.write(logs)
    return (response, logs)
コード例 #4
0
ファイル: patches.py プロジェクト: kroman0/products
def writeDataFile( self, filename, text, content_type, subdir=None ):

    """ See IExportContext.

    Write *text* into the tar archive as *filename*, first creating a
    directory member for each missing parent taken from *subdir*.
    Python 2 code: relies on filter() returning a list and on StringIO
    holding byte strings.
    """
    mod_time = time.time()
    if subdir is not None:
        elements = subdir.split('/')
        parents = filter(None, elements)   # drop empty path components
        while parents:
            dirname = os.path.join(*parents)
            try:
                # getmember() raises KeyError when the directory entry
                # is not in the archive yet.
                self._archive.getmember(dirname+'/')
            except KeyError:
                info = TarInfo(dirname)
                info.size = 0
                info.mode = 509            # decimal for 0o775 (rwxrwxr-x)
                info.mtime = mod_time
                info.type = DIRTYPE
                self._archive.addfile(info, StringIO())
            parents = parents[:-1]         # walk up one directory level

        filename = '/'.join( ( subdir, filename ) )

    stream = StringIO( text )
    info = TarInfo( filename )
    info.size = len( text )
    info.mode = 436                        # decimal for 0o664 (rw-rw-r--)
    info.mtime = mod_time
    self._archive.addfile( info, stream )
コード例 #5
0
def run_ics(group: str, turtle: bytes, extra_files: List[str] = (), extra_data: List[str] = ()):
    """Run the gdp-sparql-tests Docker image for test *group*.

    :param group: sub-directory of /usr/local/tests to run
    :param turtle: primary data as Turtle bytes (becomes /tmp/data.ttl)
    :param extra_files: fixture file names copied from features/fixtures/extra
    :param extra_data: additional Turtle documents (strings)
    :return: the container's exit status code
    """
    client = docker.from_env()
    files = ['data.ttl']
    files.extend(extra_files)
    # BUG FIX: the extra_data file names must be known *before* the
    # container command is built; the original appended them while
    # writing the tar, after the command string was already rendered,
    # so those files were copied in but never passed to the runner.
    extra_data_names = [f'extra_{i}.ttl' for i in range(len(extra_data))]
    files.extend(extra_data_names)
    tests = client.containers.create(
        'gsscogs/gdp-sparql-tests',
        command=f'''sparql-test-runner -t /usr/local/tests/{group} -p dsgraph='<urn:x-arq:DefaultGraph>' '''
                f'''{" ".join('/tmp/' + f for f in files)}'''
    )
    archive = BytesIO()
    with TarFile(fileobj=archive, mode='w') as t:
        ttl = TarInfo('data.ttl')
        ttl.size = len(turtle)
        ttl.mtime = time.time()
        t.addfile(ttl, BytesIO(turtle))
        for filename in extra_files:
            actual_path = Path('features') / 'fixtures' / 'extra' / filename
            with actual_path.open('rb') as actual_file:
                extra_file = t.gettarinfo(arcname=filename, fileobj=actual_file)
                t.addfile(extra_file, actual_file)
        for filename, add_turtle in zip(extra_data_names, extra_data):
            # Size from the encoded payload: a character count would
            # under-report for non-ASCII Turtle and corrupt the tar.
            payload = add_turtle.encode('utf-8')
            add_ttl = TarInfo(filename)
            add_ttl.size = len(payload)
            add_ttl.mtime = time.time()
            t.addfile(add_ttl, BytesIO(payload))
    archive.seek(0)
    tests.put_archive('/tmp/', archive)
    tests.start()
    response = tests.wait()
    sys.stdout.write(tests.logs().decode('utf-8'))
    return response['StatusCode']
コード例 #6
0
ファイル: context.py プロジェクト: pigaov10/plone4.3
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.

        Add *text* (a byte str; unicode is rejected) to the archive as
        *filename*, creating members for any missing parent directories.
        """
        if subdir is not None:
            filename = '/'.join( ( subdir, filename ) )

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0o755) == '-rwxr-xr-x'
                # 0o755 replaces the legacy 0755 literal (a Python 3
                # syntax error); the permission value is unchanged.
                info.mode = 0o755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, str):
            stream = StringIO(text)
            info.size = len(text)
        elif isinstance(text, unicode):
            raise ValueError("Unicode text is not supported, even if it only "
                             "contains ascii. Please encode your data. See "
                             "GS 1.7.0 changes for more")
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator,
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile( info, stream )
コード例 #7
0
ファイル: context.py プロジェクト: bendavis78/zope
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.

        Add *text* (a byte str; unicode is rejected) to the archive as
        *filename*, creating members for any missing parent directories.
        """
        if subdir is not None:
            filename = '/'.join((subdir, filename))

        parents = filename.split('/')[:-1]
        while parents:
            path = '/'.join(parents) + '/'
            if path not in self._archive.getnames():
                info = TarInfo(path)
                info.type = DIRTYPE
                # tarfile.filemode(0o755) == '-rwxr-xr-x'
                # 0o755 replaces the legacy 0755 literal (a Python 3
                # syntax error); the permission value is unchanged.
                info.mode = 0o755
                info.mtime = time.time()
                self._archive.addfile(info)
            parents.pop()

        info = TarInfo(filename)
        if isinstance(text, str):
            stream = StringIO(text)
            info.size = len(text)
        elif isinstance(text, unicode):
            raise ValueError("Unicode text is not supported, even if it only "
                             "contains ascii. Please encode your data")
        else:
            # Assume text is an instance of a class like
            # Products.Archetypes.WebDAVSupport.PdataStreamIterator,
            # as in the case of ATFile
            stream = text.file
            info.size = text.size
        info.mtime = time.time()
        self._archive.addfile(info, stream)
コード例 #8
0
def writeDataFile(self, filename, text, content_type, subdir=None):
    """ See IExportContext.

    Write *text* into the tar archive as *filename*, first creating a
    directory member for each missing parent taken from *subdir*.
    Python 2 code: filter() must return a list and StringIO holds byte
    strings.
    """
    mod_time = time.time()
    if subdir is not None:
        elements = subdir.split('/')
        parents = filter(None, elements)  # drop empty path components
        while parents:
            dirname = os.path.join(*parents)
            try:
                # KeyError means the directory entry is not in the
                # archive yet.
                self._archive.getmember(dirname + '/')
            except KeyError:
                info = TarInfo(dirname)
                info.size = 0
                info.mode = 509           # decimal for 0o775 (rwxrwxr-x)
                info.mtime = mod_time
                info.type = DIRTYPE
                self._archive.addfile(info, StringIO())
            parents = parents[:-1]        # walk up one directory level

        filename = '/'.join((subdir, filename))

    stream = StringIO(text)
    info = TarInfo(filename)
    info.size = len(text)
    info.mode = 436                       # decimal for 0o664 (rw-rw-r--)
    info.mtime = mod_time
    self._archive.addfile(info, stream)
コード例 #9
0
def step_impl(context):
    """Validate the context's CSV against its schema with the csvlint
    Docker image and assert the container exits with status 0.
    """
    client = docker.from_env()
    csvlint = client.containers.create(
        'gsscogs/csvlint',
        command=f'csvlint -s /tmp/{context.schema_filename}'
    )
    archive = BytesIO()
    # Encode up front so TarInfo.size is the UTF-8 *byte* length.  The
    # original sized members from tell() on a text stream, which counts
    # characters and corrupts the tar for non-ASCII content.
    schema_bytes = context.schema_io.getvalue().encode('utf-8')
    csv_bytes = context.csv_io.getvalue().encode('utf-8')
    context.schema_io.seek(0)
    context.csv_io.seek(0)
    with TarFile(fileobj=archive, mode='w') as t:
        tis = TarInfo(str(context.schema_filename))
        tis.size = len(schema_bytes)
        tis.mtime = time.time()
        t.addfile(tis, BytesIO(schema_bytes))
        tic = TarInfo(str(context.csv_filename))
        tic.size = len(csv_bytes)
        tic.mtime = time.time()
        t.addfile(tic, BytesIO(csv_bytes))
    archive.seek(0)
    csvlint.put_archive('/tmp/', archive)
    csvlint.start()
    response = csvlint.wait()
    sys.stdout.write(csvlint.logs().decode('utf-8'))
    assert_equal(response['StatusCode'], 0)
コード例 #10
0
 def _add_entry(self, name, type, mode, size, data):
     """Append a single member to ``self._tarfile``.

     ``type``, ``mode`` and ``size`` populate the TarInfo header;
     ``data`` is a file-like object supplying ``size`` bytes of content.
     """
     info = TarInfo(name)
     info.type = type
     info.mode = mode
     info.size = size
     # Stamp the entry with the current wall-clock time.
     info.mtime = time.time()
     self._tarfile.addfile(info, data)
コード例 #11
0
def GetTarInfo(filename, filetype=DIRTYPE, mode=0o755):
    """Create information for tar files.

    :param filename: path whose basename becomes the member name
    :param filetype: tar member type (defaults to a directory entry)
    :param mode: permission bits; 0o755 replaces the legacy ``0755``
        literal, which is a syntax error on Python 3 (same value)
    :return: a populated TarInfo stamped with the current time
    """
    tarinfo = TarInfo(path.basename(filename))
    tarinfo.type = filetype
    tarinfo.mode = mode
    tarinfo.mtime = time()
    return tarinfo
コード例 #12
0
ファイル: tar_utils.py プロジェクト: containers/podman-py
    def add_filter(info: tarfile.TarInfo) -> Optional[tarfile.TarInfo]:
        """Filter files targeted to be added to tarfile.

        Args:
            info: Information on the file targeted to be added

        Returns:
            None: if file is not to be added
            TarInfo: when file is to be added. Modified as needed.

        Notes:
            exclude is captured from parent
        """

        # Only regular files, directories and symlinks are shipped.
        if not (info.isfile() or info.isdir() or info.issym()):
            return None

        if _exclude_matcher(info.name, exclude):
            return None

        # Workaround https://bugs.python.org/issue32713. Fixed in Python 3.7
        if info.mtime < 0 or info.mtime > 8 ** 11 - 1:
            info.mtime = int(info.mtime)

        # do not leak client information to service
        info.uid = 0
        info.uname = info.gname = "root"

        if sys.platform == "win32":
            # NOTE(review): Windows stat() modes are unreliable; this masks
            # to 0o755 and forces the execute bits -- confirm intent.
            info.mode = info.mode & 0o755 | 0o111

        return info
コード例 #13
0
ファイル: tar2xml.py プロジェクト: plison/OpenSubtitles2015
def _addToArchive(output, filename, archive):
    output.seek(0)
    xmlInfo = TarInfo(filename)
    xmlInfo.size = len(output.getbuffer())
    xmlInfo.mtime = time.time()
    archive.addfile(xmlInfo,output)    
    output.close()
コード例 #14
0
ファイル: engine.py プロジェクト: yanyu510/splitgraph
def copy_to_container(container: "Container", source_path: str,
                      target_path: str) -> None:
    """
    Copy a file into a Docker container

    :param container: Container object
    :param source_path: Source file path
    :param target_path: Target file path (in the container)
    :return:
    """
    # https://github.com/docker/docker-py/issues/1771
    with open(source_path, "rb") as src:
        payload = src.read()

    info = TarInfo(name=os.path.basename(target_path))
    info.size = len(payload)
    info.mtime = int(time.time())

    # Wrap the payload in a single-member tar archive, since
    # put_archive only accepts tar data.
    buffer = BytesIO()
    with TarFile(fileobj=buffer, mode="w") as tar:
        tar.addfile(info, BytesIO(payload))

    buffer.seek(0)
    container.put_archive(path=os.path.dirname(target_path),
                          data=buffer.read())
コード例 #15
0
def zip2tar(zip_file: str, tar_file, tar_mode: Optional[str] = 'w:gz'):
    """Convert a zip archive into a tar archive.

    :param zip_file: zip file path
    :param tar_file:
                      IO(_io.IOBase): file obj
    :param tar_mode:  ref `tarfile.TarFile.open`
    :return:
    """
    zip_obj = ZipFile(file=zip_file, mode='r')
    tar_obj = TarFile.open(fileobj=tar_file, mode=tar_mode)

    # The original wrapped this loop in ``except Exception as e: raise``,
    # which is a no-op; ``finally`` alone guarantees both archives close.
    try:
        for zip_info in zip_obj.infolist():
            tar_info = TarInfo(name=zip_info.filename)
            tar_info.size = zip_info.file_size
            tar_info.mtime = datetime.now().timestamp()
            # https://stackoverflow.com/a/434689/11722440
            tar_info.mode = zip_info.external_attr >> 16

            # https://stackoverflow.com/a/18432983/11722440
            # https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
            # TODO whg fix other file (like symbolic link) in zip to regular file in tar file
            if zip_info.filename.endswith('/'):
                tar_info.type = tarfile.DIRTYPE
            else:
                tar_info.type = tarfile.REGTYPE

            infile = zip_obj.open(zip_info.filename)
            tar_obj.addfile(tar_info, infile)
    finally:
        tar_obj.close()
        zip_obj.close()
コード例 #16
0
def _clean_info(
    root: Optional[str], epoch: int, info: tarfile.TarInfo
) -> tarfile.TarInfo:
    """
    Remove variable data from an archive entry.

    :param root: absolute path to the root directory from which the
        entry was added, or None to disable turning the name into a
        relative path
    :param epoch: fixed modification time to set
    :param info: tarinfo object to set
    :returns: changed tarinfo
    """
    if root is not None:
        info.name = os.path.relpath("/" + info.name, root)

    if not info.name.startswith("."):
        info.name = "./" + info.name

    info.uid = 0
    info.gid = 0
    info.uname = ""
    info.gname = ""
    info.mtime = epoch

    return info
コード例 #17
0
ファイル: __init__.py プロジェクト: nilp0inter/oniontree
    def put(self, content, filename="", file_hash=""):
        """Store *content* in the hashed tar tree.

        Exactly one of *filename* / *file_hash* must identify the entry:
        when *filename* is given, its hash is computed from the file on
        disk.  Returns the hash under which the content was stored.
        Raises ValueError for duplicate members or when an external
        compressor is configured.  Python 2 code (StringIO, ``.len``).
        """

        if not filename and not file_hash:
            raise ValueError('Filename or FileHash is mandatory')

        if filename:
            # File accesibility
            if not os.path.exists(filename):
                raise IOError('Unaccesible file %s', filename)

            # Calc hash
            file_hash = self._get_hash(filename)

        if not file_hash:
            raise ValueError('Hash of file is mandatory')

        # Get file path for hash
        # NOTE: the local name ``tarfile`` shadows the stdlib module here.
        path, tarfile, hashed_filename = self._get_path(file_hash)

        # Create file path; ignore "already exists" style failures.
        # WindowsError only exists on Windows/Python 2.
        try:
            os.makedirs(path)
        except WindowsError:
            pass
        except OSError:
            pass

        # Open tarfile
        if self.external_compressor:
            # External compressor is not suited for adding files.
            raise ValueError('You cannot use external compressor for write files')

        with TarFile.open(name=os.path.join(path, tarfile), mode='a') as tar:
            with FileLock(os.path.join(path, tarfile)) as lock:
                # Test if file already exists into tarfile
                try:
                    tar.getmember(hashed_filename)
                    raise ValueError('Member already exists')
                except KeyError:
                    pass
                except:
                    raise

                data = self.encoder.encode(content)
                if self.internal_compressor:
                    data = self.internal_compressor.compress(data)

                data_file = StringIO(data)

                mtime = time.time()
                ti = TarInfo(hashed_filename)
                # Python 2 StringIO exposes ``.len`` (total buffer size).
                ti.size = data_file.len
                ti.mtime = mtime

                tar.addfile(tarinfo=ti, fileobj=data_file)

                # Redundant: the ``with`` statement also closes the archive.
                tar.close()

        return file_hash
コード例 #18
0
    def move_certs(self, paths):
        """Stage internal SSL certificate files into the certs volume.

        *paths* maps logical keys to certificate file paths on the hub
        host.  The files are packed into an in-memory tar archive and
        unpacked at /certs/ inside a short-lived helper container that
        mounts the volume.  Returns a dict mapping the same keys to the
        in-container paths ('/certs/<basename>').  Runs as a coroutine:
        every Docker API call is awaited via ``yield self.docker(...)``.
        """
        self.log.info("Staging internal ssl certs for %s", self._log_name)
        yield self.pull_image(self.move_certs_image)
        # create the volume
        volume_name = self.format_volume_name(self.certs_volume_name, self)
        # create volume passes even if it already exists
        self.log.info("Creating ssl volume %s for %s", volume_name, self._log_name)
        yield self.docker('create_volume', volume_name)

        # create a tar archive of the internal cert files
        # docker.put_archive takes a tarfile and a running container
        # and unpacks the archive into the container
        nb_paths = {}
        tar_buf = BytesIO()
        archive = TarFile(fileobj=tar_buf, mode='w')
        for key, hub_path in paths.items():
            fname = os.path.basename(hub_path)
            nb_paths[key] = '/certs/' + fname
            with open(hub_path, 'rb') as f:
                content = f.read()
            tarinfo = TarInfo(name=fname)
            tarinfo.size = len(content)
            # preserve the original file's modification time
            tarinfo.mtime = os.stat(hub_path).st_mtime
            # 0o644: owner read/write, group/other read-only
            tarinfo.mode = 0o644
            archive.addfile(tarinfo, BytesIO(content))
        archive.close()
        tar_buf.seek(0)

        # run a container to stage the certs,
        # mounting the volume at /certs/
        host_config = self.client.create_host_config(
            binds={
                volume_name: {"bind": "/certs", "mode": "rw"},
            },
        )
        container = yield self.docker('create_container',
            self.move_certs_image,
            volumes=["/certs"],
            host_config=host_config,
        )

        container_id = container['Id']
        self.log.debug(
            "Container %s is creating ssl certs for %s",
            container_id[:12], self._log_name,
        )
        # start the container
        yield self.docker('start', container_id)
        # stage the archive to the container
        try:
            yield self.docker(
                'put_archive',
                container=container_id,
                path='/certs',
                data=tar_buf,
            )
        finally:
            # always clean up the helper container, even if staging fails
            yield self.docker('remove_container', container_id)
        return nb_paths
コード例 #19
0
    def move_certs(self, paths):
        """Stage internal SSL certificate files into the certs volume.

        *paths* maps logical keys to certificate file paths on the hub
        host.  The files are packed into an in-memory tar archive and
        unpacked at /certs/ inside a short-lived helper container that
        mounts the volume.  Returns a dict mapping the same keys to the
        in-container paths ('/certs/<basename>').  Runs as a coroutine:
        every Docker API call is awaited via ``yield self.docker(...)``.
        """
        self.log.info("Staging internal ssl certs for %s", self._log_name)
        yield self.pull_image(self.move_certs_image)
        # create the volume
        volume_name = self.format_volume_name(self.certs_volume_name, self)
        # create volume passes even if it already exists
        self.log.info("Creating ssl volume %s for %s", volume_name, self._log_name)
        yield self.docker('create_volume', volume_name)

        # create a tar archive of the internal cert files
        # docker.put_archive takes a tarfile and a running container
        # and unpacks the archive into the container
        nb_paths = {}
        tar_buf = BytesIO()
        archive = TarFile(fileobj=tar_buf, mode='w')
        for key, hub_path in paths.items():
            fname = os.path.basename(hub_path)
            nb_paths[key] = '/certs/' + fname
            with open(hub_path, 'rb') as f:
                content = f.read()
            tarinfo = TarInfo(name=fname)
            tarinfo.size = len(content)
            # preserve the original file's modification time
            tarinfo.mtime = os.stat(hub_path).st_mtime
            # 0o644: owner read/write, group/other read-only
            tarinfo.mode = 0o644
            archive.addfile(tarinfo, BytesIO(content))
        archive.close()
        tar_buf.seek(0)

        # run a container to stage the certs,
        # mounting the volume at /certs/
        host_config = self.client.create_host_config(
            binds={
                volume_name: {"bind": "/certs", "mode": "rw"},
            },
        )
        container = yield self.docker('create_container',
            self.move_certs_image,
            volumes=["/certs"],
            host_config=host_config,
        )

        container_id = container['Id']
        self.log.debug(
            "Container %s is creating ssl certs for %s",
            container_id[:12], self._log_name,
        )
        # start the container
        yield self.docker('start', container_id)
        # stage the archive to the container
        try:
            yield self.docker(
                'put_archive',
                container=container_id,
                path='/certs',
                data=tar_buf,
            )
        finally:
            # always clean up the helper container, even if staging fails
            yield self.docker('remove_container', container_id)
        return nb_paths
コード例 #20
0
ファイル: onos_stage.py プロジェクト: K-OpenNet/ONOS-SSM
def addFile(tar, dest, file, file_size):
    """Add *file* (an open stream of *file_size* bytes) to *tar* as *dest*.

    Skips paths already recorded in the module-level ``written_files``
    set; members are stamped with the module-level ``now`` timestamp.
    """
    if dest not in written_files:
        info = TarInfo(dest)
        info.size = file_size
        info.mtime = now
        # 0o777 replaces the legacy 0777 literal (a Python 3 syntax
        # error); the permission value (rwxrwxrwx) is unchanged.
        info.mode = 0o777
        tar.addfile(info, fileobj=file)
        written_files.add(dest)
コード例 #21
0
ファイル: mksfx.py プロジェクト: nxt3-org/nxt3
 def file_filter(info: tarfile.TarInfo):
     """Normalise a tar entry for reproducible output: fixed permission
     bits, epoch-zero mtime, regular-file type, root ownership and no
     PAX headers.  ``executable`` is captured from the enclosing scope
     and selects 0o777 vs 0o666 permissions.
     """
     info.mode = 0o00777 if executable else 0o00666
     info.mtime = 0
     # Force every entry to a regular file (symlinks etc. flattened).
     info.type = tarfile.REGTYPE
     info.uid = info.gid = 0
     info.uname = info.gname = "root"
     info.pax_headers = {}
     return info
コード例 #22
0
def addFile(tar, dest, file, file_size):
    """Write *file* into *tar* as *dest*, unless that path was already
    recorded in the module-level ``written_files`` set."""
    if dest in written_files:
        return
    entry = TarInfo(dest)
    entry.size = file_size
    entry.mtime = now
    entry.mode = 0o777
    tar.addfile(entry, fileobj=file)
    written_files.add(dest)
コード例 #23
0
ファイル: csvw.py プロジェクト: GSS-Cogs/gss-utils
def run_csv2rdf(csv_filename: str,
                metadata_filename: str,
                csv_io: TextIO,
                metadata_io: TextIO,
                codelists_base: Optional[str] = None):
    """Convert CSV + CSVW metadata to RDF with the csv2rdf Docker image.

    Copies the inputs into the container, runs csv2rdf, asserts a zero
    exit status and returns the produced Turtle document as bytes.
    """
    client = docker.from_env()
    csv2rdf = client.containers.create(
        'gsscogs/csv2rdf',
        command=
        f'csv2rdf -m annotated -o /tmp/output.ttl -t /tmp/{csv_filename} -u /tmp/{metadata_filename}'
    )
    archive = BytesIO()
    # Encode before sizing: the original took sizes from tell() on text
    # streams, which counts characters and under-reports the UTF-8 byte
    # length for non-ASCII content, corrupting the tar members.
    metadata_io.seek(0)
    metadata_bytes = metadata_io.read().encode('utf-8')
    csv_io.seek(0)
    csv_bytes = csv_io.read().encode('utf-8')
    with TarFile(fileobj=archive, mode='w') as t:
        tis = TarInfo(str(metadata_filename))
        tis.size = len(metadata_bytes)
        tis.mtime = time.time()
        t.addfile(tis, BytesIO(metadata_bytes))
        tic = TarInfo(str(csv_filename))
        tic.size = len(csv_bytes)
        tic.mtime = time.time()
        t.addfile(tic, BytesIO(csv_bytes))
        if codelists_base is not None:
            t.add(Path('features') / 'fixtures' / codelists_base,
                  arcname=codelists_base)

    archive.seek(0)
    csv2rdf.put_archive('/tmp/', archive)
    csv2rdf.start()
    response = csv2rdf.wait()
    sys.stdout.write(csv2rdf.logs().decode('utf-8'))
    assert_equal(response['StatusCode'], 0)
    output_stream, output_stat = csv2rdf.get_archive('/tmp/output.ttl')
    output_archive = BytesIO()
    for line in output_stream:
        output_archive.write(line)
    output_archive.seek(0)
    with TarFile(fileobj=output_archive, mode='r') as t:
        output_ttl = t.extractfile('output.ttl')
        return output_ttl.read()
コード例 #24
0
def get_tarinfo(name, buffer: typing.Union[BytesIO, StringIO]):
    """Build a TarInfo for *name* sized to the full contents of *buffer*.

    Rewinds *buffer* to the start as a side effect.  NOTE(review): for
    a StringIO the size is a character count, which only equals the
    encoded byte length for ASCII-only data -- confirm callers encode
    accordingly.
    """
    info = TarInfo(name=name)
    buffer.seek(0)
    if isinstance(buffer, BytesIO):
        info.size = len(buffer.getbuffer())
    else:
        info.size = len(buffer.getvalue())
    # Stamp with the current time (float epoch seconds).
    info.mtime = datetime.now().timestamp()
    return info
コード例 #25
0
def add_to_tar(tar: TarFile, data: bytes, filename: str):
    """Append *data* to *tar* as a regular file named *filename*.

    Ownership is normalised to uid/gid 0 with name "0" and permissions
    to 0o664 so the archive does not depend on who builds it.
    """
    from datetime import timezone  # local import keeps this fix self-contained

    tarinfo = TarInfo(name=filename)
    tarinfo.size = len(data)
    # BUG FIX: datetime.timestamp(datetime.utcnow()) treats the naive
    # UTC datetime as *local* time, yielding a wrong epoch on any
    # non-UTC host; use an aware UTC datetime instead.
    tarinfo.mtime = int(datetime.now(timezone.utc).timestamp())
    tarinfo.mode = 436            # decimal for 0o664 (rw-rw-r--)
    tarinfo.type = b'0'           # tarfile.REGTYPE: a regular file
    tarinfo.uid = tarinfo.gid = 0
    tarinfo.uname = tarinfo.gname = "0"

    tar.addfile(tarinfo, BytesIO(data))
コード例 #26
0
def addBytes(tar, dest, bytes):
    """Add the byte string *bytes* to *tar* as *dest*, once per path.

    Duplicate destinations (tracked in the module-level
    ``written_files`` set) are silently skipped.
    """
    if dest in written_files:
        return
    info = TarInfo(dest)
    info.size = len(bytes)
    info.mtime = now
    info.mode = 0o777
    stream = BytesIO(bytes)
    tar.addfile(info, fileobj=stream)
    stream.close()
    written_files.add(dest)
コード例 #27
0
def addString(tar, dest, string):
    """Add *string* to *tar* as *dest*, skipping already-written paths.

    Ported to Python 3: the ``print`` statement and the legacy octal
    ``0777`` literal were syntax errors, and tarfile streams must yield
    bytes, so the payload is UTF-8 encoded before writing.
    """
    from io import BytesIO  # local import keeps this port self-contained

    if dest not in written_files:
        print(dest, string)
        data = string.encode('utf-8')
        info = TarInfo(dest)
        info.size = len(data)
        info.mtime = now
        info.mode = 0o777
        file = BytesIO(data)
        tar.addfile(info, fileobj=file)
        file.close()
        written_files.add(dest)
コード例 #28
0
ファイル: context.py プロジェクト: bendavis78/zope
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.

        Store *text* in the archive as *filename*, optionally prefixed
        with *subdir*.  Python 2 code: *text* is a byte string.
        """
        if subdir is not None:
            filename = '%s/%s' % (subdir, filename)

        info = TarInfo(filename)
        info.size = len(text)
        info.mtime = time.time()
        self._archive.addfile(info, StringIO(text))
コード例 #29
0
ファイル: onos_stage.py プロジェクト: K-OpenNet/ONOS-SSM
def addString(tar, dest, string):
    """Add *string* to *tar* as *dest*, skipping already-written paths.

    Ported to Python 3: the ``print`` statement and the legacy octal
    ``0777`` literal were syntax errors, and tarfile streams must yield
    bytes, so the payload is UTF-8 encoded before writing.
    """
    from io import BytesIO  # local import keeps this port self-contained

    if dest not in written_files:
        print(dest, string)
        data = string.encode('utf-8')
        info = TarInfo(dest)
        info.size = len(data)
        info.mtime = now
        info.mode = 0o777
        file = BytesIO(data)
        tar.addfile(info, fileobj=file)
        file.close()
        written_files.add(dest)
コード例 #30
0
ファイル: test_context.py プロジェクト: bendavis78/zope
 def _addMember(path, data, modtime):
     """Add a directory member for every component of *path*, then the
     member itself via ``_addOneMember``.

     NOTE(review): each component is added as a separate top-level name
     (components are not joined) -- confirm that matches the fixture's
     expectations.
     """
     from tarfile import DIRTYPE
     elements = path.split('/')
     parents = filter(None, [elements[x] for x in range(len(elements))])
     for parent in parents:
         info = TarInfo()
         info.name = parent
         info.size = 0
         # BUG FIX: the original stamped ``mod_time`` (a name from the
         # enclosing scope) instead of the ``modtime`` parameter, so the
         # argument was silently ignored.
         info.mtime = modtime
         info.type = DIRTYPE
         archive.addfile(info, StringIO())
     _addOneMember(path, data, modtime)
コード例 #31
0
ファイル: test_context.py プロジェクト: goschtl/zope
 def _addMember(path, data, modtime):
     """Add a directory member for every component of *path*, then the
     member itself via ``_addOneMember``.

     NOTE(review): each component is added as a separate top-level name
     (components are not joined) -- confirm that matches the fixture's
     expectations.
     """
     from tarfile import DIRTYPE
     elements = path.split('/')
     parents = filter(None, [elements[x] for x in range(len(elements))])
     for parent in parents:
         info = TarInfo()
         info.name = parent
         info.size = 0
         # BUG FIX: the original stamped ``mod_time`` (a name from the
         # enclosing scope) instead of the ``modtime`` parameter, so the
         # argument was silently ignored.
         info.mtime = modtime
         info.type = DIRTYPE
         archive.addfile(info, StringIO())
     _addOneMember(path, data, modtime)
コード例 #32
0
ファイル: test_context.py プロジェクト: bendavis78/zope
 def _addMember(filename, data, modtime):
     """Create tar entries for each missing parent directory of
     *filename* (deepest first), then delegate to ``_addOneMember``."""
     from tarfile import DIRTYPE
     components = filename.split('/')[:-1]
     for depth in range(len(components), 0, -1):
         dirpath = '/'.join(components[:depth]) + '/'
         if dirpath not in archive.getnames():
             entry = TarInfo(dirpath)
             entry.type = DIRTYPE
             entry.mtime = modtime
             archive.addfile(entry)
     _addOneMember(filename, data, modtime)
コード例 #33
0
    def writeDataFile( self, filename, text, content_type, subdir=None ):

        """ See IExportContext.

        Write *text* (a byte string, Python 2) into the archive under
        *filename*, optionally below *subdir*.
        """
        if subdir is not None:
            filename = '%s/%s' % ( subdir, filename )

        info = TarInfo( filename )
        info.size = len( text )
        info.mtime = time.time()
        self._archive.addfile( info, StringIO( text ) )
コード例 #34
0
    def writeDataFile(self, filename, text, content_type, subdir=None):
        """ See IExportContext.

        Write *text* to the archive as *filename*, creating tar members
        for any missing parent directories (deepest first).
        """
        if subdir is not None:
            filename = '%s/%s' % (subdir, filename)

        components = filename.split('/')[:-1]
        for depth in range(len(components), 0, -1):
            dirpath = '/'.join(components[:depth]) + '/'
            if dirpath not in self._archive.getnames():
                dir_info = TarInfo(dirpath)
                dir_info.type = DIRTYPE
                dir_info.mtime = time.time()
                self._archive.addfile(dir_info)

        info = TarInfo(filename)
        info.size = len(text)
        info.mtime = time.time()
        self._archive.addfile(info, StringIO(text))
コード例 #35
0
 def _addMember(filename, data, modtime):
     """Ensure directory entries exist for every parent of *filename*,
     then add the file itself through ``_addOneMember``."""
     from tarfile import DIRTYPE
     parents = filename.split('/')[:-1]
     while parents:
         # Deepest directory first: 'a/b/' is checked before 'a/'.
         path = '/'.join(parents) + '/'
         if path not in archive.getnames():
             info = TarInfo(path)
             info.type = DIRTYPE
             info.mtime = modtime
             archive.addfile(info)
         parents.pop()
     _addOneMember(filename, data, modtime)
コード例 #36
0
ファイル: tardb.py プロジェクト: yasusii/shaling
 def test_change_info(self):
   """Verify a record's mtime survives a TarDB write/read cycle and
   that the record can be written back in place unchanged."""
   # writing
   db = TarDB(dirname).open('r+')
   data_foo = '123'
   mtime = 12345
   info = TarInfo('foo')
   info.mtime = mtime
   db.add_record(info, data_foo)
   db.close()
   # reading
   db = TarDB(dirname).open('r+')
   info = db[0]
   self.assertEqual(info.mtime, mtime)
   # rewrite the (unchanged) header back into slot 0
   db[0] = info
   db.close()
   return
コード例 #37
0
ファイル: script_mode.py プロジェクト: flyteorg/flytekit
def tar_strip_file_attributes(tar_info: tarfile.TarInfo) -> tarfile.TarInfo:
    """Zero out the metadata that varies between builds.

    Clears the modification time (epoch 0 -- shown as the modified date
    on extraction), the owner uid/gid and names, and any PAX headers,
    then returns the same TarInfo object.
    """
    # reproducible timestamp: 00:00:00 UTC on 1 January 1970
    tar_info.mtime = 0

    # anonymous ownership
    tar_info.uid = tar_info.gid = 0
    tar_info.uname = tar_info.gname = ""

    # stripping paxheaders may not be required
    # see https://stackoverflow.com/questions/34688392/paxheaders-in-tarball
    tar_info.pax_headers = {}

    return tar_info
コード例 #38
0
 def test_change_info(self):
     """Verify a record's mtime survives a TarDB write/read cycle and
     that the record can be written back in place unchanged."""
     # writing
     db = TarDB(dirname).open(mode='w')
     data_foo = '123'
     mtime = 12345
     info = TarInfo('foo')
     info.mtime = mtime
     db.add_record(info, data_foo)
     db.close()
     # reading
     db = TarDB(dirname).open(mode='w')
     info = db[0]
     self.assertEqual(info.mtime, mtime)
     # rewrite the (unchanged) header back into slot 0
     db[0] = info
     db.close()
     return
コード例 #39
0
def create_archive(filepaths):
    """Pack *filepaths* into an in-memory uncompressed tar archive.

    Each file is stored under its basename with the current time as
    mtime.  Returns a BytesIO seeked to the start of the archive.
    """
    tarstream = BytesIO()
    # NOTE: renamed the local from `tarfile` -- it shadowed the stdlib
    # module name.
    tar = TarFile(fileobj=tarstream, mode='w')
    for filepath in filepaths:
        # Read as bytes: TarInfo.size is a byte count and addfile()
        # needs a binary stream; the original text-mode read broke on
        # Python 3 (BytesIO rejects str) and miscounted multibyte data.
        # `with` also fixes the leaked file handle.
        with open(filepath, 'rb') as f:
            file_data = f.read()

        tarinfo = TarInfo(name=basename(filepath))
        tarinfo.size = len(file_data)
        tarinfo.mtime = time()

        tar.addfile(tarinfo, BytesIO(file_data))

    tar.close()
    tarstream.seek(0)
    return tarstream
コード例 #40
0
 def _add_entry(
     self,
     name: str,
     type: bytes,
     mode: int,
     mtime: int,
     size: int,
     data: Optional[IO[bytes]],
     linkname: str = "",
 ) -> None:
     """Append one member with the given metadata to the wrapped archive.

     *data* may be None for members that carry no body (directories,
     links, and similar non-regular entries).
     """
     entry = TarInfo(name)
     entry.type = type
     entry.mode = mode
     entry.mtime = mtime
     entry.size = size
     entry.linkname = linkname
     return self._inner.addfile(entry, data)
コード例 #41
0
    def get_image(self, image):
        """Export *image* plus all of its ancestor layers as an in-memory
        tar archive and return it as a BytesIO seeked to the start.

        Raises APIError when *image* is falsy (no image id supplied).
        """
        if not image:
            raise APIError(HTTPError('500 Server Error'),
                           None,
                           explanation='Usage: image_export IMAGE [IMAGE...]')

        # Walk the parent chain, collecting one normalized layer dict per
        # ancestor until a layer reports no parent id.
        layers = []
        next_layer_id = image

        while next_layer_id:
            layer = normalizeimage(self._findlayer(next_layer_id), copy=True)
            layers.append(layer)
            next_layer_id = layers[-1][':parent_id']

        image_file = BytesIO()
        # One timestamp for every member keeps the archive self-consistent.
        mtime = time()

        with tarfile_open(mode='w', fileobj=image_file) as image_tar_file:
            for layer in layers:
                # One directory entry per layer, named after the layer id.
                ti_dir = TarInfo(layer[':id'])
                ti_dir.mtime = mtime
                ti_dir.mode = 0o755
                ti_dir.type = DIRTYPE
                image_tar_file.addfile(ti_dir)

                # The layer body lives on disk as <data>/<short_id>/layer.tar
                # and is embedded as <id>/layer.tar inside the export.
                layer_tar_src_path = ospath_join(self._my_dir, 'data',
                                                 layer[':short_id'],
                                                 'layer.tar')

                with open(layer_tar_src_path, 'rb') as layer_tar_src_file:
                    layer_tar_dst_path = '{}/layer.tar'.format(layer[':id'])
                    ti_layer = image_tar_file.gettarinfo(
                        layer_tar_src_path, layer_tar_dst_path)
                    ti_layer.mtime = mtime
                    ti_layer.mode = 0o644
                    # Anonymize ownership so the export is reproducible.
                    ti_layer.uid = ti_layer.gid = 0
                    ti_layer.uname = ti_layer.gname = ''
                    image_tar_file.addfile(ti_layer,
                                           fileobj=layer_tar_src_file)

        image_file.seek(0)

        return image_file
コード例 #42
0
ファイル: tardb.py プロジェクト: yasusii/shaling
 def test_tar_compatibility(self):
   """A record written through TarDB is readable by the plain stdlib
   tarfile module."""
   payload = '123'
   mtime = 12345
   rec = TarInfo('foo')
   rec.mtime = mtime
   db = TarDB(dirname).open('r+')
   db.add_record(rec, payload)
   db.close()
   # verify the on-disk member with the stdlib reader
   import tarfile
   tar = tarfile.TarFile(os.path.join(dirname, 'db00000.tar'))
   member = tar.next()
   body = tar.extractfile(member).read()
   self.assertEqual(body, payload)
   self.assertEqual(len(body), member.size)
   self.assertEqual(member.mtime, mtime)
   tar.close()
   return
コード例 #43
0
 def test_tar_compatibility(self):
     """Data stored via TarDB must round-trip through the stdlib
     tarfile reader with matching size and mtime."""
     payload = '123'
     mtime = 12345
     rec = TarInfo('foo')
     rec.mtime = mtime
     db = TarDB(dirname).open(mode='w')
     db.add_record(rec, payload)
     db.close()
     # cross-check against the stdlib tar implementation
     import tarfile
     tar = tarfile.TarFile(os.path.join(dirname, 'db00000.tar'))
     member = tar.next()
     body = tar.extractfile(member).read()
     self.assertEqual(body, payload)
     self.assertEqual(len(body), member.size)
     self.assertEqual(member.mtime, mtime)
     tar.close()
     return
コード例 #44
0
ファイル: tarball.py プロジェクト: seblu/installsystems
 def add_str(self, name, content, ftype, mode, mtime=None,
             uid=None, gid=None, uname=None, gname=None):
     '''
     Add an in-memory string as a file member of the tarball.

     *name* and *content* may be unicode; both are encoded to UTF-8
     before being stored.  Attributes left as None keep the TarInfo
     defaults.  (Python 2 code: relies on `unicode` and a StringIO
     body stream.)
     '''
     if isinstance(name, unicode):
         name = name.encode("UTF-8")
     ti = TarInfo(name)
     # Copy each non-None argument onto the TarInfo through its
     # instance dict (vars(ti)); vars() reads the local namespace.
     # NOTE(review): "ftype" is not a TarInfo attribute (the tarfile
     # field is named `type`), so the requested file type may never
     # actually take effect -- confirm against the tarfile docs.
     for v in ("name", "ftype", "mode", "mtime", "uid", "gid", "uname", "gname"):
         if vars()[v] is not None:
             vars(ti)[v] = vars()[v]
     # Default the timestamp to "now" when the caller gave none.
     if mtime is None:
         ti.mtime = int(time())
     # Encode unicode content so the byte length below is accurate.
     if isinstance(content, unicode):
         content = content.encode("UTF-8")
     ti.size = len(content) if content is not None else 0
     self.addfile(ti, StringIO(content))
コード例 #45
0
    def _reset_tarinfo(
        self,
        info: TarInfo,
        predicate: Optional[ArchiveAdapter.FileFilter],
        mtime: Optional[int],
    ) -> Optional[TarInfo]:
        """Anonymize *info* for distribution, or drop it entirely.

        Returns None when *predicate* rejects the member name; otherwise
        returns the same TarInfo with ownership cleared and, when
        *mtime* is given, its timestamp pinned to that value.
        """
        if predicate is not None and not predicate(info.name):
            return None

        # User and group IDs are irrelevant for distribution and may
        # require subsequent `chown`ing on multi-tenant systems.
        info.uid = info.gid = 0
        info.uname = info.gname = ""

        if mtime is not None:
            info.mtime = mtime

        return info
コード例 #46
0
    def get_image(self, image):
        """Export *image* and all of its ancestor layers as an in-memory
        tar archive, returned as a BytesIO seeked to the start.

        Raises APIError when *image* is falsy (no image id supplied).
        """
        if not image:
            raise APIError(HTTPError('500 Server Error'), None, explanation='Usage: image_export IMAGE [IMAGE...]')

        # Collect the layer chain by following parent ids root-ward.
        chain = []
        layer_id = image
        while layer_id:
            entry = normalizeimage(self._findlayer(layer_id), copy=True)
            chain.append(entry)
            layer_id = entry[':parent_id']

        tar_buffer = BytesIO()
        # Stamp every member with the same export time.
        now = time()

        with tarfile_open(mode='w', fileobj=tar_buffer) as archive:
            for entry in chain:
                # Directory entry named after the layer id.
                dir_info = TarInfo(entry[':id'])
                dir_info.mtime = now
                dir_info.mode = 0o755
                dir_info.type = DIRTYPE
                archive.addfile(dir_info)

                src_path = ospath_join(self._my_dir, 'data', entry[':short_id'], 'layer.tar')

                # Embed the layer body as <id>/layer.tar with anonymized
                # ownership.
                with open(src_path, 'rb') as src_file:
                    member = archive.gettarinfo(src_path, '{}/layer.tar'.format(entry[':id']))
                    member.mtime = now
                    member.mode = 0o644
                    member.uid = member.gid = 0
                    member.uname = member.gname = ''
                    archive.addfile(member, fileobj=src_file)

        tar_buffer.seek(0)

        return tar_buffer
コード例 #47
0
ファイル: test_context.py プロジェクト: pigaov10/plone4.3
 def _addOneMember(path, data, modtime):
     """Add a single regular-file member *path* with body *data* and
     modification time *modtime* to the shared archive."""
     # BUG FIX: the original body referenced undefined names `k`/`v`
     # (a NameError at call time); use the actual parameters instead.
     stream = StringIO(data)
     info = TarInfo(path)
     info.size = len(data)
     info.mtime = modtime
     archive.addfile(info, stream)
コード例 #48
0
ファイル: fruitbak.py プロジェクト: wsldankers/fruitbak
def tar(host, backup, share, path):
	"""Stream a backup share (or a path within it) to stdout as a
	GNU-format tar archive.

	Writes raw tar headers and file bodies directly to the binary
	stdout buffer; file contents are fetched through a readahead
	agent over the chunk hashes.
	"""
	binary_stdout = stdout.buffer

	# NOTE(review): config dir is hard-coded to /dev/shm/conf here.
	fbak = Fruitbak(confdir = Path('/dev/shm/conf'))
	backup = fbak[host][backup]
	if path is None:
		# No explicit path: resolve share+path from the share argument.
		share, path = backup.locate_path(share)
	else:
		share = backup[share]

	def iterator():
		# Yield the chunk hashes of every regular (non-hardlink) file,
		# in the same traversal order as the main loop below, so the
		# readahead agent delivers chunks in lock-step with consumption.
		for dentry in share.find(path):
			if dentry.is_file and not dentry.is_hardlink:
				yield from dentry.hashes

	with fbak.pool.agent().readahead(iterator()) as reader:
		for dentry in share.find(path):
			name = dentry.name or b'.'
			i = TarInfo(fsdecode(bytes(name)))
			# Permission bits only; file type is set separately below.
			i.mode = dentry.mode & 0o7777
			i.uid = dentry.uid
			i.gid = dentry.gid
			# dentry.mtime is in nanoseconds; tar wants whole seconds.
			i.mtime = dentry.mtime // 1000000000
			if dentry.is_hardlink:
				i.type = LNKTYPE
				hardlink = dentry.hardlink or b'.'
				i.linkname = fsdecode(bytes(hardlink))
			elif dentry.is_file:
				i.type = REGTYPE
				i.size = dentry.size
			elif dentry.is_symlink:
				i.type = SYMTYPE
				i.linkname = fsdecode(bytes(dentry.symlink))
			elif dentry.is_chardev:
				i.type = CHRTYPE
				i.devmajor = dentry.major
				i.devminor = dentry.minor
			elif dentry.is_blockdev:
				i.type = BLKTYPE
				i.devmajor = dentry.major
				i.devminor = dentry.minor
			elif dentry.is_directory:
				i.type = DIRTYPE
			elif dentry.is_fifo:
				i.type = FIFOTYPE
			else:
				# Unknown entry type: emit nothing for it.
				continue

			# Emit the member header.
			binary_stdout.write(i.tobuf(GNU_FORMAT))

			if dentry.is_file and not dentry.is_hardlink:
				# Body: one readahead action per chunk hash, in order.
				for hash in dentry.hashes:
					action = next(reader)
					if action.exception:
						raise action.exception[1]
					binary_stdout.write(action.value)
				# Pad the body to a whole tar block.
				padding = -i.size % BLOCKSIZE
				if padding:
					binary_stdout.write(bytes(padding))

	# Tar end-of-archive marker: two zero-filled blocks.
	binary_stdout.write(b'\0' * (BLOCKSIZE*2))