Example #1
def write_out_backup(backing_store_fs, data, outfile, prefix=''):
    """
    Write the backup data to its final location. A backing store is required
    and either a filepath to the packaged backup or the tmp filesystem is required.

    :param backing_store_fs: a pyfilesystem2 object to be the final storage
            location of the backup. (should be `OSFS`, `S3FS`, `FTPFS`, etc.)
            Can be a single object or list of filesystem objects for copying to
            multiple backing stores.
    :param data: the byte stream that needs to be written to the file
        on the backing store fs.
    :param outfile: the name of the file to write out to.
    :param optional prefix: a parent directory for the files to be saved under.
            This can be a good place to encode some information about the
            backup. A slash will be appended to the prefix to create
            a directory or pseudo-directory structure.

    """
    if prefix and not prefix[-1] == '/':
        prefix = prefix + '/'
    if not isinstance(backing_store_fs, list):
        backing_store_fs = [backing_store_fs]
    for backing_fs in backing_store_fs:
        # print(backing_fs)
        tmp = tempfs.TempFS()
        with tmp.open("lp-tmp-backup", 'wb') as tmp_file:
            tmp_file.write(data)
        try:
            backing_fs.makedirs(prefix)
        except DirectoryExists:
            pass
        # print(prefix, outfile)
        copy_file(tmp, "lp-tmp-backup", backing_fs, str(prefix + outfile))
        tmp.clean()
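A minimal usage sketch for the function above, assuming its module-level imports (fs.tempfs, fs.copy.copy_file, fs.errors.DirectoryExists) are in place; the store locations, payload and file names are illustrative:

from fs.osfs import OSFS
from fs.memoryfs import MemoryFS

# Two backing stores: a local directory and an in-memory filesystem.
stores = [OSFS('/tmp/backups', create=True), MemoryFS()]

# The same payload lands on every store as 2024-01-01/site.tar.gz.
write_out_backup(stores, data=b'backup payload', outfile='site.tar.gz',
                 prefix='2024-01-01')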
Example #2
def main():
    """
    This script iterates over a group of gitlab projects and generates a script that clones
    each project if it doesn't exist locally or pulls it if it does. The project group structure
    is preserved.
    """
    gl = gitlab.Gitlab(os.environ.get('GITLAB_URL', GITLAB_URL),
                       private_token=os.environ['GITLAB_PRIVATE_TOKEN'])

    script_name = os.environ.get('SCRIPT_NAME', 'gitlab-download.sh')

    output_path = os.environ.get('OUTPUT_PATH', 'osfs:///data')

    with open_fs('mem://') as output_cache:
        output_cache.writetext(script_name,
                               f'# generated: {datetime.datetime.now()}\n')
        output_cache.appendtext(
            script_name, 'clone_or_pull() {\n'
            'if [ -e "$1" ]; then\necho $1\ngit -C $1 pull\nelse\ngit clone $2 $1\nfi\n}\n'
        )
        for group in gl.groups.list(all_available=True, all=True):
            if group.full_path.startswith(os.environ['GITLAB_GROUP_PATH']):
                LOGGER.info(group.name)
                for project in group.projects.list(all=True):
                    if project.repository_access_level != 'disabled':
                        output_cache.appendtext(script_name, (
                            f'clone_or_pull '
                            f'{project.path_with_namespace} {project.ssh_url_to_repo}\n'
                        ))

        copy_file(output_cache, script_name, output_path, script_name)
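fs.copy.copy_file accepts FS URLs as well as filesystem objects, which is what lets the final call above pass output_path (e.g. 'osfs:///data') directly as the destination. A minimal sketch of the same idea with illustrative names:

from fs import open_fs
from fs.copy import copy_file

with open_fs('mem://') as cache:
    cache.writetext('download.sh', 'echo hello\n')
    # The destination may be an FS URL string; fs opens and closes it for us.
    copy_file(cache, 'download.sh', 'osfs:///tmp', 'download.sh')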
Example #3
def read_backup(backing_store_fs, infile, prefix=""):
    """
    Read a backup file from some pyfilesystem.

    :param backing_store_fs: The pyfilesystem object where the file is located
    :param infile: the name of the file
    :param optional prefix: the prefix before the filename

    :return: raw file data
    """
    tmp = tempfs.TempFS()
    # data = ""
    if prefix and not prefix[-1] == '/':
        prefix = prefix + '/'
    if not isinstance(backing_store_fs, list):
        backing_store_fs = [backing_store_fs]
    restore_succeeded = False
    for backing_fs in backing_store_fs:
        try:
            copy_file(backing_fs, prefix + infile, tmp, infile)
            restore_succeeded = True
            break
        except (botocore.exceptions.NoCredentialsError, OSError,
                fs.errors.ResourceNotFound, fs.errors.PermissionDenied):
            continue
    if not restore_succeeded:
        raise exceptions.ConfigurationError(
            "Specified file could not be found in any"
            " of the available backing stores.")
    with tmp.open(infile, 'rb') as retrieved_file:
        data = retrieved_file.read()
    tmp.clean()
    return data
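A round-trip sketch pairing this function with write_out_backup from Example #1, assuming both are importable from the same module; the payload and names are illustrative:

from fs.memoryfs import MemoryFS

store = MemoryFS()
write_out_backup(store, data=b'backup payload', outfile='site.tar.gz',
                 prefix='2024-01-01')
assert read_backup(store, 'site.tar.gz', prefix='2024-01-01') == b'backup payload'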
Example #4
def load_model(filesystem: S3FS, config: PredictionConfig) -> ResUnetA:
    """ Copy the model locally if not existing and load it """
    if not os.path.exists(f'{config.temp_model_path}/{config.model_name}'):
        if not filesystem.exists(
                f'{config.model_path}/{config.model_name}/checkpoints/'):
            filesystem.makedirs(
                f'{config.model_path}/{config.model_name}/checkpoints/')
        copy_dir(filesystem,
                 f'{config.model_path}/{config.model_name}/checkpoints/',
                 f'{config.temp_model_path}/{config.model_name}',
                 'checkpoints')
        copy_file(filesystem,
                  f'{config.model_path}/{config.model_name}/model_cfg.json',
                  f'{config.temp_model_path}/{config.model_name}',
                  'model_cfg.json')

    input_shape = dict(
        features=[None, config.height, config.width, config.n_channels])

    with open(f'{config.temp_model_path}/{config.model_name}/model_cfg.json',
              'r') as jfile:
        model_cfg = json.load(jfile)

    # initialise model from config, build, compile and load trained weights
    model = ResUnetA(model_cfg)
    model.build(input_shape)
    model.net.compile()
    model.net.load_weights(
        f'{config.temp_model_path}/{config.model_name}/checkpoints/model.ckpt')

    return model
Example #5
    def upload(self, local_path, remote_path, filter_regex=None, istree=False):
        try:
            # path prepare
            local_path = self._local_path_transfor(local_path)
            if os.path.isdir(local_path) and istree is False:
                self.logger.warning("warning : use upload to upload tree")
                istree = True

            # osfs prepare
            localfs, local_relative = self.build_osfs(local_path)
            walker = None

            # walk prepare
            if filter_regex is not None:
                if not isinstance(filter_regex, list):
                    filter_regex = [filter_regex]
                walker = Walker(filter=filter_regex)

            # ftp prepare
            ftp_args = self._ftp_path_transfor(remote_path)
            ftpfs = FTPFS(host=ftp_args['host'],
                          port=ftp_args['port'],
                          passwd=ftp_args['password'],
                          user=ftp_args['user'])
            if not istree:
                ftp_local, ftp_file = self._parse_file_name(ftp_args['relative_path'])
                try:
                    ftpfs.makedirs(ftp_local)
                except Exception as error_msg:
                    self.logger.error(str(error_msg))
                copy_file(localfs, local_relative, ftpfs, ftp_args['relative_path'])
            else:
Example #6
    def add_file(self, fname):
        assert self.write
        if not os.path.isabs(fname):
            fname = os.path.join(self.cwd, fname)
        fdir = os.path.dirname(fname)
        self.datafs.makedirs(fdir, recreate=True)
        copy.copy_file(self.osfs, fname, self.datafs, fname)
Example #7
    def replicate(self, path, destdir, logger=None):
        """
        copy a file from the source bag to the same location in an output bag.

        :param str path:  the path, relative to the source bag's root directory,
                          to the file to be replicated.
        :param str destdir:  the destination directory.  This is usually the 
                          root directory of another bag.  
        :param Logger logger: a logger instance to send messages to.
        :raises ValueError:  if the given file path doesn't exist in the source
                          bag.
        """
        path = _unicode(path)
        if not self.exists(path):
            raise ValueError(
                "replicate: file/dir does not exist in this bag: " + path)

        destfs = open_fs(destdir)
        if self.isdir(path):

            if not destfs.isdir(path):
                if logger:
                    logger.info("Creating matching directory in output bag: " +
                                path)
                destfs.makedirs(path, recreate=True)
            return

        parent = os.path.dirname(path)
        if parent and not destfs.exists(parent):
            if logger:
                logger.debug("Creating output file's parent directory: " +
                             parent)
            destfs.makedirs(parent)

        copy_file(self._root.fs, path, destfs, path)
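The makedirs-before-copy_file pattern used here recurs in several of these examples. Extracted as a standalone helper it looks roughly like this (a sketch; the destination keeps the same relative path as the source):

from fs.copy import copy_file
from fs.path import dirname

def copy_preserving_path(src_fs, path, dst_fs):
    # Ensure the destination parent directory exists, then copy the file
    # to the same relative location on the destination filesystem.
    parent = dirname(path)
    if parent and not dst_fs.exists(parent):
        dst_fs.makedirs(parent, recreate=True)
    copy_file(src_fs, path, dst_fs, path)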
Example #8
    def _copy_sources_to_subdirs(self):
        from dxpy import batch
        from fs.copy import copy_file
        all_sources = batch.FilesFilter(['*']).lst(self.fs)
        for i in range(self._nb_sub_dirs):
            with self.fs.opendir(self._sub_dir_name(i)) as d:
                for f in all_sources:
                    copy_file(self.fs, f, d, f)
Example #9
    def file_copy(self, source_path, source_file, destination_path,
                  destination_file):
        os_fs_destination = open_fs(self.fs_root)
        os_fs_source = open_fs('osfs://' + source_path)

        os_fs_destination.makedirs(destination_path, recreate=True)
        copy_file(os_fs_source, source_file, os_fs_destination,
                  destination_path + '/' + destination_file)
Example #10
    def copyfile_and_gzip(self, source_path, target_path):
        """
        Copies the provided file to the provided target directory.

        Gzips JavaScript, CSS, HTML and other matching files along the way.
        """
        # And then where we want to copy it to.
        target_dir = path.dirname(target_path)
        if not self.fs.exists(target_dir):
            try:
                self.fs.makedirs(target_dir)
            except OSError:
                pass

        # determine the mimetype of the file
        guess = mimetypes.guess_type(source_path)
        content_type = guess[0]
        encoding = guess[1]

        # If it isn't a file we want to gzip...
        if content_type not in self.gzip_file_match:
            # just copy it to the target.
            logger.debug(
                "Copying {}{} to {}{} because its filetype isn't on the whitelist"
                .format("osfs://", source_path, self.fs_name, target_path))
            copy.copy_file("osfs:///", smart_text(source_path), self.fs,
                           smart_text(target_path))

        # If the file is already gzipped
        elif encoding == 'gzip':
            logger.debug(
                "Copying {}{} to {}{} because it's already gzipped".format(
                    "osfs://", source_path, self.fs_name, target_path))
            copy.copy_file("osfs:///", smart_text(source_path), self.fs,
                           smart_text(target_path))

        # If it is one we want to gzip...
        else:
            # ... let the world know ...
            logger.debug("Gzipping {}{} to {}{}".format(
                "osfs://", source_path, self.fs_name, target_path))
            # Open up the source file from the OS
            with open(source_path, 'rb') as source_file:
                # Write GZIP data to an in-memory buffer
                data_buffer = six.BytesIO()
                kwargs = dict(filename=path.basename(target_path),
                              mode='wb',
                              fileobj=data_buffer)
                if sys.version_info >= (2, 7):
                    kwargs['mtime'] = 0
                with gzip.GzipFile(**kwargs) as f:
                    f.write(six.binary_type(source_file.read()))

                # Write that buffer out to the filesystem
                with self.fs.open(smart_text(target_path), 'wb') as outfile:
                    outfile.write(data_buffer.getvalue())
                    outfile.close()
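The core of the gzip branch above, compressing into an in-memory buffer and then writing that buffer to the target filesystem, can be sketched in isolation (the helper name and arguments are illustrative):

import gzip
import io
from os import path

def gzip_into_fs(source_path, dest_fs, dest_path):
    # Compress the local source file into an in-memory buffer; mtime=0 keeps
    # the gzip output deterministic, mirroring the example above.
    buffer = io.BytesIO()
    with open(source_path, 'rb') as source_file:
        with gzip.GzipFile(filename=path.basename(dest_path), mode='wb',
                           fileobj=buffer, mtime=0) as gz:
            gz.write(source_file.read())
    # Write the compressed bytes out to the destination pyfilesystem.
    dest_fs.writebytes(dest_path, buffer.getvalue())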
Example #11
    def _try_copy_to_filesystem(self, source: str, target: str, filesystem: Optional[Filesystem]) -> None:
        other_filesystem = cast(PyFilesystemBased, filesystem).internal_fs
        if self.internal_fs.isdir(source):
            fscp.copy_dir(self.internal_fs, source,
                          other_filesystem, target)
            return

        fscp.copy_file(self.internal_fs, source,
                       other_filesystem, target)
Example #12
    def copy_file_from_package_to_fs(self, filename, path, fs):
        if not fs.isfile(filename):

            copy_file(src_fs=self.fs_pkg,
                      src_path=path,
                      dst_fs=fs,
                      dst_path=filename)

            return True
        return False
Example #13
    def close(self):
        """
        Save ForensicStore to its location.
        """
        self.connection.commit()
        self.connection.close()
        if not self.remote_is_local and not self.read_only:
            copy.copy_file(self.local_fs, self.db_file, self.remote_fs,
                           self.db_file)
            self.local_fs.removetree(".")
Example #14
def eggifySingle(srcFS, src, destFS, dest, config=None):
    """ Eggify single source to single destination.

    Args:
        src (basestring)
        dest (basestring)

    Raises:
        MissingDestinationException
    """

    if dest is None:
        raise MissingDestinationException()

    if config is None:
        config = {}

    if src.startswith("/") or src[1] == ":":
        head, tail = os.path.split(src)

        srcFS = OSFS(head)
        src = tail

    if srcFS.isfile(unicode(src)):
        assertFS(destFS.getsyspath(unicode(dest)))

        workingDir = srcFS.getsyspath(unicode("/"))
        devnull = open(os.devnull, 'w')

        cmd = ["python", src, "bdist_egg"]

        if "purge" in config.keys() and config["purge"]:
            cmd.append("--exclude-source-files")            

        subprocess.check_call(cmd, cwd=workingDir, stdout=devnull, stderr=devnull)
    
        if srcFS.isdir(unicode("dist")):
            distFS = srcFS.opendir(unicode("dist"))

            for name in reversed(sorted(distFS.listdir("/"))):
                if name.endswith(".egg"):
                    destEggFS = destFS.opendir(unicode(dest))

                    # remove existing eggs
                    removeOldEggs(destEggFS, name)

                    eggSrcPath = distFS.getsyspath(unicode(name))
                    eggDestPath = destEggFS.getsyspath(unicode(name))

                    copy_file(distFS, unicode(name), destEggFS, unicode(name))

                    print "copied {} to {}".format(eggSrcPath, eggDestPath)

                    break
Example #15
    def copy_to(self, other):
        """Create a new FSResource by copying the underlying resource

        Note that calling .close() on either one (or exiting the
        context of the original) will close the filesystem that both use.

        In practice, if you use the new one within the open context
        of the old one, you'll be fine.
        """
        if isinstance(other, (str, Path)):
            other = FSResource.new(other)
        copy.copy_file(self.fs, self.filename, other.fs, other.filename)
Example #16
    def _test_scan(self, local_file, expected_scan_result,
                   expected_scan_message):
        with open_fs('osfs://.') as source_fs, open_fs(
                self.FS_URL_HOST) as destination_fs:
            copy_file(source_fs, local_file, destination_fs, self.TEST_FILE)

        scan_result, scan_message = scan.delay(fs_url=self.FS_URL,
                                               file=self.TEST_FILE).get()

        print('Scan result {}, message {}.'.format(scan_result, scan_message))
        self.assertEqual(scan_result, expected_scan_result)
        self.assertRegex(scan_message, expected_scan_message)
Example #17
File: commands.py Project: Hong-Xiang/dxl
def fetch(source, target, filenames, depth, verbose):
    from ...api import files_in_directories
    from fs.osfs import OSFS
    from fs.copy import copy_file
    from fs.path import dirname
    with OSFS(source) as sor:
        with OSFS(target) as tar:
            files = files_in_directories(sor, ['*'], filenames, depth)
            for f in files:
                if not tar.exists(dirname(f)):
                    tar.makedirs(dirname(f))
                copy_file(sor, f, tar, f)
                if verbose:
                    click.echo('[COPY] {} => {}.'.format(
                        sor.getsyspath(f), tar.getsyspath(f)))
Example #18
    def _copy_sources_from_template(self):
        from fs.path import relativefrom
        from fs.copy import copy_file
        from fs.osfs import OSFS
        path_source = self.c['source']['directory']
        if not path_source.startswith('/'):
            path_source = self.fs.getsyspath(path_source)
        with OSFS('/') as fs:
            if not fs.exists(path_source):
                raise ValueError(
                    "Source path {} not exists.".format(path_source))
        with OSFS(path_source) as fs_sor:

            for f in self.c['source']['filenames']:
                copy_file(fs_sor, f, self.fs, f)
Example #19
    def copy(self, src_path, dst_path, src_fs=None, dst_fs=None):

        if not src_fs:
            src_fs = self.xml_fs

        if not dst_fs:
            dst_fs = self.out_fs
        try:
            copy.copy_file(src_fs, src_path, dst_fs, dst_path)

            logging.info("Copy asset: %s to: %s" % (
                path.join(src_fs.root_path, src_path),
                path.join(dst_fs.root_path, dst_path),
            ))

        except errors.ResourceNotFound as e:
            logging.info(e)
Example #20
def scan(*,
         fs_url: str,
         file: str,
         timeout: int = 3600,
         clamscan_options: Dict[str, Any] = None) -> Tuple[bool, str]:
    with open_fs(fs_url) as input_fs, open_fs('osfs:///') as temp_fs:
        try:
            file_info = input_fs.getinfo(file)
            if file_info.is_dir:
                raise ValueError(
                    f'Resource {file} is a directory, expecting a file.')
        except ResourceNotFound:
            raise FileNotFoundError(f'Resource {file} does not exist.')

        has_system_path = input_fs.hassyspath(file)
        if has_system_path:
            file_to_scan = input_fs.getsyspath(file)
        else:
            # We use the same temporary directory as clamscan if specified
            if clamscan_options and 'tempdir' in clamscan_options:
                temp_dir = clamscan_options['tempdir']
            else:
                temp_dir = '/tmp'
            file_to_scan = '{}/{}'.format(temp_dir, uuid.uuid4().hex)
            try:
                copy_file(input_fs, file, temp_fs, file_to_scan)
            except Exception as exception:
                try:
                    temp_fs.remove(file_to_scan)
                except ResourceNotFound:
                    pass
                raise RuntimeError(
                    f'Copying resource {file} to osfs://{file_to_scan} failed with a {type(exception).__name__} exception: {str(exception)}.'
                ) from None

        try:
            scan_result, scan_message = _clamscan(
                file_to_scan,
                timeout=timeout,
                clamscan_options=clamscan_options)
        finally:
            if not has_system_path:
                try:
                    temp_fs.remove(file_to_scan)
                except ResourceNotFound:
                    pass

    return scan_result, scan_message
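The hassyspath/getsyspath shortcut used above, which skips the copy whenever the input filesystem is backed by a real OS path, can be isolated into a small helper (a sketch; the caller removes the temporary copy when one was made):

import uuid
from fs.copy import copy_file

def materialize(input_fs, file, temp_fs, temp_dir='/tmp'):
    # Return (local_path, needs_cleanup) for a file on any pyfilesystem.
    if input_fs.hassyspath(file):
        # Already reachable through the OS; no copy required.
        return input_fs.getsyspath(file), False
    temp_path = '{}/{}'.format(temp_dir, uuid.uuid4().hex)
    copy_file(input_fs, file, temp_fs, temp_path)
    return temp_path, True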
Example #21
def copySingle(srcFS, src, destFS, dest):
    """Copy single source to single destination.

    Args:
        src (basestring)
        dest (basestring)

    Raises:
        MissingDestinationException
    """

    if dest is None:
        raise MissingDestinationException()

    if srcFS.isdir(unicode(src)):
        assertFS(destFS.getsyspath(unicode(dest)))

        copy_fs(srcFS.opendir(unicode(src)), destFS.opendir(unicode(dest)))

    if srcFS.isfile(unicode(src)):
        assertFS(destFS.getsyspath(unicode(os.path.dirname(dest))))

        copy_file(srcFS, unicode(src), destFS, unicode(dest))    
Example #22
    def __init__(self,
                 remote_url: str,
                 discriminator: str = "type",
                 read_only: bool = False):
        self.read_only = read_only

        if isinstance(remote_url, str):
            if remote_url[-1] == "/":
                remote_url = remote_url[:-1]
            self.remote_fs = open_fs(remote_url, create=True)
        else:
            self.remote_fs = remote_url
        self.remote_is_local = self.remote_fs.hassyspath(".")

        if self.remote_is_local and not read_only:
            self.local_fs = self.remote_fs
        else:
            self.local_fs = osfs.OSFS(tempfile.mkdtemp())

        self.new = not self.remote_fs.exists(self.db_file)
        if (not self.new and not self.remote_is_local) or self.read_only:
            # store exists
            copy.copy_file(self.remote_fs, self.db_file, self.local_fs,
                           self.db_file)

        dbpath = path.join(self.local_fs.getsyspath("."), self.db_file)
        self.connection = sqlite3.connect(dbpath)
        self.connection.row_factory = sqlite3.Row
        self._options = dict()
        self._schemas = dict()

        if self.new:
            self._create_options_table()
            self.set_strict(True)
            self.set_discriminator(discriminator)

        self._tables = self._get_tables()
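Together with close() in Example #13, this constructor implements a pull-edit-push cycle: copy the database from the remote filesystem to a local temporary directory, work on it there, and copy it back when closing. A stripped-down sketch of that cycle (the remote URL and file name are hypothetical):

import tempfile
from fs import open_fs, osfs
from fs.copy import copy_file

remote_fs = open_fs('ftp://user:password@host/stores', create=True)  # hypothetical remote
local_fs = osfs.OSFS(tempfile.mkdtemp())

copy_file(remote_fs, 'store.db', local_fs, 'store.db')   # pull a working copy
# ... open and modify local_fs.getsyspath('store.db') with sqlite3 ...
copy_file(local_fs, 'store.db', remote_fs, 'store.db')   # push the result back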
Example #23
File: operators.py Project: Hong-Xiang/dxl
    def broadcast(cls, fs, paths, path_content):
        """
        Copy content to all paths.
        """
        return [copy.copy_file(fs, path_content, fs, p) for p in paths]
Example #24
File: service.py Project: Hong-Xiang/hqlf2
def copy_dir(source, target):
    filters = ['*.mac', '*.sh', '*.C', '*.pat', '*.db']
    with OSFS(source) as s:
        with OSFS(target) as t:
            for f in s.filterdir('.', files=filters, exclude_dirs=['*']):
                copy_file(s, f.name, t, f.name)
Example #25
File: service.py Project: Hong-Xiang/hqlf2
def copy_mac(source, target, mac_name):
    with OSFS(source) as s, OSFS(target) as t:
        copy_file(s, mac_name, t, mac_name)
Example #26
    def test_generator_functions(self, _, output_format: str,
                                 maintain_ratio: bool):
        tasks = []
        input_files = []

        if output_format == 'jpg':
            generator = generate_preview_jpg
            expected_mime_type = 'image/jpeg'
        elif output_format == 'png':
            generator = generate_preview_png
            expected_mime_type = 'image/png'
        elif output_format == 'pdf':
            generator = generate_pdf
            expected_mime_type = 'application/pdf'
        else:
            raise RuntimeError(
                'Unsupported output format {}.'.format(output_format))

        for input_file in example_files:
            data_mime_type = self.mime_type(input_file)
            _, extension = os.path.splitext(input_file)

            if not supported_import_format.delay(mime_type=data_mime_type,
                                                 extension=extension).get():
                print('{}: Unsupported MIME type {}.'.format(
                    input_file, data_mime_type))
                continue
            input_file_basename = os.path.basename(input_file)
            input_files.append(input_file_basename)

            with open_fs('osfs://') as source_fs, open_fs(
                    self.INPUT_FS_URL_HOST) as destination_fs:
                copy_file(source_fs, input_file, destination_fs,
                          input_file_basename)

            pixel_height = self.PIXE_HEIGHT
            pixel_width = self.PIXEL_WIDTH

            output_file = '{input_file_basename}-{height}x{width}.{output_format}'.format(
                input_file_basename=input_file_basename,
                height=pixel_height if not maintain_ratio else 'auto',
                width=pixel_width if not maintain_ratio else 'auto',
                output_format=output_format)
            if output_format == 'pdf':
                tasks.append(
                    generator.clone(
                        kwargs={
                            'input_fs_url': self.INPUT_FS_URL,
                            'input_file': input_file_basename,
                            'output_fs_url': self.OUTPUT_FS_URL,
                            'output_file': output_file,
                            'mime_type': data_mime_type,
                            'extension': extension,
                            'paper_format': 'LETTER',
                            'timeout': 10,
                        }))
            elif output_format == 'jpg':
                tasks.append(
                    generator.clone(
                        kwargs={
                            'input_fs_url': self.INPUT_FS_URL,
                            'input_file': input_file_basename,
                            'output_fs_url': self.OUTPUT_FS_URL,
                            'output_file': output_file,
                            'mime_type': data_mime_type,
                            'extension': extension,
                            'pixel_height': pixel_height,
                            'pixel_width': pixel_width,
                            'quality': 25,
                            'maintain_ratio': maintain_ratio,
                            'timeout': 10,
                        }))
            elif output_format == 'png':
                tasks.append(
                    generator.clone(
                        kwargs={
                            'input_fs_url': self.INPUT_FS_URL,
                            'input_file': input_file_basename,
                            'output_fs_url': self.OUTPUT_FS_URL,
                            'output_file': output_file,
                            'mime_type': data_mime_type,
                            'extension': extension,
                            'pixel_height': pixel_height,
                            'pixel_width': pixel_width,
                            'compression': 3,
                            'maintain_ratio': maintain_ratio,
                            'timeout': 10,
                        }))
            else:
                raise NotImplementedError

        group_results = group(tasks).apply_async()

        failed_jobs = 0
        successful_jobs = 0
        expected_jobs = len(input_files)
        for input_file_basename, result in zip(
                input_files, group_results.get(propagate=False)):
            if isinstance(result, Exception):
                print('{}: exception {}.'.format(input_file_basename,
                                                 str(result)))
                failed_jobs += 1
                continue

            output_file = '{input_file_basename}-{height}x{width}.{output_format}'.format(
                input_file_basename=input_file_basename,
                height=pixel_height if not maintain_ratio else 'auto',
                width=pixel_width if not maintain_ratio else 'auto',
                output_format=output_format)
            with open_fs(self.OUTPUT_FS_URL_HOST) as source_fs, open_fs(
                    'osfs://output/') as destination_fs:
                copy_file(source_fs, output_file, destination_fs, output_file)

            output_mime_type = self.mime_type(f'output/{output_file}')
            self.assertEqual(expected_mime_type, output_mime_type)

            if output_format == 'jpg' or output_format == 'png':
                with open(f'output/{output_file}', 'rb') as f:
                    output_data = BytesIO(f.read())

                image = Image.open(output_data)

                if maintain_ratio:
                    self.assertTrue(image.height == self.PIXE_HEIGHT
                                    or image.width == self.PIXEL_WIDTH)
                else:
                    self.assertEqual(self.PIXE_HEIGHT, image.height)
                    self.assertEqual(self.PIXEL_WIDTH, image.width)

            successful_jobs += 1

        self.assertEqual(expected_jobs, group_results.completed_count())
        self.assertEqual(expected_jobs, successful_jobs)
        self.assertEqual(0, failed_jobs)
Example #27
File: operators.py Project: Hong-Xiang/dxl
    def broadcast_directory(cls, fs, paths, path_content):
        name = path.basename(path_content)
        return [
            copy.copy_file(fs, path_content, fs, path.join(p, name))
            for p in paths
        ]
Example #29
        puts("Adding game rpas")
        with indent(2):
            if "ddlc-win.zip" not in cwdfs.listdir("/"):
                puts("Downloading DDLC")
                with indent(2):
                    puts("Getting URL")
                    r = requests.post(
                        "https://teamsalvato.itch.io/ddlc/file/594897")
                    r.raise_for_status()
                    ddlcurl = r.json()["url"]
                    puts("Downloading")
                    r = requests.get(ddlcurl, stream=True)
                    with cwdfs.open("ddlc-win.zip", 'wb') as fd:
                        total_length = int(r.headers.get('content-length'))
                        for chunk in progress.bar(
                                r.iter_content(chunk_size=1024),
                                expected_size=(total_length / 1024) + 1):
                            fd.write(chunk)

        puts("Extracting game rpas")
        with ZipFS("./ddlc-win.zip") as zipfs:
            gamefs = zipfs.opendir(zipfs.listdir("/")[0]).opendir("game")
            templatefs = tempfs.opendir("renpy/My DDLC Mod/game")
            for fn in ("images.rpa", "fonts.rpa", "audio.rpa"):
                puts("Extracting {}".format(fn))
                fscopy.copy_file(gamefs, fn, templatefs, fn)

puts("Moving to real filesystem...")
fscopy.copy_fs(tempfs.opendir("renpy"), cwdfs)