def url(self, context):
    # build the regular static URL via the parent StaticFilesNode
    result = super(StaticFilesNode, self).url(context)
    if 'request' in context:
        request = context['request']
        # serve the pre-gzipped copy only when the client accepts gzip
        # and this file type is configured to have a gzipped copy saved
        if (utils.accepts_gzip(request)
                and utils.should_save_gzipped_copy(result)):
            return utils.get_gzipped_name(result)
    return result
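
# Both url() above and post_process() below rely on three helpers from a
# "utils" module that the snippet does not show. What follows is only a
# sketch of what such helpers could look like: the function names come from
# the snippet, but the bodies and the two settings-style constants are
# assumptions, not the library's actual code.
import os

# assumed configuration; a real implementation would likely read this from
# django.conf.settings
GZIP_CONTENT_EXTENSIONS = ('.css', '.js', '.svg', '.txt')
GZIP_SUFFIX = '.gz'


def accepts_gzip(request):
    # the client advertises gzip support via the Accept-Encoding header
    return 'gzip' in request.META.get('HTTP_ACCEPT_ENCODING', '')


def should_save_gzipped_copy(path):
    # only compress file types that actually benefit from gzip
    return os.path.splitext(path)[1].lower() in GZIP_CONTENT_EXTENSIONS


def get_gzipped_name(path):
    # the gzipped copy sits next to the original, with a .gz suffix
    return path + GZIP_SUFFIX
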
    # module-level imports this method relies on (not shown in the snippet):
    #     import os
    #     from django.core.files.base import ContentFile
    #     from django.utils.encoding import force_text
    def post_process(self, paths, dry_run=False, **options):
        """
        Post-process the given dict of files (called from collectstatic).

        Processing finds paths that match the configuration,
        gzips them and copies them to the target storage with
        the name generated by utils.get_gzipped_name.
        """

        # allow other processors to run, yielding their values
        # and adding new files to the list of ones to be gzipped
        if hasattr(super(SaveGzippedCopyMixin, self), 'post_process'):
            processor = super(SaveGzippedCopyMixin, self).post_process(
                paths=paths.copy(), dry_run=dry_run, **options
            )
            for original_path, processed_path, processed in processor:
                if processed and original_path != processed_path:
                    paths[processed_path] = (self, processed_path)
                yield original_path, processed_path, processed

        # don't even dare to process the files if we're in dry run mode
        if dry_run:
            return

        path_level = lambda name: len(name.split(os.sep))

        # make a list of files that are to be gzipped
        adjustable_paths = [
            path for path in
            sorted(paths.keys(), key=path_level, reverse=True)
            if utils.should_save_gzipped_copy(path)
        ]

        for name in adjustable_paths:
            storage, path = paths[name]
            gzipped_name = utils.get_gzipped_name(name)
            # the should_skip_processing hook lets subclasses decide whether
            # this file needs a fresh gzipped copy at all
            if not self.should_skip_processing(storage, path, gzipped_name):
                with storage.open(path) as original_file:
                    # rewind, in case the storage returned an already-read file
                    if hasattr(original_file, 'seek'):
                        original_file.seek(0)
                    pregzipped_file = ContentFile(original_file.read())
                    # hook: adjust the raw content before it is gzipped
                    pregzipped_file = self.pre_save_gzipped(
                        name, gzipped_name, pregzipped_file,
                    )
                    # drop any previously saved gzipped copy
                    if self.exists(gzipped_name):
                        self.delete(gzipped_name)
                    # hook: wrap the content in a gzip-compressed file object
                    gzipped_file = self.gzipped_file(
                        name, gzipped_name, pregzipped_file,
                    )
                    saved_name = self._save(gzipped_name, gzipped_file)
                    # normalise to forward slashes, as staticfiles expects
                    gzipped_name = force_text(saved_name.replace('\\', '/'))
                    # hook: notify subclasses that the gzipped copy was saved
                    self.post_save_gzipped(
                        name, gzipped_name, gzipped_file,
                    )
                    yield name, gzipped_name, True
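
# post_process() only runs when this mixin is combined with a staticfiles
# storage backend and that backend is selected in settings, so collectstatic
# can call it after copying the files. A minimal wiring sketch follows; the
# file layout, dotted paths and class name are illustrative assumptions, not
# the library's published API.
# myproject/storage.py
from django.contrib.staticfiles.storage import StaticFilesStorage

# wherever SaveGzippedCopyMixin actually lives in the package -- assumed path
from gzipped_static.storage import SaveGzippedCopyMixin


class GzippedStaticFilesStorage(SaveGzippedCopyMixin, StaticFilesStorage):
    """Plain static files storage that also saves pre-gzipped .gz copies."""


# settings.py (pre-Django 4.2 STATICFILES_STORAGE style):
#     STATICFILES_STORAGE = 'myproject.storage.GzippedStaticFilesStorage'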