def post_process(self, paths, dry_run=False, **options):
    """Pack every configured CSS and JS package and yield the results.

    Follows Django's staticfiles ``post_process`` protocol: yields
    ``(name, hashed_name, processed)`` tuples and records each packed
    output file in ``paths`` so cooperating storages see it too.
    """
    if dry_run:
        return

    from pipeline.packager import Packager
    packager = Packager(storage=self)

    # Stylesheets first, then javascripts — same order as before, but the
    # two identical loops are driven by a (kind, packer) table.
    jobs = (
        ('css', packager.pack_stylesheets),
        ('js', packager.pack_javascripts),
    )
    for kind, pack in jobs:
        for group_name in packager.packages[kind]:
            bundle = packager.package_for(kind, group_name)
            target = bundle.output_filename
            if self.packing:
                pack(bundle)
            paths[target] = (self, target)
            yield target, target, True

    # Chain to any parent storage's own post_process (e.g. hashing).
    parent = super(PipelineMixin, self)
    if hasattr(parent, 'post_process'):
        for name, hashed_name, processed in parent.post_process(
                paths.copy(), dry_run, **options):
            yield name, hashed_name, processed
def post_process(self, paths, dry_run=False, **options):
    """
    This post_process hook is used to package all themed assets.
    """
    if dry_run:
        return

    # Importing here is idempotent; hoisted out of the per-theme loop.
    from pipeline.packager import Packager

    # Each theme contributes its own CSS packages, packed independently.
    for theme in get_themes():
        themed_css = self.get_themed_packages(
            theme.theme_dir_name, settings.PIPELINE['STYLESHEETS'])
        packager = Packager(storage=self, css_packages=themed_css)
        for group_name in packager.packages['css']:
            bundle = packager.package_for('css', group_name)
            target = bundle.output_filename
            if self.packing:
                packager.pack_stylesheets(bundle)
            paths[target] = (self, target)
            yield target, target, True

    # Delegate to any cooperating storage's post_process.
    parent = super(ThemePipelineMixin, self)
    if hasattr(parent, 'post_process'):
        for name, hashed_name, processed in parent.post_process(
                paths.copy(), dry_run, **options):
            yield name, hashed_name, processed
def post_process(self, paths, dry_run=False, **options):
    """
    This post_process hook is used to package all themed assets.
    """
    if dry_run:
        return

    # Import is idempotent, so hoisting it out of the loop is safe.
    from pipeline.packager import Packager

    # Build and pack the CSS packages declared by every installed theme.
    for theme in get_themes():
        css_packages = self.get_themed_packages(
            theme.theme_dir_name, settings.PIPELINE_CSS)
        packager = Packager(storage=self, css_packages=css_packages)
        for group in packager.packages['css']:
            bundle = packager.package_for('css', group)
            out = bundle.output_filename
            if self.packing:
                packager.pack_stylesheets(bundle)
            paths[out] = (self, out)
            yield out, out, True

    # Forward everything to the next storage in the MRO, if any.
    parent = super(ThemePipelineMixin, self)
    if hasattr(parent, 'post_process'):
        for name, hashed, processed in parent.post_process(
                paths.copy(), dry_run, **options):
            yield name, hashed, processed
def post_process(self, paths, dry_run=False, **options):
    """Pack all CSS/JS packages and return the processed-file report.

    Returns either the parent storage's post_process result (when one
    exists) or a ``(path, path, True)`` triple per collected path.
    """
    if dry_run:
        return []

    from pipeline.packager import Packager
    packager = Packager(storage=self)

    # Drive both asset kinds through one loop via (kind, packer) pairs.
    for kind, pack in (('css', packager.pack_stylesheets),
                       ('js', packager.pack_javascripts)):
        for group in packager.packages[kind]:
            bundle = packager.package_for(kind, group)
            target = bundle.output_filename
            if self.packing:
                pack(bundle)
            paths[target] = (self, target)

    parent = super(PipelineMixin, self)
    if hasattr(parent, 'post_process'):
        return parent.post_process(paths, dry_run, **options)

    return [(path, path, True) for path in paths]
def post_process(self, paths, dry_run=False, **options):
    """Incrementally pack only the packages that contain each input file.

    NOTE(review): iterates ``paths`` as ``(abs_path, rel_path)`` pairs,
    so the caller must supply an iterable of 2-tuples rather than the
    plain ``{path: (storage, path)}`` dict Django's collectstatic
    normally passes — confirm against the calling storage.

    NOTE(review): original indentation was lost; the ``super()``
    delegation is reconstructed inside the per-file loop so that
    ``files_to_process`` is in scope for each file — verify against the
    upstream source.
    """
    if dry_run:
        return
    packager = Packager(storage=self)
    for _abs_path, rel_path in paths:
        # A fresh dict per input file: the file itself plus any package
        # outputs it participates in.
        files_to_process = OrderedDict()
        files_to_process[rel_path] = (self, rel_path)
        for package_name in packager.packages['css']:
            package = packager.package_for('css', package_name)
            output_file = package.output_filename
            # Only (re)pack packages that actually contain this file.
            if rel_path in package.paths:
                if self.packing:
                    packager.pack_stylesheets(package)
                files_to_process[output_file] = (self, output_file)
                yield output_file, output_file, True
        for package_name in packager.packages['js']:
            package = packager.package_for('js', package_name)
            output_file = package.output_filename
            if rel_path in package.paths:
                if self.packing:
                    packager.pack_javascripts(package)
                files_to_process[output_file] = (self, output_file)
                yield output_file, output_file, True
        # Forward this file's entries to the next storage in the MRO.
        super_class = super(PipelineMixin, self)
        if hasattr(super_class, 'post_process'):
            for name, hashed_name, processed in super_class.post_process(
                    files_to_process.copy(), dry_run, **options):
                yield name, hashed_name, processed
def handle(self, *args, **options): from pipeline.packager import Packager force = options.get('force', False) verbose = int(options.get('verbosity', 1)) >= 2 sync = options.get('dry_run', True) packager = Packager(verbose=verbose) for package_name in packager.packages['css']: if args and package_name not in args: continue package = packager.package_for('css', package_name) if verbose: print message = "CSS Group '%s'" % package_name print message print len(message) * '-' packager.pack_stylesheets(package, sync=sync, force=force) for package_name in packager.packages['js']: if args and package_name not in args: continue package = packager.package_for('js', package_name) if verbose: print message = "JS Group '%s'" % package_name print message print len(message) * '-' packager.pack_javascripts(package, sync=sync, force=force)
class CompressedCSSNode(template.Node):
    """Template node rendering a named CSS package: one compressed tag
    when PIPELINE is enabled, individual tags otherwise."""

    def __init__(self, name):
        self.name = name

    def render(self, context):
        group = template.Variable(self.name).resolve(context)
        config = settings.PIPELINE_CSS.get(group, {})
        if config:
            config = {group: config}
        # A packager scoped to just this one package (no JS packages).
        self.packager = Packager(css_packages=config, js_packages={})
        try:
            package = self.packager.package_for('css', group)
        except PackageNotFound:
            # fail silently, do not return anything if an invalid group is specified
            return ''
        if not settings.PIPELINE:
            package['paths'] = self.packager.compile(package['paths'])
            return self.render_individual(package)
        compressed_path = self.packager.pack_stylesheets(package)
        return self.render_css(package, compressed_path)

    def render_css(self, package, path):
        if 'template' not in package:
            package['template'] = "pipeline/css.html"
        # When the package supplies a context dict, update() below mutates
        # that dict in place — preserved from the original behaviour.
        ctx = package['context'] if 'context' in package else {}
        ctx.update({'url': self.packager.individual_url(path)})
        return render_to_string(package['template'], ctx)

    def render_individual(self, package):
        return '\n'.join(
            self.render_css(package, p) for p in package['paths'])
class CompressedCSSNode(template.Node):
    """Template node rendering a named CSS package: one compressed tag
    when PIPELINE is enabled, individual tags otherwise."""

    def __init__(self, name):
        self.name = name
        self.packager = Packager()

    def render(self, context):
        package_name = template.Variable(self.name).resolve(context)
        try:
            package = self.packager.package_for('css', package_name)
        except PackageNotFound:
            return ''  # fail silently, do not return anything if an invalid group is specified
        if settings.PIPELINE:
            compressed_path = self.packager.pack_stylesheets(package)
            return self.render_css(package, compressed_path)
        else:
            package['paths'] = self.packager.compile(package['paths'])
            return self.render_individual(package)

    def render_css(self, package, path):
        context = {}
        if 'template' not in package:
            package['template'] = "pipeline/css.html"
        # BUG FIX: this check was inverted ("if not 'context' in package"),
        # which read package['context'] exactly when the key was absent
        # (KeyError) and ignored a supplied context when present. The
        # sibling implementation uses the correct positive check.
        if 'context' in package:
            context = package['context']
        context.update({
            'url': self.packager.individual_url(path)
        })
        return render_to_string(package['template'], context)

    def render_individual(self, package):
        tags = [self.render_css(package, path) for path in package['paths']]
        return '\n'.join(tags)
def post_process(self, paths, dry_run=False, **options):
    """Pack all CSS/JS packages, registering every file the packers wrote,
    then delegate to the parent storage's post_process if it has one."""
    if dry_run:
        return []

    from pipeline.packager import Packager
    packager = Packager(storage=self)

    # One loop over both asset kinds via (kind, packer) dispatch pairs.
    for kind, pack in (('css', packager.pack_stylesheets),
                       ('js', packager.pack_javascripts)):
        for group in packager.packages[kind]:
            bundle = packager.package_for(kind, group)
            if self.packing:
                # Record every file the packer actually wrote out.
                for written in pack(bundle):
                    paths[written] = (self, written)
            else:
                # TODO: bcooksey 5/15/13. Not sure why we pretend we packed if packing is false...will this mess up source maps
                target = bundle.output_filename
                paths[target] = (self, target)

    parent = super(PipelineMixin, self)
    if hasattr(parent, 'post_process'):
        return parent.post_process(paths, dry_run, **options)
    return [(path, path, True) for path in paths]
def handle_noargs(self, **options):
    """No-args management command: pack every configured CSS and JS
    package group.

    NOTE: Python 2 code — uses print statements.
    """
    from pipeline.packager import Packager
    packager = Packager(
        force=options.get('force', False),
        verbose=int(options.get('verbosity', 1)) >= 2
    )
    for package_name in packager.packages['css']:
        package = packager.package_for('css', package_name)
        # Print a group banner when verbose or when forcing a rebuild.
        if packager.verbose or packager.force:
            print
            message = "CSS Group '%s'" % package_name
            print message
            print len(message) * '-'
        packager.pack_stylesheets(package)
    for package_name in packager.packages['js']:
        package = packager.package_for('js', package_name)
        if packager.verbose or packager.force:
            print
            message = "JS Group '%s'" % package_name
            print message
            print len(message) * '-'
        packager.pack_javascripts(package)
class OptimizedPipelineStorage(PipelineMixin, StaticFilesStorage):
    """This storage compresses only the packages which had modifications in
    their source files, or that have not been compressed yet. This speeds up
    the collectstatic process, since most of the time we modify only a few
    javascript/css files at a time.

    It also appends a md5 hash to the compressed files' url so any existing
    cache mechanisms are naturally invalidated."""

    # NOTE(review): these are class-level mutable lists, shared across all
    # instances of this storage — verify that collectstatic only ever uses
    # a single instance per run.
    compressed_packages = []
    unchanged_packages = []
    packager = None
    # Cache keys for the combined output hash and the dumped source lists.
    HASH_CACHE_KEY = 'pipeline_compressed_hash_key'
    SOURCES_DUMP_KEY = 'pipeline_dumped_sources_key'

    def url(self, name):
        """Append the produced hash to the resource url so existing cache
        mechanisms are naturally invalidated."""
        url = super(OptimizedPipelineStorage, self).url(name)
        _hash = self.get_compressed_files_hash()
        if _hash and name:
            return '{url}?{_hash}'.format(url=url, _hash=_hash)
        else:
            return url

    def post_process(self, paths, dry_run=False, **options):
        # Generator following Django's staticfiles post_process protocol;
        # packs only outdated packages, then finalizes (hash + cleanup).
        if dry_run:
            return
        from pipeline.packager import Packager
        self.packager = Packager(storage=self)
        for package_name in self.packager.packages['css']:
            package = self.packager.package_for('css', package_name)
            output_file = package.output_filename
            if self.packing and self._is_outdated(package_name, package):
                print('COMPRESSING {} package...'.format(package_name))
                self.packager.pack_stylesheets(package)
                self.compressed_packages.append(package_name)
            else:
                self.unchanged_packages.append(package_name)
            paths[output_file] = (self, output_file)
            yield output_file, output_file, True
        for package_name in self.packager.packages['js']:
            package = self.packager.package_for('js', package_name)
            output_file = package.output_filename
            if self.packing and self._is_outdated(package_name, package):
                print('COMPRESSING {} package...'.format(package_name))
                self.packager.pack_javascripts(package)
                self.compressed_packages.append(package_name)
            else:
                self.unchanged_packages.append(package_name)
            paths[output_file] = (self, output_file)
            yield output_file, output_file, True
        super_class = super(PipelineMixin, self)
        if hasattr(super_class, 'post_process'):
            for name, hashed_name, processed in super_class.post_process(
                    paths.copy(), dry_run, **options):
                yield name, hashed_name, processed
        self._finalize()

    def _is_outdated(self, package_name, package):
        # A package is outdated when any source changed, the source list
        # itself changed since the last run, or its output file is missing.
        outdated = False
        for path in package.paths:
            # Needs to run for every path in order to generate the individual
            # file hashes.
            if self._is_content_changed(path) and not outdated:
                outdated = True
        if not outdated:
            previous_paths = self._get_previous_compressed_sources(
                package_name)
            if not previous_paths or set(previous_paths) != set(package.paths):
                outdated = True
        from django.conf import settings
        output_path = os.path.join(settings.STATIC_ROOT,
                                   package.output_filename)
        return outdated or not os.path.exists(output_path)

    def _is_content_changed(self, path):
        """Verifies if the content of :path changed based on the hash that
        was produced during the last collectstatic run."""
        from django.conf import settings
        changed = True
        infile_path = os.path.join(self.location, path)
        outfile_path = os.path.join(settings.STATIC_ROOT, path)
        infile_hash_path = outfile_path + '.hash'
        with open(infile_path, 'rb') as infile_file:
            current_hash = hashlib.md5(infile_file.read()).hexdigest()
        from django.core.cache import caches
        DEFAULT_CACHE = caches['default']
        # Compare against the hash cached under the output's .hash key,
        # then refresh that cache entry with the current hash.
        old_hash = DEFAULT_CACHE.get(infile_hash_path)
        changed = current_hash != old_hash
        DEFAULT_CACHE.set(infile_hash_path, current_hash, None)
        return changed

    def _finalize(self):
        # Persist the source lists, remove exposed sources, write the
        # combined hash, and report a summary of this run.
        self._dump_sources()
        print('\n=== {} results ==='.format(self.__class__.__name__))
        total_removed = self._remove_sources()
        self._write_hash()
        print('{} removed files used in the compressing'.format(total_removed))
        print('{} new compressed packages: {}'.format(
            len(self.compressed_packages), self.compressed_packages))
        print('{} unchanged packages: {}'.format(len(self.unchanged_packages),
                                                 self.unchanged_packages))
        print('=== End {} results ==='.format(self.__class__.__name__))

    def _remove_sources(self):
        """We do not want to expose our source files, thus they are removed
        from the STATIC_ROOT directory, keeping only the compressed files."""
        from django.conf import settings
        sources = []
        for package_name in self.packager.packages['js']:
            package = self.packager.package_for('js', package_name)
            sources.extend(package.paths)
        for package_name in self.packager.packages['css']:
            package = self.packager.package_for('css', package_name)
            sources.extend(package.paths)
        removed = 0
        for source in sources:
            source_path = os.path.join(settings.STATIC_ROOT, source)
            if os.path.exists(source_path):
                os.remove(source_path)
                removed += 1
        return removed

    def _dump_sources(self):
        """We dump the list of compressed source files so we can compare if
        there is any difference (new files or removed files) in the next
        collectstatic run."""
        from django.core.cache import caches
        DEFAULT_CACHE = caches['default']
        packages = {}
        for package_name in self.packager.packages['js']:
            package = self.packager.package_for('js', package_name)
            packages[package_name] = package.paths
        for package_name in self.packager.packages['css']:
            package = self.packager.package_for('css', package_name)
            packages[package_name] = package.paths
        # cache forever
        DEFAULT_CACHE.set(self.SOURCES_DUMP_KEY, packages, None)

    def _get_previous_compressed_sources(self, package_name):
        # Returns the source list dumped for this package on the previous
        # run, or None when no dump exists yet.
        from django.core.cache import caches
        DEFAULT_CACHE = caches['default']
        return DEFAULT_CACHE.get(self.SOURCES_DUMP_KEY, {}).\
            get(package_name)

    def _write_hash(self):
        """Writes a single md5 hash considering all the content from the
        source files.

        This is useful to force any cache mechanism to update their
        registries."""
        from django.conf import settings
        from django.core.cache import caches
        DEFAULT_CACHE = caches['default']
        output_filenames = []
        for package_name in self.packager.packages['js']:
            package = self.packager.package_for('js', package_name)
            output_filenames.append(package.output_filename)
        for package_name in self.packager.packages['css']:
            package = self.packager.package_for('css', package_name)
            output_filenames.append(package.output_filename)
        contents = []
        for output_filename in output_filenames:
            abs_path = os.path.join(settings.STATIC_ROOT, output_filename)
            with io.open(abs_path, 'rb') as output_file:
                contents.append(output_file.read())
        # One digest over the concatenation of every compressed output.
        digest = hashlib.md5(b''.join(contents)).hexdigest()
        print('New hash: {}'.format(digest))
        DEFAULT_CACHE.set(self.HASH_CACHE_KEY, digest, None)  # cache forever

    @staticmethod
    def get_compressed_files_hash():
        # Public accessor used by url() to build cache-busting query strings.
        from django.core.cache import caches
        DEFAULT_CACHE = caches['default']
        return DEFAULT_CACHE.get(OptimizedPipelineStorage.HASH_CACHE_KEY)