def __init__(self, *args, **kwargs):
    """
    Resolve the dotted-path ``callback`` attribute into a callable.

    Raises ImproperlyConfigured if the attribute is missing, if the
    callback module cannot be imported, or if the module lacks the
    named function. On success the callable is stored on
    ``self._callback_func``.
    """
    super(CallbackOutputFilter, self).__init__(*args, **kwargs)
    if self.callback is None:
        raise ImproperlyConfigured(
            "The callback filter %s must define a 'callback' attribute." %
            self.__class__.__name__)
    try:
        mod_name, func_name = get_mod_func(self.callback)
        func = getattr(import_module(mod_name), func_name)
    except (ImportError, TypeError):
        # Mention the filter's declared dependencies, if any, to point
        # the user at what probably needs installing.
        if self.dependencies:
            if len(self.dependencies) == 1:
                warning = "dependency (%s) is" % self.dependencies[0]
            else:
                # join() accepts the sequence directly; no comprehension needed
                warning = ("dependencies (%s) are" %
                           ", ".join(self.dependencies))
        else:
            warning = ""
        raise ImproperlyConfigured(
            "The callback %s couldn't be imported. Make sure the %s "
            "correctly installed." % (self.callback, warning))
    except AttributeError as e:
        raise ImproperlyConfigured("An error occurred while importing the "
                                   "callback filter %s: %s" % (self, e))
    else:
        self._callback_func = func
def precompile(self, content, kind=None, elem=None, filename=None, **kwargs):
    """
    Run *content* through the precompiler registered for its mimetype.

    Returns a ``(handled, content)`` pair: ``handled`` is True when a
    precompiler processed the content, False when the content is passed
    through untouched.
    """
    if not kind:
        return False, content
    attrs = self.parser.elem_attribs(elem)
    mimetype = attrs.get("type", None)
    if not mimetype:
        return False, content
    filter_or_command = self.all_mimetypes.get(mimetype)
    if filter_or_command is None:
        # Plain CSS/JS need no precompiling; anything else unregistered
        # is a configuration error.
        if mimetype not in ("text/css", "text/javascript"):
            raise CompressorError("Couldn't find any precompiler in "
                                  "COMPRESS_PRECOMPILERS setting for "
                                  "mimetype '%s'." % mimetype)
        return False, content
    mod_name, cls_name = get_mod_func(filter_or_command)
    try:
        mod = import_module(mod_name)
    except ImportError:
        # Not an importable filter class -- treat the value as a
        # shell command and hand it to the generic compiler filter.
        command_filter = CompilerFilter(
            content, filter_type=self.type,
            command=filter_or_command, filename=filename)
        return True, command_filter.input(**kwargs)
    try:
        precompiler_class = getattr(mod, cls_name)
    except AttributeError:
        raise FilterDoesNotExist('Could not find "%s".' % filter_or_command)
    precompiler = precompiler_class(
        content, attrs, filter_type=self.type, filename=filename)
    return True, precompiler.input(**kwargs)
def _prepare_contexts(self, engine):
    """
    Call the COMPRESS_OFFLINE_CONTEXT function and adapt its result
    for *engine*: Django gets Context-wrapped dicts (as a generator),
    jinja2 gets the raw iterable, any other engine gets None.
    """
    mod_name, func_name = get_mod_func(settings.COMPRESS_OFFLINE_CONTEXT)
    context_factory = getattr(import_module(mod_name), func_name)
    raw_contexts = context_factory()
    if engine == 'jinja2':
        return raw_contexts
    if engine == 'django':
        return (Context(c) for c in raw_contexts)
    return None
def get_cachekey(*args, **kwargs):
    """
    Lazily import the configured COMPRESS_CACHE_KEY_FUNCTION (once,
    cached in the module-global ``_cachekey_func``) and delegate to it.

    Raises ImportError if the configured dotted path cannot be resolved.
    """
    global _cachekey_func
    if _cachekey_func is None:
        try:
            mod_name, func_name = get_mod_func(
                settings.COMPRESS_CACHE_KEY_FUNCTION)
            _cachekey_func = getattr(import_module(mod_name), func_name)
        # Python 3 syntax: the original "except (...), e" is a SyntaxError
        except (AttributeError, ImportError) as e:
            raise ImportError("Couldn't import cache key function %s: %s"
                              % (settings.COMPRESS_CACHE_KEY_FUNCTION, e))
    # The original resolved the function but never called it, leaving
    # *args/**kwargs unused; delegate so the wrapper actually works.
    return _cachekey_func(*args, **kwargs)
def get_cachekey(*args, **kwargs):
    """
    Lazily import the configured COMPRESS_CACHE_KEY_FUNCTION (once,
    cached in the module-global ``_cachekey_func``) and delegate to it.

    Raises ImportError if the configured dotted path cannot be resolved.
    """
    global _cachekey_func
    if _cachekey_func is None:
        try:
            mod_name, func_name = get_mod_func(
                settings.COMPRESS_CACHE_KEY_FUNCTION)
            _cachekey_func = getattr(import_module(mod_name), func_name)
        # Python 3 syntax: the original "except (...), e" is a SyntaxError
        except (AttributeError, ImportError) as e:
            raise ImportError("Couldn't import cache key function %s: %s"
                              % (settings.COMPRESS_CACHE_KEY_FUNCTION, e))
    # The original resolved the function but never called it, leaving
    # *args/**kwargs unused; delegate so the wrapper actually works.
    return _cachekey_func(*args, **kwargs)
def precompile(
    self, content, kind=None, elem=None, filename=None, charset=None, **kwargs
):
    """
    Process *content* with the precompiler registered for its mimetype.

    This is the place where files like coffee script are processed.
    Returns ``(handled, content)``; ``handled`` is False when nothing
    needed precompiling.
    """
    if not kind:
        return False, content
    attrs = self.parser.elem_attribs(elem)
    mimetype = attrs.get("type", None)
    if mimetype is None:
        return False, content
    filter_or_command = self.precompiler_mimetypes.get(mimetype)
    if filter_or_command is None:
        # Plain CSS/JS pass through; any other unregistered mimetype
        # is a configuration error.
        if mimetype in ("text/css", "text/javascript"):
            return False, content
        raise CompressorError(
            "Couldn't find any precompiler in "
            "COMPRESS_PRECOMPILERS setting for "
            "mimetype '%s'." % mimetype
        )
    mod_name, cls_name = get_mod_func(filter_or_command)
    try:
        mod = import_module(mod_name)
    except (ImportError, TypeError):
        # Not an importable filter class -- treat the setting value as
        # a shell command. ("compiler" instead of "filter": don't shadow
        # the builtin.)
        compiler = CachedCompilerFilter(
            content=content,
            filter_type=self.type,
            filename=filename,
            charset=charset,
            command=filter_or_command,
            mimetype=mimetype,
        )
        return True, compiler.input(**kwargs)
    try:
        precompiler_class = getattr(mod, cls_name)
    except AttributeError:
        raise FilterDoesNotExist('Could not find "%s".' % filter_or_command)
    precompiler = precompiler_class(
        content,
        attrs=attrs,
        filter_type=self.type,
        charset=charset,
        filename=filename,
    )
    return True, precompiler.input(**kwargs)
def __init__(self, *args, **kwargs):
    """
    Resolve the dotted-path ``callback`` attribute into a callable and
    store it on ``self._callback_func``.

    Raises ImproperlyConfigured when the attribute is missing or the
    callback cannot be imported.
    """
    super(CallbackOutputFilter, self).__init__(*args, **kwargs)
    if self.callback is None:
        # Fixed: the original adjacent literals read "...must definea..."
        raise ImproperlyConfigured("The callback filter %s must define "
                                   "a 'callback' attribute." % self)
    try:
        mod_name, func_name = get_mod_func(self.callback)
        func = getattr(import_module(mod_name), func_name)
    # Python 3 syntax: the original "except ImportError, e" is a
    # SyntaxError; the bound exception was unused anyway.
    except ImportError:
        if self.dependencies:
            if len(self.dependencies) == 1:
                warning = "dependency (%s) is" % self.dependencies[0]
            else:
                warning = ("dependencies (%s) are" %
                           ", ".join(self.dependencies))
        else:
            warning = ""
        raise ImproperlyConfigured("The callback %s couldn't be imported. "
                                   "Make sure the %s correctly installed."
                                   % (self.callback, warning))
    else:
        # The original imported func but discarded it; store it the way
        # the newer implementation of this filter does.
        self._callback_func = func
def __init__(self, *args, **kwargs):
    """
    Resolve the dotted-path ``callback`` attribute into a callable and
    store it on ``self._callback_func``.

    Raises ImproperlyConfigured when the attribute is missing or the
    callback cannot be imported.
    """
    super(CallbackOutputFilter, self).__init__(*args, **kwargs)
    if self.callback is None:
        # Fixed: the original adjacent literals read "...must definea..."
        raise ImproperlyConfigured("The callback filter %s must define "
                                   "a 'callback' attribute." % self)
    try:
        mod_name, func_name = get_mod_func(self.callback)
        func = getattr(import_module(mod_name), func_name)
    # Python 3 syntax: the original "except ImportError, e" is a
    # SyntaxError; the bound exception was unused anyway.
    except ImportError:
        if self.dependencies:
            if len(self.dependencies) == 1:
                warning = "dependency (%s) is" % self.dependencies[0]
            else:
                warning = ("dependencies (%s) are" %
                           ", ".join(self.dependencies))
        else:
            warning = ""
        raise ImproperlyConfigured(
            "The callback %s couldn't be imported. "
            "Make sure the %s correctly installed." %
            (self.callback, warning))
    else:
        # The original imported func but discarded it; store it the way
        # the newer implementation of this filter does.
        self._callback_func = func
def precompile(self, content, kind=None, elem=None, filename=None,
               charset=None, **kwargs):
    """
    Process *content* with the precompiler registered for its mimetype.

    This is the place where files like coffee script are processed.
    Returns ``(handled, content)``; ``handled`` is False when nothing
    needed precompiling.
    """
    if not kind:
        return False, content
    attrs = self.parser.elem_attribs(elem)
    mimetype = attrs.get("type", None)
    if mimetype is None:
        return False, content
    filter_or_command = self.precompiler_mimetypes.get(mimetype)
    if filter_or_command is None:
        # Plain CSS/JS pass through; any other unregistered mimetype
        # is a configuration error.
        if mimetype in ("text/css", "text/javascript"):
            return False, content
        raise CompressorError("Couldn't find any precompiler in "
                              "COMPRESS_PRECOMPILERS setting for "
                              "mimetype '%s'." % mimetype)
    mod_name, cls_name = get_mod_func(filter_or_command)
    try:
        mod = import_module(mod_name)
    except (ImportError, TypeError):
        # Not an importable filter class -- treat the setting value as
        # a shell command. ("compiler" instead of "filter": don't shadow
        # the builtin.)
        compiler = CachedCompilerFilter(
            content=content, filter_type=self.type, filename=filename,
            charset=charset, command=filter_or_command, mimetype=mimetype)
        return True, compiler.input(**kwargs)
    try:
        precompiler_class = getattr(mod, cls_name)
    except AttributeError:
        raise FilterDoesNotExist('Could not find "%s".' % filter_or_command)
    precompiler = precompiler_class(
        content, attrs=attrs, filter_type=self.type, charset=charset,
        filename=filename)
    return True, precompiler.input(**kwargs)
def compress(self, log=None, **options):
    """
    Searches templates containing 'compress' nodes and compresses them
    "offline" -- outside of the request/response cycle.

    The result is cached with a cache-key derived from the content of the
    compress nodes (not the content of the possibly linked files!).
    Returns ``(block_count, results)``.
    """
    engine = options.get("engine", "django")
    extensions = options.get('extensions')
    extensions = self.handle_extensions(extensions or ['html'])
    verbosity = int(options.get("verbosity", 0))
    if not log:
        log = StringIO()
    if not self.get_loaders():
        raise OfflineGenerationError("No template loaders defined. You "
                                     "must set TEMPLATE_LOADERS in your "
                                     "settings or set 'loaders' in your "
                                     "TEMPLATES dictionary.")

    # --- collect candidate template files -------------------------------
    templates = set()
    if engine == 'django':
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(
                    module, 'get_template_sources', None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(smart_text(origin)
                             for origin in get_template_sources(''))
            except (ImportError, AttributeError, TypeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError(
                "No template paths found. None of "
                "the configured template loaders "
                "provided template paths. See "
                "https://docs.djangoproject.com/en/1.8/topics/templates/ "
                "for more information on template "
                "loaders.")
        if verbosity > 1:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        for path in paths:
            for root, dirs, files in os.walk(
                    path, followlinks=options.get('followlinks', False)):
                templates.update(
                    os.path.join(root, name) for name in files
                    if not name.startswith('.') and
                    any(fnmatch(name, "*%s" % glob) for glob in extensions))
    elif engine == 'jinja2':
        env = settings.COMPRESS_JINJA2_GET_ENVIRONMENT()
        if env and hasattr(env, 'list_templates'):
            templates |= set([
                env.loader.get_source(env, template)[1]
                for template in env.list_templates(
                    filter_func=lambda _path:
                        os.path.splitext(_path)[-1] in extensions)])
    if not templates:
        raise OfflineGenerationError("No templates found. Make sure your "
                                     "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                     "settings are correct.")
    if verbosity > 1:
        log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

    # --- resolve offline contexts ---------------------------------------
    contexts = settings.COMPRESS_OFFLINE_CONTEXT
    if isinstance(contexts, six.string_types):
        try:
            module, function = get_mod_func(contexts)
            contexts = getattr(import_module(module), function)()
        except (AttributeError, ImportError, TypeError) as e:
            raise ImportError(
                "Couldn't import offline context function %s: %s" %
                (settings.COMPRESS_OFFLINE_CONTEXT, e))
    elif not isinstance(contexts, (list, tuple)):
        contexts = [contexts]
    contexts = list(contexts)  # evaluate generator

    # --- parse templates and collect 'compress' nodes -------------------
    parser = self.__get_parser(engine)
    compressor_nodes = OrderedDict()
    for template_name in templates:
        try:
            template = parser.parse(template_name)
        except IOError:
            # unreadable file -> ignore
            if verbosity > 0:
                log.write("Unreadable template at: %s\n" % template_name)
            continue
        except TemplateSyntaxError as e:
            # broken template -> ignore
            if verbosity > 0:
                log.write("Invalid template %s: %s\n"
                          % (template_name, smart_text(e)))
            continue
        except TemplateDoesNotExist:
            # non existent template -> ignore
            if verbosity > 0:
                log.write("Non-existent template at: %s\n" % template_name)
            continue
        except UnicodeDecodeError:
            if verbosity > 0:
                log.write("UnicodeDecodeError while trying to read "
                          "template %s\n" % template_name)
            continue
        for context_dict in contexts:
            context = Context(parser.get_init_context(context_dict))
            try:
                nodes = list(parser.walk_nodes(template, context=context))
            except (TemplateDoesNotExist, TemplateSyntaxError) as e:
                # Could be an error in some base template
                if verbosity > 0:
                    log.write("Error parsing template %s: %s\n"
                              % (template_name, smart_text(e)))
                continue
            if nodes:
                template.template_name = template_name
                template_nodes = compressor_nodes.setdefault(
                    template, OrderedDict())
                for node in nodes:
                    template_nodes.setdefault(node, []).append(context)
    if not compressor_nodes:
        raise OfflineGenerationError(
            "No 'compress' template tags found in templates."
            "Try running compress command with --follow-links and/or"
            "--extension=EXTENSIONS")
    if verbosity > 0:
        log.write("Found 'compress' tags in:\n\t" +
                  "\n\t".join((t.template_name
                               for t in compressor_nodes.keys())) + "\n")

    # --- render every node for every context and build the manifest -----
    log.write("Compressing... ")
    block_count = 0
    compressed_contexts = []
    results = []
    offline_manifest = OrderedDict()
    for template, nodes in compressor_nodes.items():
        template._log = log
        template._log_verbosity = verbosity
        for node, node_contexts in nodes.items():
            for context in node_contexts:
                if context not in compressed_contexts:
                    compressed_contexts.append(context)
                context.push()
                if not parser.process_template(template, context):
                    continue
                parser.process_node(template, context, node)
                rendered = parser.render_nodelist(template, context, node)
                key = get_offline_hexdigest(rendered)
                # identical blocks are rendered only once
                if key in offline_manifest:
                    continue
                try:
                    result = parser.render_node(template, context, node)
                except Exception as e:
                    raise CommandError(
                        "An error occurred during rendering %s: "
                        "%s" % (template.template_name, smart_text(e)))
                result = result.replace(
                    settings.COMPRESS_URL, settings.COMPRESS_URL_PLACEHOLDER)
                offline_manifest[key] = result
                context.pop()
                results.append(result)
                block_count += 1
    write_offline_manifest(offline_manifest)
    context_count = len(compressed_contexts)
    log.write("done\nCompressed %d block(s) from %d template(s) for "
              "%d context(s).\n"
              % (block_count, len(compressor_nodes), context_count))
    return block_count, results
def compress(self, engine, extensions, verbosity, follow_links, log):
    """
    Searches templates containing 'compress' nodes and compresses them
    "offline" -- outside of the request/response cycle.

    The result is cached with a cache-key derived from the content of the
    compress nodes (not the content of the possibly linked files!).
    Returns ``(offline_manifest, block_count, results)``.
    """
    if not self.get_loaders():
        raise OfflineGenerationError("No template loaders defined. You "
                                     "must set TEMPLATE_LOADERS in your "
                                     "settings or set 'loaders' in your "
                                     "TEMPLATES dictionary.")

    # --- collect candidate template files -------------------------------
    templates = set()
    if engine == 'django':
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(
                    module, 'get_template_sources', None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(
                    smart_str(origin)
                    for origin in get_template_sources(''))
            except (ImportError, AttributeError, TypeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError(
                "No template paths found. None of "
                "the configured template loaders "
                "provided template paths. See "
                "https://docs.djangoproject.com/en/2.1/topics/templates/ "
                "for more information on template "
                "loaders.")
        if verbosity >= 2:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        for path in paths:
            for root, dirs, files in os.walk(path, followlinks=follow_links):
                templates.update(
                    os.path.relpath(os.path.join(root, name), path)
                    for name in files
                    if not name.startswith('.') and any(
                        fnmatch(name, "*%s" % glob) for glob in extensions))
    elif engine == 'jinja2':
        env = settings.COMPRESS_JINJA2_GET_ENVIRONMENT()
        if env and hasattr(env, 'list_templates'):
            templates |= set([
                env.loader.get_source(env, template)[1]
                for template in env.list_templates(
                    filter_func=lambda _path: os.path.splitext(_path)[
                        -1] in extensions)
            ])
    if not templates:
        raise OfflineGenerationError("No templates found. Make sure your "
                                     "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                     "settings are correct.")
    if verbosity >= 2:
        log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

    # --- resolve offline contexts ---------------------------------------
    contexts = settings.COMPRESS_OFFLINE_CONTEXT
    if isinstance(contexts, str):
        try:
            module, function = get_mod_func(contexts)
            contexts = getattr(import_module(module), function)()
        except (AttributeError, ImportError, TypeError) as e:
            raise ImportError(
                "Couldn't import offline context function %s: %s" %
                (settings.COMPRESS_OFFLINE_CONTEXT, e))
    elif not isinstance(contexts, (list, tuple)):
        contexts = [contexts]

    # --- parse templates, skipping the unreadable/broken ones -----------
    parser = self.__get_parser(engine)
    fine_templates = []
    if verbosity >= 1:
        log.write("Compressing... ")
    for template_name in templates:
        try:
            template = parser.parse(template_name)
            template.template_name = template_name
            fine_templates.append(template)
        except IOError:
            # unreadable file -> ignore
            if verbosity >= 1:
                log.write("Unreadable template at: %s\n" % template_name)
            continue
        except TemplateSyntaxError as e:
            # broken template -> ignore
            if verbosity >= 1:
                log.write("Invalid template %s: %s\n"
                          % (template_name, smart_str(e)))
            continue
        except TemplateDoesNotExist:
            # non existent template -> ignore
            if verbosity >= 1:
                log.write("Non-existent template at: %s\n" % template_name)
            continue
        except UnicodeDecodeError:
            if verbosity >= 1:
                log.write("UnicodeDecodeError while trying to read "
                          "template %s\n" % template_name)
            continue

    # --- walk nodes per context and compress them in a thread pool ------
    contexts_count = 0
    nodes_count = 0
    offline_manifest = OrderedDict()
    errors = []
    for context_dict in contexts:
        compressor_nodes = OrderedDict()
        for template in fine_templates:
            context = Context(parser.get_init_context(context_dict))
            try:
                nodes = list(parser.walk_nodes(template, context=context))
            except (TemplateDoesNotExist, TemplateSyntaxError) as e:
                # Could be an error in some base template
                if verbosity >= 1:
                    log.write("Error parsing template %s: %s\n"
                              % (template.template_name, smart_str(e)))
                continue
            if nodes:
                template_nodes = compressor_nodes.setdefault(
                    template, OrderedDict())
                for node in nodes:
                    nodes_count += 1
                    template_nodes.setdefault(node, []).append(context)
        # "with" performs the same shutdown(wait=True) the original did
        # explicitly, and also shuts down on an exception.
        with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
            for template, nodes in compressor_nodes.items():
                template._log = log
                template._log_verbosity = verbosity
                pool.submit(self._compress_template, offline_manifest,
                            nodes, parser, template, errors)
        contexts_count += 1

    # If errors exist, raise the first one in the list
    if errors:
        raise errors[0]
    elif not nodes_count:
        raise OfflineGenerationError(
            "No 'compress' template tags found in templates."
            "Try running compress command with --follow-links and/or"
            "--extension=EXTENSIONS")
    if verbosity >= 1:
        log.write(
            "done\nCompressed %d block(s) from %d template(s) for %d context(s).\n"
            % (len(offline_manifest), nodes_count, contexts_count))
    return offline_manifest, len(
        offline_manifest), offline_manifest.values()
def get_hexdigest(plaintext, length=None):
    """Return the md5 hex digest of *plaintext*, truncated to *length*
    characters when *length* is given."""
    digest = md5_constructor(smart_str(plaintext)).hexdigest()
    if length:
        return digest[:length]
    return digest


def simple_cachekey(key):
    """Default cache key: a plain namespaced key."""
    return 'django_compressor.%s' % smart_str(key)


def socket_cachekey(key):
    """Cache key namespaced by hostname, for shared cache backends."""
    return "django_compressor.%s.%s" % (socket.gethostname(), smart_str(key))


# Resolve the configured cache key function once at import time.
try:
    mod_name, func_name = get_mod_func(settings.COMPRESS_CACHE_KEY_FUNCTION)
    get_cachekey = getattr(import_module(mod_name), func_name)
# Python 3 syntax: the original "except (...), e" is a SyntaxError
except (AttributeError, ImportError) as e:
    raise ImportError("Couldn't import cache key function %s: %s" %
                      (settings.COMPRESS_CACHE_KEY_FUNCTION, e))


def get_mtime_cachekey(filename):
    """Cache key for the stored mtime of *filename*."""
    return get_cachekey("mtime.%s" % get_hexdigest(filename))


def get_offline_cachekey(source):
    """Cache key for an offline-compressed block, derived from the
    source token contents."""
    to_hexdigest = [smart_str(getattr(s, 's', s)) for s in source]
    return get_cachekey("offline.%s" % get_hexdigest(to_hexdigest))


def get_templatetag_cachekey(compressor, mode, kind):
    # NOTE(review): the body of this function is truncated in the source
    # at this point -- restore it from version control before shipping.
    ...
def compress(self, log=None, **options):
    """
    Searches templates containing 'compress' nodes and compresses them
    "offline" -- outside of the request/response cycle.

    The result is cached with a cache-key derived from the content of the
    compress nodes (not the content of the possibly linked files!).
    """
    extensions = options.get('extensions')
    extensions = self.handle_extensions(extensions or ['mako'])
    verbosity = int(options.get("verbosity", 0))
    if not log:
        log = StringIO()
    if not self.get_loaders():
        raise OfflineGenerationError("No template loaders defined. You "
                                     "must set TEMPLATE_LOADERS in your "
                                     "settings or set 'loaders' in your "
                                     "TEMPLATES dictionary.")

    # Collect template paths relative to the first loader's directories.
    templates = set()
    for path in self.get_loaders()[0].directories:
        for root, dirs, files in os.walk(
                path, followlinks=options.get('followlinks', False)):
            templates.update(
                os.path.join(root, name)[len(path):]
                for name in files
                if not name.startswith('.') and any(
                    fnmatch(name, "*%s" % glob) for glob in extensions))
    if not templates:
        raise OfflineGenerationError("No templates found. Make sure your "
                                     "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                     "settings are correct.")
    if verbosity > 1:
        log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

    # Resolve the offline contexts (dotted path, single dict, or list).
    contexts = settings.COMPRESS_OFFLINE_CONTEXT
    if isinstance(contexts, six.string_types):
        try:
            module, function = get_mod_func(contexts)
            contexts = getattr(import_module(module), function)()
        except (AttributeError, ImportError, TypeError) as e:
            raise ImportError(
                "Couldn't import offline context function %s: %s" %
                (settings.COMPRESS_OFFLINE_CONTEXT, e))
    elif not isinstance(contexts, (list, tuple)):
        contexts = [contexts]
    contexts = list(contexts)  # evaluate generator

    parser = self.__get_parser()
    for template_name in templates:
        try:
            template = parser.get_template(template_name)
        except IOError:
            # unreadable file -> ignore
            if verbosity > 0:
                log.write("Unreadable template at: %s\n" % template_name)
            continue
        except TemplateSyntaxError as e:
            # broken template -> ignore
            if verbosity > 0:
                log.write("Invalid template %s: %s\n"
                          % (template_name, smart_text(e)))
            continue
        except TemplateDoesNotExist:
            # non existent template -> ignore
            if verbosity > 0:
                log.write("Non-existent template at: %s\n" % template_name)
            continue
        except UnicodeDecodeError:
            if verbosity > 0:
                log.write("UnicodeDecodeError while trying to read "
                          "template %s\n" % template_name)
            continue
        for context_dict in contexts:
            try:
                from django.test.client import RequestFactory
                request = RequestFactory().get('/')
                context_dict['request'] = request
                # The mako compress tag compresses and updates the
                # manifest as a side effect of rendering.
                template.render(context=context_dict, request=request)
            except (TemplateDoesNotExist, TemplateSyntaxError) as e:
                # Could be an error in some base template
                if verbosity > 0:
                    log.write("Error parsing template %s: %s\n"
                              % (template_name, smart_text(e)))
                continue
    # NOTE(review): the source appears truncated here -- unlike the other
    # compress() variants there is no summary log line or return value
    # after the loop; confirm against version control.