Example #1
def render(self, context):
    content = self.nodelist.render(context)
    if 'MEDIA_URL' in context:
        media_url = context['MEDIA_URL']
    else:
        media_url = settings.MEDIA_URL
    if self.kind == 'css':
        compressor = CssCompressor(content, xhtml=self.xhtml, media_url=media_url)
    elif self.kind == 'js':
        compressor = JsCompressor(content, xhtml=self.xhtml, media_url=media_url)
    in_cache = cache.get(compressor.cachekey)
    if in_cache:
        return in_cache
    else:
        # Use an atomic cache.add() as an "in progress" flag to prevent
        # dog-piling (a cache stampede) when many requests miss at once.
        in_progress_key = '%s.django_css.in_progress.%s' % (DOMAIN, compressor.cachekey)
        added_to_cache = cache.add(in_progress_key, True, 300)
        if added_to_cache:
            output = compressor.output()
            # Cache for roughly 30 days; rebuilt sooner only if the content changes.
            cache.set(compressor.cachekey, output, 2591000)
            cache.set(in_progress_key, False, 300)
        else:
            # Another request is already compressing; wait for it to finish.
            while cache.get(in_progress_key):
                sleep(0.1)
            output = cache.get(compressor.cachekey)
        return output
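The cache.add() call above is what prevents the stampede: with a backend like memcached, add() only succeeds for the first caller that sets the "in progress" flag, so concurrent requests that miss the cache wait and re-read instead of all recompressing. A minimal, self-contained sketch of the same pattern, with django-css's names replaced by illustrative ones (get_or_build and build_value are hypothetical); unlike the example above it deletes the flag instead of setting it to False:

from time import sleep

from django.core.cache import cache


def get_or_build(cache_key, build_value, lock_timeout=300, value_timeout=2591000):
    # Illustrative stampede-safe lookup, not django-css's actual API.
    value = cache.get(cache_key)
    if value is not None:
        return value
    in_progress_key = '%s.in_progress' % cache_key
    # add() is atomic: only one of the concurrent cache misses gets True here.
    if cache.add(in_progress_key, True, lock_timeout):
        try:
            value = build_value()
            cache.set(cache_key, value, value_timeout)
        finally:
            cache.delete(in_progress_key)
        return value
    # Someone else is building the value; poll until their flag expires or clears.
    while cache.get(in_progress_key):
        sleep(0.1)
    return cache.get(cache_key)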
Example #2
    def test_precompiler_caches_empty_files(self):
        command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
        compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
        self.assertEqual("body { color:#990; }", compiler.input())

        cache.set(compiler.get_cache_key(), "")
        compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
        self.assertEqual("", compiler.input())
Example #3
    def input(self, **kwargs):
        # Return the cached compiler output if present; otherwise run the
        # TypeScript precompiler and cache its result.
        key = self.get_cache_key()
        data = cache.get(key)
        if data is not None:
            return data
        filtered = super(TypeScriptFilter, self).input(**kwargs)
        cache.set(key, filtered, settings.COMPRESS_REBUILD_TIMEOUT)
        return filtered
Example #4
    def test_precompiler_caches_empty_files(self):
        command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
        compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
        self.assertEqual("body { color:#990; }", compiler.input())

        cache.set(compiler.get_cache_key(), "")
        compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
        self.assertEqual("", compiler.input())
Example #5
def get_mtime(filename):
    if settings.MTIME_DELAY:
        key = get_mtime_cachekey(filename)
        mtime = cache.get(key)
        if mtime is None:
            mtime = os.path.getmtime(filename)
            cache.set(key, mtime, settings.MTIME_DELAY)
        return mtime
    return os.path.getmtime(filename)
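get_mtime_cachekey() is not shown in this excerpt; a minimal sketch of what such a helper might look like (hypothetical, not django-compressor's actual implementation) hashes the path so any filename maps to a short, memcached-safe key:

import hashlib


def get_mtime_cachekey(filename):
    # Hypothetical helper: hash the full path so long or non-ASCII paths
    # still produce a valid cache key.
    return 'django_compressor.mtime.%s' % hashlib.md5(
        filename.encode('utf-8')).hexdigest()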
Example #6
 def input(self, **kwargs):
     if self.mimetype in settings.COMPRESS_CACHEABLE_PRECOMPILERS:
         key = self.get_cache_key()
         data = cache.get(key)
         if data is not None:
             return data
         filtered = super(CachedCompilerFilter, self).input(**kwargs)
         cache.set(key, filtered, settings.COMPRESS_REBUILD_TIMEOUT)
         return filtered
     else:
         return super(CachedCompilerFilter, self).input(**kwargs)
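This excerpt does not show how a precompiler ends up being cacheable; presumably its mimetype has to be listed in COMPRESS_CACHEABLE_PRECOMPILERS in settings. A hypothetical settings fragment (the TypeScript mimetype and tsc command are placeholders, not a recommendation):

# settings.py (illustrative values only)
COMPRESS_PRECOMPILERS = (
    ('text/typescript', 'tsc {infile} --out {outfile}'),
)
# Only mimetypes listed here take the cached branch in input() above.
COMPRESS_CACHEABLE_PRECOMPILERS = ('text/typescript',)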
Example #7
 def input(self, **kwargs):
     if self.mimetype in settings.COMPRESS_CACHEABLE_PRECOMPILERS:
         key = self.get_cache_key()
         data = cache.get(key)
         if data is not None:
             return data
         filtered = super(CachedCompilerFilter, self).input(**kwargs)
         cache.set(key, filtered, settings.COMPRESS_REBUILD_TIMEOUT)
         return filtered
     else:
         return super(CachedCompilerFilter, self).input(**kwargs)
Example #8
 def cache_set(self,
               key,
               val,
               timeout=settings.REBUILD_TIMEOUT,
               refreshed=False):
     refresh_time = timeout + time.time()
     real_timeout = timeout + settings.MINT_DELAY
     packed_val = (val, refresh_time, refreshed)
     return cache.set(key, packed_val, real_timeout)
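cache_set() above is the write half of a MintCache-style scheme: the value is stored together with a soft refresh_time, and the real backend timeout is padded by MINT_DELAY so a stale value stays servable while one client rebuilds it. The read half is not shown; a hedged sketch of what the matching cache_get() could look like (the names mirror the excerpt, but the logic is an assumption, not the project's actual code):

 def cache_get(self, key):
     # Hypothetical companion to cache_set() above.
     packed_val = cache.get(key)
     if packed_val is None:
         return None
     val, refresh_time, refreshed = packed_val
     if time.time() > refresh_time and not refreshed:
         # Stale: re-store it briefly, marked refreshed, so only this caller
         # rebuilds the value while everyone else keeps getting the old copy.
         self.cache_set(key, val, settings.MINT_DELAY, refreshed=True)
         return None
     return val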
Example #9
 def cache_set(self, key, val, timeout=settings.REBUILD_TIMEOUT, refreshed=False):
     refresh_time = timeout + time.time()
     real_timeout = timeout + settings.MINT_DELAY
     packed_val = (val, refresh_time, refreshed)
     return cache.set(key, packed_val, real_timeout)
Example #10
    def compress(self, log=None, **options):
        """
        Searches templates containing 'compress' nodes and compresses them
        "offline" -- outside of the request/response cycle.

        The result is cached with a cache-key derived from the content of the
        compress nodes (not the content of the possibly linked files!).
        """
        extensions = options.get('extensions')
        extensions = self.handle_extensions(extensions or ['html'])
        verbosity = int(options.get("verbosity", 0))
        if not log:
            log = StringIO()
        if not settings.TEMPLATE_LOADERS:
            raise OfflineGenerationError("No template loaders defined. You "
                                         "must set TEMPLATE_LOADERS in your "
                                         "settings.")
        paths = set()
        for loader in self.get_loaders():
            try:
                module = import_module(loader.__module__)
                get_template_sources = getattr(module,
                    'get_template_sources', None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(list(get_template_sources('')))
            except (ImportError, AttributeError):
                # Yeah, this didn't work out so well, let's move on
                pass
        if not paths:
            raise OfflineGenerationError("No template paths found. None of "
                                         "the configured template loaders "
                                         "provided template paths. See "
                                         "http://django.me/template-loaders "
                                         "for more information on template "
                                         "loaders.")
        if verbosity > 1:
            log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
        templates = set()
        for path in paths:
            for root, dirs, files in walk(path,
                    followlinks=options.get('followlinks', False)):
                templates.update(os.path.join(root, name)
                    for name in files if not name.startswith('.') and
                        any(fnmatch(name, "*%s" % glob) for glob in extensions))
        if not templates:
            raise OfflineGenerationError("No templates found. Make sure your "
                                         "TEMPLATE_LOADERS and TEMPLATE_DIRS "
                                         "settings are correct.")
        if verbosity > 1:
            log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")

        compressor_nodes = SortedDict()
        for template_name in templates:
            try:
                template_file = open(template_name)
                try:
                    template = Template(template_file.read().decode(
                                        settings.FILE_CHARSET))
                finally:
                    template_file.close()
            except IOError:  # unreadable file -> ignore
                if verbosity > 0:
                    log.write("Unreadable template at: %s\n" % template_name)
                continue
            except TemplateSyntaxError:  # broken template -> ignore
                if verbosity > 0:
                    log.write("Invalid template at: %s\n" % template_name)
                continue
            except UnicodeDecodeError:  # undecodable file -> ignore
                if verbosity > 0:
                    log.write("UnicodeDecodeError while trying to read "
                              "template %s\n" % template_name)
                continue
            nodes = list(self.walk_nodes(template))
            if nodes:
                compressor_nodes.setdefault(template_name, []).extend(nodes)

        if not compressor_nodes:
            raise OfflineGenerationError(
                "No 'compress' template tags found in templates.")

        if verbosity > 0:
            log.write("Found 'compress' tags in:\n\t" +
                      "\n\t".join(compressor_nodes.keys()) + "\n")

        log.write("Compressing... ")
        count = 0
        results = []
        context = Context(settings.COMPRESS_OFFLINE_CONTEXT)
        for nodes in compressor_nodes.values():
            for node in nodes:
                key = get_offline_cachekey(node.nodelist)
                try:
                    result = node.render(context, forced=True)
                except Exception as e:
                    raise CommandError("An error occurred during rendering: "
                                       "%s" % e)
                cache.set(key, result, settings.COMPRESS_OFFLINE_TIMEOUT)
                results.append(result)
                count += 1
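The loop above only fills the cache; the stored result matters only if the compress template tag looks it up again at request time. A hypothetical sketch of that consuming side, assuming the same get_offline_cachekey() helper and the COMPRESS_OFFLINE setting (render_compressed is a stand-in for the on-request compression path; the real CompressorNode.render may differ):

    def render(self, context, forced=False):
        # Illustrative consuming side, not django-compressor's actual render().
        if settings.COMPRESS_OFFLINE and not forced:
            key = get_offline_cachekey(self.nodelist)
            cached = cache.get(key)
            if cached is not None:
                return cached
        # Offline result missing (or compression forced): do it during the request.
        return self.render_compressed(context)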
Example #12
        if not compressor_nodes:
            raise OfflineGenerationError("No 'compress' template tags found in templates.")

        if verbosity > 0:
            log.write("Found 'compress' tags in:\n\t" +
                      "\n\t".join(compressor_nodes.keys()) + "\n")

        log.write("Compressing... ")
        count = 0
        results = []
        context = Context(settings.OFFLINE_CONTEXT)
        for nodes in compressor_nodes.values():
            for node in nodes:
                key = get_offline_cachekey(node.nodelist)
                result = node.render(context, compress=True, offline=False)
                cache.set(key, result, settings.OFFLINE_TIMEOUT)
                results.append(result)
                count += 1
        log.write("done\nCompressed %d block(s) from %d template(s).\n"
                  % (count, len(compressor_nodes)))
        return count, results

    def walk_nodes(self, node):
        for node in getattr(node, "nodelist", []):
            if (isinstance(node, CompressorNode) or
                    node.__class__.__name__ == "CompressorNode"): # for 1.1.X
                yield node
            else:
                for node in self.walk_nodes(node):
                    yield node