    def test_compile_file(self, tmpdir):
        from jac import CompressorExtension
        ext = CompressorExtension(
            mock.Mock(compressor_output_dir=tmpdir,
                      compressor_static_prefix='/static',
                      compressor_source_dirs=[str(tmpdir)]))
        static_file = os.path.join(str(tmpdir), 'test.sass')

        with open(static_file, 'w', encoding='utf-8') as f:
            f.write('''$blue: #3bbfce
$margin: 16px

.content-navigation
  border-color: $blue
  color: darken($blue, 9%)

.border
  padding: $margin / 2
  margin: $margin / 2
  border-color: $blue''')

        html = '<link type="text/sass" rel="stylesheet" src="test.sass" />'
        expected_hash = hashlib.md5(utf8_encode(html))
        with open(static_file, encoding='utf-8') as f:
            expected_hash.update(utf8_encode(f.read()))

        assert ext._compress_block(
            'css', mock.Mock(return_value=html)
        ) == '<link type="text/css" rel="stylesheet" href="/static/{}.css" />'.format(
            expected_hash.hexdigest())

    def test_compile_file(self, tmpdir):
        from jac import CompressorExtension
        ext = CompressorExtension(mock.Mock(compressor_output_dir=tmpdir, compressor_static_prefix='/static',
                                            compressor_source_dirs=[str(tmpdir)]))
        static_file = os.path.join(str(tmpdir), 'test.sass')

        with open(static_file, 'w', encoding='utf-8') as f:
            f.write('''$blue: #3bbfce
$margin: 16px

.content-navigation
  border-color: $blue
  color: darken($blue, 9%)

.border
  padding: $margin / 2
  margin: $margin / 2
  border-color: $blue''')

        html = '<link type="text/sass" rel="stylesheet" src="test.sass">'
        expected_hash = hashlib.md5(utf8_encode(html))
        with open(static_file, encoding='utf-8') as f:
            expected_hash.update(utf8_encode(f.read()))

        assert ext._compress_block('css', mock.Mock(return_value=html)) == \
            '<link type="text/css" rel="stylesheet" href="/static/{}.css">'.format(expected_hash.hexdigest())

    def test_offline_compress(self, env):
        from jac import Compressor

        tmpdir = str(env.compressor_output_dir)

        env.compressor_offline_compress = True
        env.compressor_source_dirs = [os.path.join(tmpdir, 'static')]
        env.compressor_output_dir = os.path.join(tmpdir, 'dist')

        compressor = Compressor(environment=env)
        css = '<link type="text/css" rel="stylesheet" href="/static/test.css">'

        os.makedirs(os.path.join(tmpdir, 'templates'))
        with open(os.path.join(tmpdir, 'templates', 'test.html'), 'w') as fh:
            fh.write('<html>{% compress "css" %}' + css + '{% endcompress %}</html>')

        os.makedirs(os.path.join(tmpdir, 'static'))
        with open(os.path.join(tmpdir, 'static', 'test.css'), 'w') as fh:
            fh.write('html { display: block; }')

        compressor.offline_compress(env, [os.path.join(tmpdir, 'templates')])

        assert os.path.exists(env.compressor_output_dir)

    def test_offline_compress(self, env):
        from jac import Compressor

        tmpdir = str(env.compressor_output_dir)

        env.compressor_offline_compress = True
        env.compressor_source_dirs = [os.path.join(tmpdir, 'static')]
        env.compressor_output_dir = os.path.join(tmpdir, 'dist')

        compressor = Compressor(environment=env)
        css = '<link type="text/css" rel="stylesheet" href="/static/test.css" />'

        os.makedirs(os.path.join(tmpdir, 'templates'))
        with open(os.path.join(tmpdir, 'templates', 'test.html'), 'w') as fh:
            fh.write('<html>{% compress "css" %}' + css +
                     '{% endcompress %}</html>')

        os.makedirs(os.path.join(tmpdir, 'static'))
        with open(os.path.join(tmpdir, 'static', 'test.css'), 'w') as fh:
            fh.write('html { display: block; }')

        compressor.offline_compress(env, [os.path.join(tmpdir, 'templates')])

        assert os.path.exists(env.compressor_output_dir)
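
The `env` fixture these two offline tests rely on is not part of these excerpts. A minimal stand-in might be a plain jinja2 Environment carrying the compressor_* settings the tests read and overwrite, with jac's extension registered; the attribute names below mirror the tests, but the defaults and the fixture shape are assumptions, not the project's actual fixture.

import jinja2
import pytest

from jac import CompressorExtension


@pytest.fixture
def env(tmpdir):
    # Hypothetical fixture sketch: a real Environment whose compressor_*
    # attributes match what the offline tests above read and overwrite.
    environment = jinja2.Environment()
    environment.compressor_enabled = True
    environment.compressor_offline_compress = False
    environment.compressor_debug = False
    environment.compressor_output_dir = tmpdir
    environment.compressor_static_prefix = '/static'
    environment.compressor_source_dirs = [str(tmpdir)]
    environment.add_extension(CompressorExtension)
    return environment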
Example #5
    def make_hash(self, html):
        soup = BeautifulSoup(html, PARSER)
        compilables = self.find_compilable_tags(soup)
        html_hash = hashlib.md5(utf8_encode(html))

        for c in compilables:
            url = c.get('src') or c.get('href')
            if url:
                with open(self.find_file(url), 'r', encoding='utf-8') as f:
                    while True:
                        content = f.read(1024)
                        if content:
                            html_hash.update(utf8_encode(content))
                        else:
                            break

        return html_hash.hexdigest()
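
make_hash folds the raw HTML block plus the contents of every referenced source file, read in 1024-character chunks, into a single MD5 digest; utf8_encode, u and PARSER appear to be jac's compatibility/configuration helpers and are not shown in these excerpts. A stripped-down standalone sketch of the same chunked-hash idea, assuming a plain file path instead of jac's find_file()/find_compilable_tags() helpers:

import hashlib


def block_hash(html, source_path):
    # Hash the HTML block first, then fold in the referenced file in
    # 1024-character chunks, mirroring the loop in make_hash() above.
    digest = hashlib.md5(html.encode('utf-8'))
    with open(source_path, 'r', encoding='utf-8') as f:
        for chunk in iter(lambda: f.read(1024), ''):
            digest.update(chunk.encode('utf-8'))
    return digest.hexdigest()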
Example #6
    def make_hash(self, html):
        soup = BeautifulSoup(html)
        compilables = self.find_compilable_tags(soup)
        html_hash = hashlib.md5(utf8_encode(html))

        for c in compilables:
            url = c.get('src') or c.get('href')
            if url:
                with open(self.find_file(url), 'r', encoding='utf-8') as f:
                    while True:
                        content = f.read(1024)
                        if content:
                            html_hash.update(utf8_encode(content))
                        else:
                            break

        return html_hash.hexdigest()

    def make_hash(self, html):
        if self.config.compressor_offline_compress and html in self.offline_hash_cache:
            return self.offline_hash_cache[html]

        soup = BeautifulSoup(html, PARSER)
        compilables = self.find_compilable_tags(soup)
        html_hash = hashlib.md5(utf8_encode(html))

        for c in compilables:
            url = c.get('src') or c.get('href')
            if url:
                with open(self.find_file(url), 'r', encoding='utf-8') as f:
                    while True:
                        content = f.read(1024)
                        if content:
                            html_hash.update(utf8_encode(content))
                        else:
                            break

        digest = html_hash.hexdigest()
        if self.config.compressor_offline_compress:
            self.offline_hash_cache[html] = digest
        return digest
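
This variant changes nothing about the hashing itself; it only memoizes the digest per HTML block when offline compression is enabled, so a block repeated across templates is hashed once. The same pattern in isolation, reusing block_hash from the previous sketch and keyed on the raw HTML string:

_offline_hash_cache = {}  # raw HTML block -> hex digest


def cached_block_hash(html, source_path, offline_compress=True):
    # Memoized wrapper around block_hash() from the sketch above.
    if offline_compress and html in _offline_hash_cache:
        return _offline_hash_cache[html]
    digest = block_hash(html, source_path)
    if offline_compress:
        _offline_hash_cache[html] = digest
    return digest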
Example #8
    def compress(self, html, compression_type):

        if not self.config.compressor_enabled:
            return html

        compression_type = compression_type.lower()
        html_hash = self.make_hash(html)

        if not os.path.exists(u(self.config.compressor_output_dir)):
            os.makedirs(u(self.config.compressor_output_dir))

        cached_file = os.path.join(
            u(self.config.compressor_output_dir),
            u('{hash}.{extension}').format(
                hash=html_hash,
                extension=compression_type,
            ),
        )

        if os.path.exists(cached_file):
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(cached_file),
            )
            return self.render_element(filename, compression_type)

        assets = OrderedDict()
        soup = BeautifulSoup(html, PARSER)
        for count, c in enumerate(self.find_compilable_tags(soup)):

            url = c.get('src') or c.get('href')
            if url:
                filename = os.path.basename(u(url)).split('.', 1)[0]
                uri_cwd = os.path.join(u(self.config.compressor_static_prefix), os.path.dirname(u(url)))
                text = open(self.find_file(u(url)), 'r', encoding='utf-8')
                cwd = os.path.dirname(text.name)
            else:
                filename = u('inline{0}').format(count)
                uri_cwd = None
                text = c.string
                cwd = None

            mimetype = c['type'].lower()
            try:
                compressor = self.config.compressor_classes[mimetype]
            except KeyError:
                msg = u('Unsupported type of compression {0}').format(mimetype)
                raise RuntimeError(msg)

            text = self.get_contents(text)
            compressed = compressor.compile(text,
                                            mimetype=mimetype,
                                            cwd=cwd,
                                            uri_cwd=uri_cwd,
                                            debug=self.config.compressor_debug)

            if not self.config.compressor_debug:
                outfile = cached_file
            else:
                outfile = os.path.join(
                    u(self.config.compressor_output_dir),
                    u('{hash}-{filename}.{extension}').format(
                        hash=html_hash,
                        filename=filename,
                        extension=compression_type,
                    ),
                )

            if assets.get(outfile) is None:
                assets[outfile] = u('')
            assets[outfile] += u("\n") + compressed

        blocks = u('')
        for outfile, asset in assets.items():
            with open(outfile, 'w', encoding='utf-8') as fh:
                fh.write(asset)
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(outfile),
            )
            blocks += self.render_element(filename, compression_type)

        return blocks
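
compress() does a lot of jac-specific work (tag discovery, per-mimetype compilers, debug-mode per-asset output), but its output step is simple: write the compiled text under a hash-based name in compressor_output_dir and return a tag pointing at compressor_static_prefix. A self-contained sketch of just that step, assuming plain CSS and illustrative parameter names; for brevity the digest here covers only the HTML block, whereas jac's make_hash also folds in the referenced file contents:

import hashlib
import os


def write_compressed(css_text, html_block, output_dir, static_prefix):
    # Name the bundle after an MD5 digest, as compress() does for its
    # cached_file, then emit the rewritten <link> element.
    digest = hashlib.md5(html_block.encode('utf-8')).hexdigest()
    os.makedirs(output_dir, exist_ok=True)
    outfile = os.path.join(output_dir, '{0}.css'.format(digest))
    with open(outfile, 'w', encoding='utf-8') as fh:
        fh.write(css_text)
    href = '{0}/{1}'.format(static_prefix.rstrip('/'), os.path.basename(outfile))
    return '<link type="text/css" rel="stylesheet" href="{0}">'.format(href)

# Usage (paths are illustrative):
# write_compressed('html { display: block; }', '<link ...>', './dist', '/static')
# -> '<link type="text/css" rel="stylesheet" href="/static/<md5>.css">'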
Example #9
    def compress(self, html, compression_type):

        if not self.config.compressor_enabled:
            return html

        compression_type = compression_type.lower()
        html_hash = self.make_hash(html)

        if not os.path.exists(u(self.config.compressor_output_dir)):
            os.makedirs(u(self.config.compressor_output_dir))

        cached_file = os.path.join(
            u(self.config.compressor_output_dir),
            u('{hash}.{extension}').format(
                hash=html_hash,
                extension=compression_type,
            ),
        )

        if os.path.exists(cached_file):
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(cached_file),
            )
            return self.render_element(filename, compression_type)

        assets = OrderedDict()
        soup = BeautifulSoup(html)
        for count, c in enumerate(self.find_compilable_tags(soup)):

            url = c.get('src') or c.get('href')
            if url:
                filename = os.path.basename(u(url)).split('.', 1)[0]
                uri_cwd = os.path.join(u(self.config.compressor_static_prefix),
                                       os.path.dirname(u(url)))
                text = open(self.find_file(u(url)), 'r', encoding='utf-8')
                cwd = os.path.dirname(text.name)
            else:
                filename = u('inline{0}').format(count)
                uri_cwd = None
                text = c.string
                cwd = None

            mimetype = c['type'].lower()
            try:
                compressor = self.config.compressor_classes[mimetype]
            except KeyError:
                msg = u('Unsupported type of compression {0}').format(mimetype)
                raise RuntimeError(msg)

            text = self.get_contents(text)
            compressed = compressor.compile(text,
                                            mimetype=mimetype,
                                            cwd=cwd,
                                            uri_cwd=uri_cwd,
                                            debug=self.config.compressor_debug)

            if not self.config.compressor_debug:
                outfile = cached_file
            else:
                outfile = os.path.join(
                    u(self.config.compressor_output_dir),
                    u('{hash}-{filename}.{extension}').format(
                        hash=html_hash,
                        filename=filename,
                        extension=compression_type,
                    ),
                )

            if assets.get(outfile) is None:
                assets[outfile] = u('')
            assets[outfile] += u("\n") + compressed

        blocks = u('')
        for outfile, asset in assets.items():
            with open(outfile, 'w', encoding='utf-8') as fh:
                fh.write(asset)
            filename = os.path.join(
                u(self.config.compressor_static_prefix),
                os.path.basename(outfile),
            )
            blocks += self.render_element(filename, compression_type)

        return blocks