Example #1
    def render_html(self, data):
        template = self.templateEnv.get_template(data["file"])
        if "page_data" not in data:
            data["page_data"] = dict()
        html = minify_html.minify(template.render(data["page_data"]),
                                  minify_js=True)
        return html
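The method above relies on a `templateEnv` attribute that the snippet does not show. A minimal sketch of one plausible setup, using a Jinja2 environment with a filesystem loader (the directory name and autoescape choice are assumptions, not part of the original code):

import jinja2

# Hypothetical construction of the `templateEnv` attribute used by render_html.
templateEnv = jinja2.Environment(
    loader=jinja2.FileSystemLoader("templates"),
    autoescape=jinja2.select_autoescape(["html"]),
)

Passing `data["page_data"]` positionally works because Jinja2's `Template.render` accepts a mapping as its first argument and merges it into the template context.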
Example #2
    def render(
        self,
        template: str,
        content_file: Optional[pathlib.Path] = None,
        render_args: Dict[str, Any] = {},
        outfile: Optional[pathlib.Path] = None,
    ) -> None:
        args = self._args.copy()
        args.update(render_args)

        if content_file:
            self._render_args_from_rst(content_file, args)
            if not outfile:
                outfile = content_file.relative_to(
                    self._content_root).with_suffix('.html')
        if not outfile:
            raise ValueError(
                'Neither `content_file` nor `outfile` was supplied')
        outfile = self._outdir.joinpath(outfile)
        root = pathlib.Path(os.path.relpath(self._outdir, outfile.parent))
        static = root / 'static'
        args['root'] = root
        args['css'] = static / 'css'
        args['img'] = static / 'img'
        args['js'] = static / 'js'

        try:
            html = self._templates.get_template(template).render(**args)
        except Exception as e:
            html = mako.exceptions.html_error_template().render().decode()
            raise e
        finally:
            self._write(outfile,
                        minify_html.minify(html) if self._minify else html)
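The `root` value computed above is the relative path from the directory of the page being written back up to the output root, so that links into `static/` resolve at any nesting depth. A quick illustration with made-up paths:

import os
import pathlib

outdir = pathlib.Path("build")                       # hypothetical output root
outfile = outdir / "posts" / "2021" / "intro.html"   # hypothetical page
root = pathlib.Path(os.path.relpath(outdir, outfile.parent))
print(root)                     # ../.. (on POSIX)
print(root / "static" / "css")  # ../../static/css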
Example #3
def html_validator(html: str) -> str:
    html = stripBr(html)
    # repair invalid HTML first, since minify_html is not robust against malformed markup
    html = BeautifulSoup(html, 'lxml').decode()
    html = minify(html,
                  do_not_minify_doctype=True,
                  keep_closing_tags=True,
                  keep_spaces_between_attributes=True,
                  ensure_spec_compliant_unquoted_attribute_values=True,
                  remove_processing_instructions=True)
    html = replaceInvalidSpace(html)
    return html
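`stripBr` and `replaceInvalidSpace` are project-specific helpers that are not part of this snippet. Purely illustrative stand-ins, so that `html_validator` can be exercised in isolation:

import re

def stripBr(html: str) -> str:
    # illustrative only: normalize <br/> / <br /> variants before parsing
    return re.sub(r'<br\s*/?>', '<br>', html)

def replaceInvalidSpace(html: str) -> str:
    # illustrative only: replace non-breaking spaces left after minification
    return html.replace('\xa0', ' ')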
Example #4
def minify_html(file_name):
    with open(file_name, "r") as in_file:
        content = in_file.read()
    print(file_name, "size:", len(content))
    minified_content = minify_html_module.minify(content,
                                                 minify_js=False,
                                                 minify_css=True)
    print(file_name, "new size:", len(minified_content))
    print(file_name, "gain:",
          str((len(minified_content) / len(content) * 100).__round__(2)) + "%")
    with open(file_name, "w") as out_file:
        out_file.write(minified_content)
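Because this function shadows the library name, the snippet presumably imports the package under an alias; a minimal usage sketch under that assumption (the file name is hypothetical):

import minify_html as minify_html_module  # assumed alias used above

minify_html("build/index.html")  # rewrites the file in place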
    """url = "https://htmlcompressor.com/compress"
    def process_response(self, request, response):

        s = response.content.decode("utf-8")
        try:
            s = minify_html.minify(s)
        except Exception:
            s = htmlmin.minify(s, remove_comments=True, remove_all_empty_space=True)

        response.content = s
        response["Content-Length"] = len(response.content)

        duration = time.time() - self.ts
        response["X-Duration"] = duration*1000
        return response
Example #6
def main():
    # serve the current directory from a throwaway local HTTP server
    port = 62435
    handler = http.server.SimpleHTTPRequestHandler
    server = socketserver.TCPServer(("", port), handler)
    thread = threading.Thread(target=server.serve_forever)
    thread.daemon = True  # so the server dies when the program exits
    thread.start()
    local_page = f'http://localhost:{port}/dynamic_page.html'

    html = load_page_firefox(local_page)
    server.shutdown()  # kill the server since we are done with it
    soup = bs(html, "html.parser")
    update_page(soup)
    try:
        minified = minify_html.minify(str(soup),
                                      minify_js=False,
                                      minify_css=False)
        with open('index.html', 'w', encoding='utf-8') as f:
            f.write(f"<!DOCTYPE html>{minified}")
    except SyntaxError as e:
        print(e)
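`load_page_firefox` is not included in the snippet. One plausible, purely illustrative implementation renders the page in headless Firefox via Selenium so that any JavaScript runs before the HTML is captured:

from selenium import webdriver

def load_page_firefox(url):
    # illustrative only: render the page in headless Firefox and return its HTML
    options = webdriver.FirefoxOptions()
    options.add_argument("-headless")
    driver = webdriver.Firefox(options=options)
    try:
        driver.get(url)
        return driver.page_source
    finally:
        driver.quit()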
Example #7
def build_dist_html(input_html,
                    output_html,
                    data_json_file=None,
                    compress=False):
    """
    Creates a single distributable HTML file.
    Reads the input_html and inlines all CSS, JS, and data files into the output HTML. For web resources it first
    tries to load a local file and otherwise downloads it.
    :param input_html: the input HTML file that declares all dependencies
    :param output_html: the bundled output HTML file
    :param data_json_file: optional JSON data file injected into the bundled script via replace_data_in_file
    :param compress: if True, minify the bundled HTML with minify_html
    :return: None
    """
    original_html_text = Path(input_html).read_text(encoding="utf-8")
    soup = BeautifulSoup(original_html_text, "html.parser")

    # Find link tags. example: <link rel="stylesheet" href="css/somestyle.css">
    for tag in soup.find_all('link', href=True):
        if tag.has_attr('href'):
            file_text = Path(tag['href']).read_text(encoding="utf-8")

            # remove the tag from soup
            tag.extract()

            # insert style element
            new_style = soup.new_tag('style')
            new_style.string = file_text
            soup.html.head.append(new_style)

    # Find script tags. example: <script src="js/somescript.js"></script>
    for tag in soup.find_all('script', src=True):
        if tag.has_attr('src'):
            path = tag['src']
            path = replace_by_local_file(path)
            if path.startswith("http"):
                response = requests.get(path)
                response.raise_for_status()
                file_text = response.text
            else:
                file_text = Path(path).read_text()

            # try to replace data with PLACEHOLDER_JSON_DATA
            if data_json_file is not None and "collapsible_tree" in path:
                file_text = replace_data_in_file(data_json_file, file_text)

            # remove the tag from soup
            tag.extract()

            # insert script element
            new_script = soup.new_tag('script')
            new_script.string = file_text
            soup.html.body.append(new_script)

    # Find image tags.
    for tag in soup.find_all('img', src=True):
        if tag.has_attr('src'):
            file_content = Path(tag['src']).read_bytes()

            # replace filename with base64 of the content of the file
            base64_file_content = base64.b64encode(file_content)
            tag['src'] = "data:image/png;base64, {}".format(
                base64_file_content.decode('ascii'))

    out_text = str(soup)

    if compress:
        try:
            import minify_html
            out_text = minify_html.minify(out_text,
                                          minify_js=True,
                                          minify_css=True)

        except Exception as e:
            logger.warning("Error during output compression.")
            logger.exception(e)

    # Save onefile
    with open(output_html, "w", encoding="utf-8") as outfile:
        outfile.write(out_text)
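A minimal usage sketch for the bundler above (file names are hypothetical):

# bundle index.html and everything it references into one minified file
build_dist_html("index.html", "dist/index.html",
                data_json_file="data/tree.json", compress=True)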
Example #8
        for c in i["value"]:
            span_error += "<span>" + c + "</span>"
        new_content = new_content.replace("$ERROR_SPAN", span_error)
        if error_code in custom_text:
            name = custom_text[error_code][0]
            description = custom_text[error_code][1]
        else:
            name = i["description"]
            description = "" if disable_generic_description else i["details"][
                0]["description"]
        new_content = new_content.replace("$ERROR_NAME", name)
        new_content = new_content.replace("$ERROR_DESC", description)
        with open(i["value"] + ".html", "w") as output_file:
            try:
                minified = minify_html.minify(new_content,
                                              minify_js=False,
                                              minify_css=True)
            except SyntaxError as e:
                print(e)
                minified = new_content  # fall back to the unminified page
            output_file.write(minified)

with open("snippets/error_pages_content.conf", "w") as epc:
    for i in json["values"]:
        v = int(i["value"])
        if v < 400 or v > 599:
            continue
        print("error_page %d /error/%d.html;" % (v, v), file=epc)
    print("error_page 495 http://$host;", file=epc)
    print("error_page 496 http://$host;", file=epc)
    print("error_page 497 https://$host$request_uri;", file=epc)
Example #9
after `pip install`ing [`Pygments`](https://pygments.org/).
"""

import sys

import markdown as md
import minify_html as mh

import astdocs

headers = """
<!DOCTYPE html>
<html>
<head>
  <link rel="stylesheet" href="styles-body.css">
  <link rel="stylesheet" href="styles-code.css">
</head>
<body>
"""

footer = """
</body>
</html>
"""

mdwn = astdocs.render(sys.argv[1])
html = md.markdown(mdwn, extensions=["codehilite", "fenced_code", "toc"])
mini = mh.minify(f"{headers}{html}{footer}")

print(mini)