Example 1
    def compile(self, source, dest, is_two_file=True, post=None, lang=None):
        """Compile the source file into HTML and save as dest."""
        makedirs(os.path.dirname(dest))
        if mw is None:
            req_missing(['smc.mw'],
                        'build this site (compile with MediaWiki)',
                        python=True)
        with io.open(dest, "w+", encoding="utf8") as out_file:
            with io.open(source, "r", encoding="utf8") as in_file:
                data = in_file.read()
            if not is_two_file:
                # One-file posts carry metadata before the first blank line; keep only the body.
                data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
            parser = mw.Parser(parseinfo=False, whitespace='', nameguard=False)
            ast = parser.parse(data,
                               'document',
                               semantics=mw.Semantics(parser))
            # The semantics build an lxml tree; serialize it to an HTML string.
            output = etree.tostring(ast, encoding='utf8').decode('utf8')
            output, shortcode_deps = self.site.apply_shortcodes(
                output,
                filename=source,
                with_dependencies=True,
                extra_context=dict(post=post))
            out_file.write(output)
        if post is None:
            if shortcode_deps:
                self.logger.error(
                    "Cannot save dependencies for post {0} (post unknown)",
                    source)
        else:
            post._depfile[dest] += shortcode_deps
Example 2
    def compile_string(self,
                       data,
                       source_path=None,
                       is_two_file=True,
                       post=None,
                       lang=None):
        """Compile the source file into HTML strings (with shortcode support).

        Returns a tuple of at least two elements: HTML string [0] and shortcode dependencies [last].
        """
        if mw is None:
            req_missing(['smc.mw'],
                        'build this site (compile with MediaWiki)',
                        python=True)
        if not is_two_file:
            _, data = self.split_metadata(data, post, lang)
        # Replace shortcode calls with placeholders so the MediaWiki parser does not mangle them.
        new_data, shortcodes = sc.extract_shortcodes(data)
        parser = mw.Parser(parseinfo=False, whitespace='', nameguard=False)
        ast = parser.parse(new_data,
                           'document',
                           semantics=mw.Semantics(parser))
        output = etree.tostring(ast, encoding='utf8').decode('utf8')
        # Render the extracted shortcodes and substitute them back into the generated HTML.
        output, shortcode_deps = self.site.apply_shortcodes_uuid(
            output,
            shortcodes,
            filename=source_path,
            extra_context={'post': post})
        return output, shortcode_deps
Example 3
def run_parser(text, filename=None, start=None, profile_data=None,
               trace=False, headings=None):
    """Parse MediaWiki markup with smc.mw and return the serialized result."""
    if start is None:
        start = "document"
    parser = mw.Parser(parseinfo=False, whitespace='', nameguard=False)
    ast = parser.parse(text, start, filename=filename,
                       semantics=mw.Semantics(parser, headings=headings), trace=trace,
                       nameguard=False, whitespace='')
    # etree.tostring returns bytes by default; on Python 3, encoding=str yields a str.
    if sys.version < '3':
        text = etree.tostring(ast)
    else:
        text = etree.tostring(ast, encoding=str)
    return text
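
Every example above follows the same core pattern: build an mw.Parser, parse the markup with an mw.Semantics instance so the result is an lxml element tree, and serialize that tree with etree.tostring. The sketch below isolates that pattern as a standalone script; it is only an illustration that assumes smc.mw and lxml are installed, and the sample wiki text is made up.

# Minimal sketch of the shared pattern (assumes smc.mw and lxml are installed;
# the sample text below is illustrative only).
from smc import mw
import lxml.etree as etree

sample = "== Heading ==\n\nSome ''italic'' and '''bold''' wiki text."

parser = mw.Parser(parseinfo=False, whitespace='', nameguard=False)
ast = parser.parse(sample, 'document', semantics=mw.Semantics(parser))
html = etree.tostring(ast, encoding='utf8').decode('utf8')
print(html)
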
Example 4
    def compile_html(self, source, dest, is_two_file=True):
        makedirs(os.path.dirname(dest))
        if mw is None:
            req_missing(['smc.mw'],
                        'build this site (compile with MediaWiki)',
                        python=True)
        with io.open(dest, "w+", encoding="utf8") as out_file:
            with io.open(source, "r", encoding="utf8") as in_file:
                data = in_file.read()
            if not is_two_file:
                data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
            parser = mw.Parser(parseinfo=False, whitespace='', nameguard=False)
            ast = parser.parse(data,
                               'document',
                               semantics=mw.Semantics(parser))
            output = etree.tostring(ast, encoding='utf8').decode('utf8')
            out_file.write(output)
Example 5
    def parser(self, inp, profile_data=None):
        if type(inp) == tuple:
            inp, headings = inp
        else:
            headings = None
        parser = mw.Parser(parseinfo=False)
        semantics = TestSemantics(parser,
                                  headings=headings,
                                  settings=self._preprocessor.settings)
        ast = parser.parse(inp, "document", semantics=semantics, trace=False)
        body = ast[0]
        if body.text is not None:
            text = body.text
        else:
            text = ""

        # ast[0] is "body"
        for node in body.getchildren():
            # tostring adds tail
            text = text + etree.tostring(node).decode("utf-8")
        return text