def __init__(self, logdir, style):
        """Set up the eBL and Oracc ATF parsers plus preprocessing state.

        Args:
            logdir: directory where preprocessing logs are written.
            style: preprocessing style flag, stored for later use.
        """
        # Both grammar paths are resolved relative to this module's file.
        self.EBL_PARSER = Lark.open(
            "../../transliteration/domain/ebl_atf.lark",
            maybe_placeholders=True,
            rel_to=__file__,
        )
        self.ORACC_PARSER = Lark.open(
            "lark-oracc/oracc_atf.lark",
            maybe_placeholders=True,
            rel_to=__file__,
        )

        self.logger = logging.getLogger("Atf-Preprocessor")
        # logging.DEBUG replaces the magic number 10 (same numeric value).
        self.logger.setLevel(logging.DEBUG)
        self.skip_next_lem_line = False
        self.unparseable_lines = []
        # Grammar rule names whose lines are recognised but intentionally
        # skipped during preprocessing.
        self.unused_lines = [
            "oracc_atf_at_line__object_with_status",
            "oracc_atf_at_line__surface_with_status",
            "oracc_atf_at_line__discourse",
            "oracc_atf_at_line__column",
            "dollar_line",
            "note_line",
            "control_line",
            "empty_line",
            "translation_line",
        ]
        self.stop_preprocessing = False
        self.logdir = logdir
        self.style = style
Exemplo n.º 2
0
 def zeek_parser(isEarley):
     """Build a parser for the Zeek grammar.

     When *isEarley* is truthy the Earley variant (zeek-earley.lark) is
     loaded; otherwise DEBUG logging is configured first and the LALR
     variant (zeek-lalr.lark) is loaded with debug disabled.
     """
     common = dict(propagate_positions=True, rel_to=__file__, start='zeek')
     if not isEarley:
         logging.basicConfig(level=logging.DEBUG)
         return Lark.open('zeek-lalr.lark', parser='lalr', debug=False,
                          **common)
     return Lark.open('zeek-earley.lark', parser='earley', **common)
Exemplo n.º 3
0
def load_file(children, filename):
    """Parse a GDScript file referenced from another file and merge its
    declarations into the shared global context.

    Args:
        children: parse-tree node(s) of the statement that referenced the
            file; only used for error reporting.
        filename: resource path of the file to load (e.g. "res://foo.gd").
    """
    global root_folder
    global global_context
    global file_analyzing

    # NOTE(review): __path__ normally exists only for packages, and
    # __real_file__ is not a standard name — both presumably come from
    # module-level assignments elsewhere in this file; verify.
    os.chdir(__path__)

    kwargs = dict(rel_to=__real_file__,
                  postlex=GDScriptIndenter(),
                  start='file_input')
    gd_parser = Lark.open('gd.lark', parser='lalr', **kwargs)
    # Map the Godot "res:/" prefix onto the project root (once) and strip
    # up to two quote characters from the literal path.
    computed_filename = filename.replace('res:/', root_folder,
                                         1).replace('\'', '', 2)

    # Re-entering the file currently under analysis means the includes
    # form a cycle; report instead of recursing forever.
    if file_analyzing != computed_filename:
        input_text = _read(computed_filename) + '\n'

        try:
            parsed_file = gd_parser.parse(input_text)

            analyze_tree(parsed_file, global_context, False)
            # print(json.dumps(global_context, sort_keys = True, indent = 2))
            # check_context()
        except UnexpectedInput as error:
            _output_message('fatal', error, error.get_context(input_text))

    else:
        _output_message('error', children, 'Cyclic dependencies')
Exemplo n.º 4
0
def get_lark_grammar():
    """Return a LALR Lark parser for the Vyper grammar (start rule: module)."""
    grammar_path = 'tests/grammar/vyper.lark'
    return Lark.open(grammar_path,
                     parser='lalr',
                     start='module',
                     postlex=PythonIndenter())
Exemplo n.º 5
0
    def _make_grammar_tools(
        self, product_type: str
    ) -> Tuple[Lark, Grammar, Reconstructor, TokenMatcher]:
        """Build the grammar parser, analyser, reconstructor and token
        matcher for one product type.

        Args:
            product_type: name of the product grammar (file name stem).

        Returns:
            (Lark grammar, Grammar analyser over the expanded inline rules,
            Reconstructor, TokenMatcher).
        """
        grammar_file = f"{self.asset_class}{PATH_DELIMITER}{product_type}{EXT}"
        grammar = Lark.open(os.path.join(self.grammar_path, grammar_file))

        # First analyse the raw rules, then rebuild the analyser from the
        # inline-expanded rules with terminals discarded.  The map stays
        # lazy exactly as before; the bound method is captured eagerly.
        analyser = Grammar(grammar.rules)
        analyser = Grammar(
            map(analyser.discard_terminals, analyser.expand_inline_rules())
        )

        reconstructor = Reconstructor(grammar)
        token_matcher = TokenMatcher(grammar.terminals)

        return grammar, analyser, reconstructor, token_matcher
Exemplo n.º 6
0
def main():
    """Compiles program.scratch to a scratch project."""
    parser = Lark.open("grammar.lark",
                       parser="lalr",
                       transformer=ScratchTransformer,
                       postlex=ScratchIndenter())

    with open("program.scratch") as source_file:
        source_code = source_file.read()
    parsed = parser.parse(source_code)
    parsed = optimize(parsed)
    parsed = scratchify(parsed)

    backdrop_md5 = md5sum("resources/backdrop.svg")

    # Every target shares the same single backdrop costume.
    for target in parsed["targets"]:
        target["costumes"] = [{
            "assetId": backdrop_md5,
            "name": "backdrop",
            "md5ext": f"{backdrop_md5}.svg",
            "dataFormat": "svg",
            "rotationCenterX": 240,
            "rotationCenterY": 180
        }]

    try:
        with open("parsed.json", "w") as parsed_json_file:
            json.dump(parsed, parsed_json_file, indent="\t")
    except (TypeError, ValueError):
        # json raises TypeError for unserialisable objects and ValueError
        # only for circular references; the original caught ValueError
        # alone, so unserialisable data crashed instead of being dumped.
        print(parsed)

    create_project_files(parsed)
Exemplo n.º 7
0
    def _get_parser(
        self,
        name: str,
        add_metadata: bool = False,
        grammar_filename: str = "gdscript.lark",
    ) -> "Lark":
        """Return a (possibly cached) Lark parser for the given grammar.

        The compiled parser is pickled into a per-gdtoolkit-version cache
        file named after *name*; it is rebuilt whenever the cache file is
        missing or grammar caching is disabled.

        Args:
            name: cache key; becomes the pickle file name.
            add_metadata: when True, token positions are propagated.
            grammar_filename: grammar file inside self._directory.
        """
        version: str = pkg_resources.get_distribution("gdtoolkit").version

        cache_filepath: str = (
            os.path.join(self._cache_dirpath, version, name) + ".pickle"
        )
        grammar_filepath: str = os.path.join(self._directory, grammar_filename)
        if not os.path.exists(cache_filepath) or not self._use_grammar_cache:
            # Lark.open returns a parser object, not a Tree — the original
            # `tree: Tree` naming/annotation was misleading.
            parser = Lark.open(
                grammar_filepath,
                parser="lalr",
                start="start",
                postlex=Indenter(),
                propagate_positions=add_metadata,
                maybe_placeholders=False,
            )
            self.save(parser, cache_filepath)
        else:
            parser = self.load(cache_filepath)
        return parser
Exemplo n.º 8
0
 def _parser(self) -> "Lark":
     """Return a LALR parser for gdscript.lark.

     Annotation fixed: Lark.open yields a parser object, not a Tree.
     """
     return Lark.open(
         os.path.join(self._directory, "gdscript.lark"),
         postlex=Indenter(),
         parser="lalr",
         start="start",
     )
Exemplo n.º 9
0
def dephrase(**kwargs) -> Set[str]:
    """Return the set of distinct token-type readings of a phrase.

    Expected kwargs:
        phrase: the input string (upper-cased before parsing).
        aggressive: when truthy, use the extended phrase grammar and strip
            its namespace from the resulting trees.

    An unparseable phrase yields an empty set.
    """
    grammar_name = "phrases_extended" if kwargs["aggressive"] else "phrases"
    g = get_grammar(grammar_name)
    # Earley with explicit ambiguity so every alternative parse is kept.
    parser = Lark.open(g,
                       parser="earley",
                       ambiguity="explicit",
                       lexer='dynamic_complete')
    trans = PhraseFlattener()
    if kwargs["aggressive"]:
        trans = StripNameSpace("phrases") * trans
    parses: Set[str] = set()
    try:
        tree = parser.parse(kwargs["phrase"].upper())
    except UnexpectedInput:
        print("exception")
        return parses

    trees = CollapseAmbiguities().transform(tree)
    for t in trees:
        try:
            tokens = (token.type for token in trans.transform(t))
        except VisitError as e:
            # A wrapped NoWordFound just means this reading is invalid;
            # anything else is a genuine error.
            if isinstance(e.orig_exc, NoWordFound):
                continue
            # Bare raise preserves the original traceback (was `raise e`).
            raise
        else:
            parses.add(" ".join(tokens))
    return parses
Exemplo n.º 10
0
def get_lark_grammar():
    """Build the LALR parser for the Vyper grammar shipped next to this file."""
    grammar_file = PARENT_DIR.joinpath("vyper.lark")
    return Lark.open(grammar_file,
                     parser="lalr",
                     start="module",
                     postlex=PythonIndenter())
Exemplo n.º 11
0
    def preview(self):
        """Parse the editor's text as Python 3, pretty-print the resulting
        parse tree as XML, and display it in a new window."""
        import re

        kwargs = dict(rel_to=__file__,
                      postlex=PythonIndenter(),
                      start='file_input')
        python_parser3 = Lark.open('updated_python3.lark',
                                   parser='lalr',
                                   **kwargs)
        tree = python_parser3.parse(str(self.text.toPlainText()) + '\n')
        # Round-trip the tree's repr through a temp XML file so ElementTree
        # can re-parse it.  `with` replaces open/flush/close and guarantees
        # the file is closed even if write() raises.
        with open('temp.xml', 'w') as f:
            f.write(str(tree))
        parent_file_path = 'temp.xml'
        parent_tree = ET.parse(parent_file_path)
        parent = parent_tree.getroot()
        xmlstr = xml.dom.minidom.parseString(ET.tostring(parent)).toprettyxml()
        print(xmlstr)
        xmlstr2 = str(xmlstr)
        # Collapse blank-line runs that toprettyxml() tends to introduce.
        print(re.sub('\n+', '\n', xmlstr2))
        print(QString(str(tree)))
        xml1 = QString(str(xmlstr))
        tmp = Main(p2=0)
        tmp.text.append(xml1)
        tmp.show()
Exemplo n.º 12
0
def parser(_singleton=[]):
    '''Returns a Lark parser using the grammar in sfz.lark'''
    # The mutable default argument is intentional here: it caches the
    # compiled parser across calls so the grammar is only read once.
    if not _singleton:
        sfz_parser = Lark.open('sfz.lark', rel_to=__file__, parser='lalr')
        _singleton.append(sfz_parser)
    return _singleton[0]
Exemplo n.º 13
0
 def _parser_with_metadata(self) -> "Lark":
     """Return a LALR gdscript parser that propagates token positions.

     Annotation fixed: Lark.open yields a parser object, not a Tree.
     """
     return Lark.open(
         os.path.join(self._directory, "gdscript.lark"),
         postlex=Indenter(),
         parser="lalr",
         start="start",
         propagate_positions=True,
     )
Exemplo n.º 14
0
 def __init__(self):
     """
     Lark parser, activate!
     """
     grammar_path = os.path.join(os.path.dirname(__file__), 'imp.lark')
     self.imp_parser = Lark.open(grammar_path,
                                 parser='earley',
                                 lexer='standard')
 def __init__(self, tcex=None):
     """Initialise variable storage, the expression evaluator and the
     LALR grammar parser that feeds it."""
     self.variables = {}
     self.cache = {}
     self.tcex = tcex
     self.evaluator = Evaluate(self, self.redis_fetch)
     self.parser = Lark.open('grammar.lark',
                             parser='lalr',
                             start='start',
                             transformer=self.evaluator)
Exemplo n.º 16
0
def parse_fragment(fragment: str,
                   lark_file: str = "simai_fragment.lark") -> List[dict]:
    """Parse a simai chart fragment into a list of note dicts.

    Any parse/transform error is re-raised after printing which fragment
    failed, to ease debugging.
    """
    lalr = Lark.open(lark_file, rel_to=__file__, parser="lalr")
    try:
        tree = lalr.parse(fragment)
        return FragmentTransformer().transform(tree)
    except Exception:
        print(f"Error parsing {fragment}")
        raise
Exemplo n.º 17
0
def open_parser(start: str = "start") -> Lark:
    """Open the module grammar with the LALR parser and the whitespace +
    number-literal postlex chain; *start* selects the start rule."""
    postlex = PostlexChain(WhitespacePostlex(), NumberLiteralPostlex())
    return Lark.open(GRAMMAR_FILENAME,
                     rel_to=__file__,
                     parser="lalr",
                     start=start,
                     debug=True,
                     postlex=postlex)
Exemplo n.º 18
0
 def __init__(self, grammar_filename):
     """Compile *grammar_filename* into a LALR parser whose output is
     shaped by ParsingTransformer."""
     self.lark_parser = Lark.open(grammar_filename=grammar_filename,
                                  parser="lalr",
                                  propagate_positions=False,
                                  maybe_placeholders=False,
                                  transformer=ParsingTransformer())
Exemplo n.º 19
0
class State:
    """Interpreter state: an evaluation context chained onto a global
    context pre-populated with numpy, pyplot and assorted helpers."""

    # Everything from numpy's public API, minus classes.
    global_ctx = Context(
        {o: getattr(np, o)
         for o in np.__all__ if not isclass(getattr(np, o))})
    global_ctx.update({
        o[0]: getattr(pyplot, o[0])
        for o in getmembers(pyplot) if not isclass(getattr(pyplot, o[0]))
    })
    global_ctx.update({

        # numpy matrix
        "rank": np.linalg.matrix_rank,
        "det": np.linalg.det,
        "reshape": lambda mat, *args: np.reshape(mat, args),
        "vectorize": np.vectorize,
        "map": lambda f, arr: np.vectorize(f)(arr),

        # complex numbers — np.complex was merely an alias of the builtin
        # and was removed in NumPy 1.24; use the builtin directly.
        'j': complex(0, 1),
        'complex': complex,

        # plotting
        'show': pyplot.gcf(),

        # misc
        'numpy': np,
        'true': True,
        'false': False,
        'null': None,
        "version": "0.5.0",
    })
    rules = Lark.open("newlang/grammar.lark", parser='lalr')
    transformer = CalculateTree()

    def __init__(self, ctx=None, global_ctx=None):
        """Create a state whose context is chained onto *global_ctx*
        (the class-level context when none is given)."""
        ctx = ctx or Context()

        if not global_ctx:
            self.ctx = ctx.with_parent(self.global_ctx)
        else:
            self.ctx = ctx.with_parent(global_ctx)

    def parse(self, line):
        """Evaluate one input line.

        Returns:
            (value, None) on success, or (None, error) where error is the
            unwrapped transform exception, a parse-context string, or any
            other exception raised during evaluation.
        """
        self.transformer.set_ctx(self.ctx)

        try:
            tree = self.rules.parse(line)
            tree = self.transformer.transform(tree)

            val = tree.get(self.ctx)

            return val, None
        except VisitError as e:
            return None, e.orig_exc
        except UnexpectedInput as e:
            return None, e.get_context(line)
        except Exception as e:
            # Broad catch is deliberate: this is a REPL-style boundary and
            # every failure is reported back to the caller, not raised.
            return None, e
Exemplo n.º 20
0
 def _comment_parser(self) -> "Lark":
     """Return a LALR parser for comments.lark with positions propagated.

     Annotation fixed: Lark.open yields a parser object, not a Tree.
     """
     return Lark.open(
         os.path.join(self._directory, "comments.lark"),
         postlex=Indenter(),
         parser="lalr",
         start="start",
         propagate_positions=True,
         maybe_placeholders=False,
     )
Exemplo n.º 21
0
def parse(code):
    """Parse Python source *code* with the LALR grammar and return the
    Treeify-transformed tree.  A trailing newline is appended to the
    source before parsing, as in the original implementation."""
    grammar = Grammar('python', 'file_input')
    python_parser = Lark.open(grammar.path,
                              rel_to=__file__,
                              start=grammar.start,
                              parser='lalr',
                              postlex=PythonIndenter())
    return Treeify().transform(python_parser.parse(code + '\n'))
Exemplo n.º 22
0
def drive(playlist):
    """Parse the *playlist* file with the LALR lang grammar and run `do`
    on each top-level child of the resulting tree."""
    lang_parser = Lark.open("lang.lark", parser='lalr')

    with open(playlist) as playlist_file:
        parsed = lang_parser.parse(playlist_file.read())

    for child in parsed.children:
        do(child)
Exemplo n.º 23
0
    def __init__(self, debug=False):
        """Compile the fate dice grammar with two start rules and an
        inline Processor transformer."""
        self.debug = debug
        self.parser = Lark.open(
            "fate.lark",
            __file__,  # rel_to: resolve the grammar next to this module
            start=["test_start", "dice_start"],
            parser="lalr",
            maybe_placeholders=True,
            transformer=Processor(),
        )
def CreateParser():
    """Build the LALR parser that turns a ".matter" file into an IDL."""
    return Lark.open(
        'matter_grammar.lark',
        rel_to=__file__,
        start='idl',
        parser='lalr',
        transformer=MatterIdlTransformer(),
    )
Exemplo n.º 25
0
def CreateParser(file_name: str):
    """Build a Parser over *file_name* using the lint-rules grammar
    (LALR, with token positions propagated)."""
    grammar = Lark.open('lint_rules_grammar.lark',
                        rel_to=__file__,
                        parser='lalr',
                        propagate_positions=True)
    return Parser(grammar, file_name=file_name)
Exemplo n.º 26
0
    def parse(self, text: str, **kwargs: Any) -> Tree:
        """Parse *text*, lazily building the Lark parser on first use."""
        from lark import Lark

        if self._lark is None:
            self._lark = Lark.open(grammar_filename=self._grammar,
                                   parser=self._parser,
                                   debug=self._debug)

        return self._lark.parse(text=text, **kwargs)
Exemplo n.º 27
0
def CreateParser(skip_meta: bool = False):
    """Build a line-aware parser that turns a ".matter" file into an IDL."""
    grammar = Lark.open('matter_grammar.lark',
                        rel_to=__file__,
                        start='idl',
                        parser='lalr',
                        propagate_positions=True)
    return ParserWithLines(grammar, skip_meta)
Exemplo n.º 28
0
    def __init__(self):
        """STIX2 Parser Initialization"""

        self.result = None
        # The grammar lives alongside this module.
        here = os.path.dirname(os.path.abspath(__file__))
        self.parser = Lark.open(os.path.join(here, 'stix2.lark'),
                                parser='lalr',
                                start='pattern',
                                transformer=Stix2IndicatorTransformer())
Exemplo n.º 29
0
    def _parsed_tree(filename):
        """Parse *filename* with grammar.lark and return the tree after
        running it through MyTransformer."""
        json_parser = Lark.open("grammar.lark")

        with open(filename) as fo:
            source = fo.read()

        return MyTransformer().transform(json_parser.parse(source))
Exemplo n.º 30
0
def CreateParser(skip_meta: bool = False):
    """Build a line-aware parser that turns a ".matter" file into an IDL."""

    # NOTE: LALR parser is fast. While Earley could parse more ambigous grammars,
    #       earley is much slower:
    #    - 0.39s LALR parsing of all-clusters-app.matter
    #    - 2.26s Earley parsing of the same thing.
    # For this reason, every attempt should be made to make the grammar context free
    lark_parser = Lark.open('matter_grammar.lark',
                            rel_to=__file__,
                            start='idl',
                            parser='lalr',
                            propagate_positions=True)
    return ParserWithLines(lark_parser, skip_meta)