def cli(ctx, pipeline):
    """Generates a workflow diagram corresponding to a Popper pipeline, in the
    .dot format. The string defining the graph is printed to stdout so it can
    be piped into other tools. For example, to generate a png file, one can
    make use of the graphviz CLI tools:

    popper workflow mypipe | dot -T png -o mypipe.png
    """
    pipes = pu.read_config()['pipelines']

    if pipeline not in pipes:
        pu.fail("Cannot find pipeline {} in .popper.yml".format(pipeline))

    project_root = pu.get_project_root()
    abs_path = os.path.join(project_root, pipes[pipeline]['path'])

    # Parse every stage script with the bash grammar; the transformer
    # accumulates '[wf]#...#' markers and comments into `comment_stack`.
    transformer = ObtainDotGraph()
    parser = Lark(bash_grammar, parser='lalr', lexer='contextual',
                  transformer=transformer)
    for stage in pipes[pipeline]['stages']:
        transformer.current_stage = stage
        stage_file = pu.get_filename(abs_path, stage)
        with open(stage_file, 'r') as f:
            parser.parse(f.read())

    cs = transformer.comment_stack
    cs = remove_redundant_if(cs)

    print('digraph pipeline {')

    curr_node = None     # node name of the stage currently being emitted
    prev_node = None     # last emitted node, used as the edge source
    node_id = 's{}'.format(0)  # synthetic id for comment/condition nodes
    curr_if_node = None  # id of the node holding the enclosing if-condition
    if_created = False   # whether the if-condition node was emitted yet

    for i, item in enumerate(cs):
        if item == '[wf]#stage#':
            prev_item = cs[i - 1]
            next_item = cs[i + 1]
            label = '"{' + '{} | {}'.format(next_item, prev_item) + '}"'
            curr_node = (next_item.replace('-', '_')).replace('.sh', ' ')

            # create the stage node
            print('{} [{}];'.format(curr_node,
                                    'shape=record, label=' + label))
            if prev_node:
                print('{} -> {};'.format(prev_node, curr_node))
            prev_node = curr_node
            continue

        # initialize the if-node
        elif item == '[wf]#if#':
            if_created = False
            c = 'condition'
            if i > 1 and (not cs[i - 1].startswith('[wf]#') and
                          '.sh' not in cs[i - 1]):
                c += ' : {}'.format(cs[i - 1])
            curr_if_node = node_id

        # inside if-elif-else construct
        elif (item == '[wf]#else#' or item == '[wf]#elif#' or
                item == '[wf]#fi#'):
            if not cs[i - 1].startswith('[wf]#'):
                if not if_created:
                    if_created, node_id = create_if_node(node_id, c,
                                                        prev_node)
                print('{} [shape=record, label="{}"];'.format(
                    node_id, cs[i - 1]))
                print('{} -> {};'.format(curr_if_node, node_id))
                node_id = increment(node_id)
            # BUG FIX: the original tested `item == '[wf]#fi'` (no trailing
            # '#'), which can never be true inside this branch, so
            # `if_created` was never reset when an if-block closed.
            if item == '[wf]#fi#':
                if_created = False
            continue

        # inside loop
        # NOTE(review): unlike the other markers this one has no trailing
        # '#'; kept as-is since ObtainDotGraph's emission format is not
        # visible here — confirm against the transformer.
        elif item == '[wf]#done':
            c = 'loop'
            if not cs[i - 1].startswith('[wf]#') and '.sh' not in cs[i - 1]:
                c += ' : {}'.format(cs[i - 1])
            print('{} [shape=record,label="{}"];'.format(node_id, c))
            print('{} -> {};'.format(prev_node, node_id))
            node_id = increment(node_id)

        # is a comment outside any control structures
        elif not item.startswith('[wf]#') and '.sh' not in item:
            if i == len(cs) - 1 and not cs[i - 1] == '[wf]#stage#':
                print('{} [shape=record,label="{}"];'.format(node_id, item))
                print('{} -> {};'.format(prev_node, node_id))
                node_id = increment(node_id)
            elif i < len(cs) - 1:
                if (not cs[i + 1].startswith('[wf]#') and
                        not cs[i - 1] == '[wf]#stage#'):
                    print('{} [shape=record,label="{}"];'.format(
                        node_id, item))
                    print('{} -> {};'.format(prev_node, node_id))
                    node_id = increment(node_id)

    print('}')
def parseLDF(file):
    """Parse an LDF (LIN Description File).

    Parameters
    ----------
    file : str
        Path to the .ldf file.

    Returns
    -------
    LDF object
        Built from the parsed 'nodes', 'frames' and 'signals' sections.
    """
    # json_parser = Lark(open("..\ucanlintools\ldf.lark"),parser="lalr")
    json_parser = Lark('''
        start: ldf_container
        ?ldf_container: ldf_header [(ldf_nodes | ldf_signals | ldf_frames | ldf_diagnostic | ldf_diagnostic_frames | ldf_node_atributes | ldf_schedule_table | ldf_signal_encoding_types | ldf_signal_representation)*]
        ?ldf_nodes : "Nodes" "{" [ldf_node_master] [ldf_node_slaves] "}"
        ?ldf_signals : "Signals" "{" [ldf_signal (ldf_signal)*] "}"
        ?ldf_frames : "Frames" "{" [ldf_frame (ldf_frame)*] "}"
        ?ldf_diagnostic : "Diagnostic_signals" "{" [(ANY_SEMICOLON_TERMINATED_LINE)*] "}"
        ?ldf_diagnostic_frames : "Diagnostic_frames" "{" [(ldf_diagnostic_master_req | ldf_diagnostic_slave_resp)*] "}"
        ?ldf_diagnostic_master_req: "MasterReq:" [C_INT] "{" [(ANY_SEMICOLON_TERMINATED_LINE)*] "}"
        ?ldf_diagnostic_slave_resp: "SlaveResp:" [C_INT] "{" [(ANY_SEMICOLON_TERMINATED_LINE)*] "}"
        ?ldf_node_atributes: "Node_attributes" "{" ldf_node_atributes_node? "}"
        ?ldf_node_atributes_node: ANY_OPENED_BLOCK [ANY_SEMICOLON_TERMINATED_LINE*] ANY_OPENED_BLOCK [ANY_SEMICOLON_TERMINATED_LINE*] "}" "}"
        ?ldf_schedule_table: "Schedule_tables" "{" [ldf_schedule_table_node*] "}"
        ?ldf_schedule_table_node: ANY_OPENED_BLOCK [ANY_SEMICOLON_TERMINATED_LINE*] "}"
        ?ldf_signal_encoding_types: "Signal_encoding_types" "{" [ldf_signal_encoding_types_node*] "}"
        ?ldf_signal_encoding_types_node: ANY_OPENED_BLOCK [(ANY_SEMICOLON_TERMINATED_LINE*)] ["}"] [ANY_CLOSED_BLOCK]
        ?ldf_signal_representation: "Signal_representation" "{" [ldf_signal_representation_node*] "}"
        ?ldf_signal_representation_node: [(ANY_COLON_TERMINATED_LINE | ANY_SEMICOLON_TERMINATED_LINE)*]
        ?ldf_node_master: "Master:" ldf_node_name "," /.*;/
        ?ldf_node_slaves: "Slaves:" ldf_node_name? ";"

        ldf_node : ldf_signal_name ":" ldf_node_name ";"
        ldf_signal : ldf_signal_name ":" ldf_signal_size "," ldf_signal_default_value "," ldf_node_name "," ldf_node_name ";"
        ldf_frame : ldf_frame_name ":" ldf_frame_id "," ldf_node_name "," ldf_frame_len ["{" (ldf_frame_signal)* "}"]
        ldf_frame_signal: ldf_signal_name "," ldf_signal_bit_offset ";"
        ldf_frame_name: CNAME
        ldf_frame_id : C_INT
        ldf_frame_len : C_INT
        ldf_signal_default_value: C_INITIALIZER_LIST|C_INT
        ldf_signal_size: C_INT
        ldf_signal_bit_offset: C_INT
        ldf_signal_name: CNAME
        ldf_node_name: CNAME

        ANY_OPENED_BLOCK: /.*{/
        ANY_CLOSED_BLOCK: /.*}/
        ANY_SEMICOLON_TERMINATED_LINE: /.*;/
        ANY_COLON_TERMINATED_LINE: /.*,/
        C_INITIALIZER_LIST: ("{"|"{ ") C_INT ([","|", "]C_INT)* ("}"|" }")
        C_INT: ("0x"HEXDIGIT+) | ("-"? HEXDIGIT+)

        ldf_header: (ldf_header_lin | ldf_header_channel)*
        ldf_header_lin : "LIN_"/.*;/
        ldf_header_channel : "Channel_name"/.*;/

        %import common._STRING_INNER
        %import common.HEXDIGIT
        %import common.INT
        %import common.WORD
        %import common.CNAME
        %import common.ESCAPED_STRING
        %import common.SIGNED_NUMBER
        %import common.WS
        %import common.WS_INLINE
        %ignore WS
        %ignore WS_INLINE
        ''', parser="lalr")

    # Read via a context manager so the handle is closed promptly
    # (the original leaked the file object returned by open()).
    with open(file, "r") as f:
        text = f.read()

    tree = json_parser.parse(text)
    # print(tree.pretty())
    json_data = TreeToJson().transform(tree)

    # Merge the per-section dicts produced by the transformer.
    ldf = {}
    for section in json_data:
        if section is not None:  # identity check is the idiom for None
            ldf.update(section)

    out_ldf = LDF(ldf['nodes'], ldf['frames'], ldf['signals'])
    return out_ldf
# Lark parser for PMKS mechanism expressions (e.g. "M[J[R, P[0, 0], L[...]]]").
# The grammar is assembled from adjacent string literals — number tokens,
# letter tokens, whitespace, then the mechanism rules — and `_COLOR_LIST`
# (defined elsewhere in this module) is spliced in as the COLOR terminal.
PMKS_parser = Lark(
    #Number
    '''
    DIGIT: "0".."9"
    INT: DIGIT+
    SIGNED_INT: ["+"|"-"] INT
    DECIMAL: INT "." INT? | "." INT
    _EXP: ("e"|"E") SIGNED_INT
    FLOAT: INT _EXP | DECIMAL _EXP?
    NUMBER: FLOAT | INT
    '''
    #Letters
    '''
    LCASE_LETTER: "a".."z"
    UCASE_LETTER: "A".."Z"
    LETTER: UCASE_LETTER | LCASE_LETTER
    CNAME: ("_"|LETTER) ("_"|LETTER|DIGIT)*
    '''
    #White space
    '''
    WS: /[ \\t\\f\\r\\n]/+
    '''
    '''
    type: JOINTTYPE+
    name: CNAME
    num : NUMBER -> number
        | "-" num -> neg

    joint : "J[" [type ("," angle)? ("," color)? "," point "," link] "]"
    link : "L[" [name ("," name)*] "]"
    point : "P[" [num "," num] "]"
    angle : "A[" num "]"
    color : "color[" COLOR+ "]"
    mechanism: "M[" [joint ("," joint)*] "]"

    JOINTTYPE: "RP" | "R" | "P"
    COLOR : ''' + _COLOR_LIST + '''

    %ignore WS
    ''', start='mechanism')
In this example we use a dynamic lexer and let the Earley parser resolve the
ambiguity.

Another approach is to use the contextual lexer with LALR. It is less powerful
than Earley, but it can handle some ambiguity when lexing and it's much
faster. See examples/conf_lalr.py for an example of that approach.
"""
from lark import Lark

# INI-style configuration grammar: "[section]" headers followed by
# "name=value" items.  VALUE is optional, so "empty=" lines are accepted.
parser = Lark(r"""
        start: _NL? section+
        section: "[" NAME "]" _NL item+
        item: NAME "=" VALUE? _NL
        NAME: /\w/+
        VALUE: /./+

        %import common.NEWLINE -> _NL
        %import common.WS_INLINE
        %ignore WS_INLINE
    """, parser="earley")


def test():
    # Sample configuration exercising a quoted value and an empty value.
    # NOTE(review): this function appears truncated at the chunk boundary —
    # the parse call presumably follows.
    sample_conf = """
[bla]
a=Hello
this="that",4
empty=
"""
def lark(self):
    """Construct a Lark parser from this object's configuration.

    Uses the grammar text from ``self.grammar()``, the parsing algorithm
    named by ``self.algo``, and the post-lexer from ``self.indenter()``.
    """
    grammar_text = self.grammar()
    postlexer = self.indenter()
    return Lark(grammar_text, parser=self.algo, postlex=postlexer)
grammar = Lark( r""" ?start : bool ?bool : expr ">" expr -> maior | expr ">=" expr -> maiorigual | expr "<" expr -> menor | expr "<=" expr -> menorigual | expr "==" expr -> igual | expr "!=" expr -> diferente | expr | expr com -> exprcom | com ?com : COMMENT -> com ?expr : expr "+" term -> so | expr "-" term -> su | term ?term : term "*" pow -> mu | term "/" pow -> di | pow ?pow : atom "^" pow -> po | atom ?atom : NUMBER -> num | VARIABLE -> var | VARIABLE "(" expr ")" -> func | "(" expr ")" VARIABLE : /-?\w+/ NUMBER : /-?\d+(\.[\d|e|\+|-]+)?/ COMMENT : /#.+$/ %ignore /\s+/ """, parser="lalr", )
@dataclass
class Int(R0Exp):
    # Integer-literal expression node; `val` holds the literal's value.
    val: int

##################################################
# Concrete Syntax Parser
##################################################

# Lark grammar for R0 concrete syntax; currently only integer literals
# (the CNAME import is unused by the single rule — presumably kept for
# later extension).
_r_var_parser = Lark(r"""
    ?exp: NUMBER -> int_e

    %import common.NUMBER
    %import common.CNAME
    %import common.WS
    %ignore WS
    """, start='exp')

##################################################
# Pass #0: Parsing Concrete to Abstract Syntax
##################################################

def _parse(s: str) -> R0Exp:
    # Convert the Lark parse tree into the R0 abstract syntax.
    # NOTE(review): this function is truncated at the chunk boundary.
    def bast(e):
        if e.data == 'int_e':
            return Int(int(e.children[0]))
        else:
def test_hddl_parser(self):
    """Smoke-test the HDDL grammar: parse a barman domain and a matching
    problem with the module-level `hddl_grammar_str` and print the trees."""
    inputs_ = [
        """
        (define (domain barman_agent)
        (:requirements :negative-preconditions :hierarchy :typing :equality :method-preconditions )
        (:types
            container dispenser level beverage hand - anything
            shot shaker - container
            ingredient cocktail - beverage )
        (:predicates
            (clean ?p0 - container)
            (cocktailPart1 ?p0 - cocktail ?p1 - ingredient)
            (cocktailPart2 ?p0 - cocktail ?p1 - ingredient)
            (contains ?p0 - container ?p1 - beverage)
            (dispenses ?p0 - dispenser ?p1 - ingredient)
            (empty ?p0 - container)
            (handEmpty ?p0 - hand)
            (holding ?p0 - hand ?p1 - container)
            (ingredient ?p0 - ingredient)
            (next ?p0 - level ?p1 - level)
            (ontable ?p0 - container)
            (shaked ?p0 - shaker)
            (shakerEmptyLevel ?p0 - shaker ?p1 - level)
            (shakerLevel ?p0 - shaker ?p1 - level)
            (unshaked ?p0 - shaker)
            (used ?p0 - container ?p1 - beverage) )
        (:task AchieveContainsShakerIngredient :parameters (?x_0 - shaker ?x_1 - ingredient))
        (:task AchieveCleanShaker :parameters (?x_0 - shaker))
        (:task AchieveHandEmpty :parameters (?x_0 - hand))
        (:task AchieveContainsShotIngredient :parameters (?x_0 - shot ?x_1 - ingredient))
        (:task AchieveContainsShakerCocktail :parameters (?x_0 - shaker ?x_1 - cocktail))
        (:task DoPourShakerToShot :parameters (?x_0 - shaker ?x_1 - shot ?x_2 - cocktail))
        (:task AchieveOnTable :parameters (?x_0 - container))
        (:task AchieveHolding :parameters (?x_0 - hand ?x_1 - container))
        (:task AchieveCleanShot :parameters (?x_0 - shot))
        (:task AchieveContainsShotCocktail :parameters (?x_0 - shot ?x_1 - cocktail))
        (:method MakeAndPourCocktail
            :parameters (?x_0 - shot ?x_1 - cocktail ?x_2 - shaker ?x_3 - hand)
            :task (AchieveContainsShotCocktail ?x_0 ?x_1)
            :precondition (and (not (contains ?x_0 ?x_1)) )
            :ordered-subtasks (and
                (AchieveContainsShakerCocktail ?x_2 ?x_1 )
                (AchieveCleanShot ?x_0)
                (AchieveHolding ?x_3 ?x_2)
                (DoPourShakerToShot ?x_2 ?x_0 ?x_1) ) )
        (:method MakeAndPourCocktailNull
            :parameters (?x_0 - shot ?x_1 - cocktail)
            :task (AchieveContainsShotCocktail ?x_0 ?x_1)
            :precondition (and (contains ?x_0 ?x_1) ) )
        (:method MakeCocktail
            :parameters (?x_0 - shaker ?x_1 - cocktail ?x_2 - ingredient ?x_3 - hand ?x_4 - hand ?x_5 - ingredient)
            :task (AchieveContainsShakerCocktail ?x_0 ?x_1)
            :precondition (and (cocktailPart1 ?x_1 ?x_5) (cocktailPart2 ?x_1 ?x_2) (not (= ?x_4 ?x_3)) )
            :ordered-subtasks (and
                (AchieveCleanShaker ?x_0)
                (AchieveContainsShakerIngredient ?x_0 ?x_5)
                (AchieveContainsShakerIngredient ?x_0 ?x_2)
                (AchieveHolding ?x_4 ?x_0)
                (AchieveHandEmpty ?x_3)
                (shake ?x_1 ?x_5 ?x_2 ?x_0 ?x_4 ?x_3) ) )
        (:method MakeCocktailNull
            :parameters (?x_0 - shaker ?x_1 - cocktail)
            :task (AchieveContainsShakerCocktail ?x_0 ?x_1)
            :precondition (and (contains ?x_0 ?x_1) ) )
        (:method AddIngredientToEmptyShaker
            :parameters (?x_0 - shaker ?x_1 - ingredient ?x_2 - level ?x_3 - level ?x_4 - shot ?x_5 - hand)
            :task (AchieveContainsShakerIngredient ?x_0 ?x_1)
            :precondition (and (empty ?x_0) (clean ?x_0) (shakerLevel ?x_0 ?x_2) (next ?x_2 ?x_3) )
            :ordered-subtasks (and
                (AchieveContainsShotIngredient ?x_4 ?x_1)
                (AchieveHolding ?x_5 ?x_4)
                (pour-shot-to-clean-shaker ?x_4 ?x_1 ?x_0 ?x_5 ?x_2 ?x_3) ) )
        (:method AddIngredientToUsedShaker
            :parameters (?x_0 - shaker ?x_1 - ingredient ?x_2 - level ?x_3 - level ?x_4 - shot ?x_5 - hand)
            :task (AchieveContainsShakerIngredient ?x_0 ?x_1)
            :precondition (and (not (empty ?x_0)) (shakerLevel ?x_0 ?x_2) (next ?x_2 ?x_3) )
            :ordered-subtasks (and
                (AchieveContainsShotIngredient ?x_4 ?x_1)
                (AchieveHolding ?x_5 ?x_4)
                (pour-shot-to-used-shaker ?x_4 ?x_1 ?x_0 ?x_5 ?x_2 ?x_3) ) )
        (:method AddIngredientToShakerNull
            :parameters (?x_0 - shaker ?x_1 - ingredient)
            :task (AchieveContainsShakerIngredient ?x_0 ?x_1)
            :precondition (and (contains ?x_0 ?x_1) ) )
        (:method AddIngredientToShot
            :parameters (?x_0 - shot ?x_1 - ingredient ?x_2 - dispenser ?x_3 - hand ?x_4 - hand)
            :task (AchieveContainsShotIngredient ?x_0 ?x_1)
            :precondition (and (not (contains ?x_0 ?x_1)) (dispenses ?x_2 ?x_1) (not (= ?x_4 ?x_3)) )
            :ordered-subtasks (and
                (AchieveCleanShot ?x_0)
                (AchieveHolding ?x_4 ?x_0)
                (AchieveHandEmpty ?x_3)
                (fill-shot ?x_0 ?x_1 ?x_4 ?x_3 ?x_2) ) )
        (:method AddIngredientToShotNull
            :parameters (?x_0 - shot ?x_1 - ingredient)
            :task (AchieveContainsShotIngredient ?x_0 ?x_1)
            :precondition (and (contains ?x_0 ?x_1) ) )
        (:method CleanFullShot
            :parameters (?x_0 - shot ?x_1 - hand ?x_2 - beverage ?x_3 - hand)
            :task (AchieveCleanShot ?x_0 )
            :precondition (and (contains ?x_0 ?x_2) (not (= ?x_3 ?x_1)) )
            :ordered-subtasks (and
                (AchieveHolding ?x_3 ?x_0)
                (empty-shot ?x_3 ?x_0 ?x_2 )
                (AchieveHandEmpty ?x_1)
                (clean-shot ?x_0 ?x_2 ?x_3 ?x_1) ) )
        (:method CleanEmptyShot
            :parameters (?x_0 - shot ?x_1 - hand ?x_2 - beverage ?x_3 - hand)
            :task (AchieveCleanShot ?x_0)
            :precondition (and (empty ?x_0) (used ?x_0 ?x_2) (not (= ?x_3 ?x_1)) )
            :ordered-subtasks (and
                (AchieveHolding ?x_3 ?x_0)
                (AchieveHandEmpty ?x_1)
                (clean-shot ?x_0 ?x_2 ?x_3 ?x_1) ) )
        (:method CleanShotNull
            :parameters (?x_0 - shot)
            :task (AchieveCleanShot ?x_0)
            :precondition (and (clean ?x_0) ) )
        (:method CleanEmptyShaker
            :parameters (?x_0 - shaker ?x_1 - hand ?x_2 - hand)
            :task (AchieveCleanShaker ?x_0)
            :precondition (and (not (clean ?x_0)) (empty ?x_0) (not (= ?x_2 ?x_1)) )
            :ordered-subtasks (and
                (AchieveHolding ?x_2 ?x_0)
                (AchieveHandEmpty ?x_1)
                (clean-shaker ?x_0 ?x_2 ?x_1) ) )
        (:method CleanFullShaker
            :parameters (?x_0 - shaker ?x_1 - level ?x_2 - cocktail ?x_3 - hand ?x_4 - hand ?x_5 - level )
            :task (AchieveCleanShaker ?x_0)
            :precondition (and (contains ?x_0 ?x_2) (shaked ?x_0) (shakerEmptyLevel ?x_0 ?x_1) (shakerLevel ?x_0 ?x_5) (not (= ?x_4 ?x_3)) )
            :ordered-subtasks (and
                (AchieveHolding ?x_4 ?x_0)
                (empty-shaker ?x_4 ?x_0 ?x_2 ?x_5 ?x_1)
                (AchieveHandEmpty ?x_3)
                (clean-shaker ?x_0 ?x_4 ?x_3) ) )
        (:method CleanShakerNull
            :parameters (?x_0 - shaker)
            :task (AchieveCleanShaker ?x_0)
            :precondition (and (clean ?x_0) ) )
        (:method PickUp
            :parameters (?x_0 - hand ?x_1 - container)
            :task (AchieveHolding ?x_0 ?x_1)
            :precondition (and (not (holding ?x_0 ?x_1)) )
            :ordered-subtasks (and
                (AchieveHandEmpty ?x_0)
                (AchieveOnTable ?x_1)
                (grasp ?x_0 ?x_1) ) )
        (:method HoldingNull
            :parameters (?x_0 - hand ?x_1 - container)
            :task (AchieveHolding ?x_0 ?x_1)
            :precondition (and (holding ?x_0 ?x_1) ) )
        (:method EmptyHand
            :parameters (?x_0 - hand ?x_1 - container)
            :task (AchieveHandEmpty ?x_0)
            :precondition (and (holding ?x_0 ?x_1) )
            :ordered-subtasks (and (drop ?x_0 ?x_1) ) )
        (:method HandEmptyNull
            :parameters (?x_0 - hand ?x_1 - hand)
            :task (AchieveHandEmpty ?x_0)
            :precondition (and (handEmpty ?x_1) ) )
        (:method PutDown
            :parameters (?x_0 - container ?x_1 - hand)
            :task (AchieveOnTable ?x_0)
            :precondition (and (holding ?x_1 ?x_0) )
            :ordered-subtasks (and (drop ?x_1 ?x_0) ) )
        (:method OnTableNull
            :parameters (?x_0 - container)
            :task (AchieveOnTable ?x_0)
            :precondition (and (ontable ?x_0) ) )
        (:method pour_shaker_to_shot_action
            :parameters (?x_0 - shaker ?x_1 - shot ?x_2 - cocktail ?x_3 - level ?x_4 - hand ?x_5 - level)
            :task (DoPourShakerToShot ?x_0 ?x_1 ?x_2)
            :precondition (and (holding ?x_4 ?x_0) (shaked ?x_0) (empty ?x_1) (clean ?x_1) (contains ?x_0 ?x_2) (shakerLevel ?x_0 ?x_3) (next ?x_5 ?x_3) )
            :ordered-subtasks (and (pour-shaker-to-shot ?x_2 ?x_1 ?x_4 ?x_0 ?x_3 ?x_5) ) )
        (:action clean-shaker
            :parameters (?x_0 - shaker ?x_1 - hand ?x_2 - hand)
            :precondition (and (holding ?x_1 ?x_0) (empty ?x_0) (handEmpty ?x_2) )
            :effect (and (clean ?x_0) ) )
        (:action clean-shot
            :parameters (?x_0 - shot ?x_1 - beverage ?x_2 - hand ?x_3 - hand )
            :precondition (and (holding ?x_2 ?x_0) (handEmpty ?x_3) (empty ?x_0) (used ?x_0 ?x_1) )
            :effect (and (clean ?x_0) (not (used ?x_0 ?x_1)) ) )
        (:action drop
            :parameters (?x_0 - hand ?x_1 - container)
            :precondition (and (holding ?x_0 ?x_1) )
            :effect (and (ontable ?x_1) (handEmpty ?x_0) (not (holding ?x_0 ?x_1)) ) )
        (:action empty-shaker
            :parameters (?x_0 - hand ?x_1 - shaker ?x_2 - cocktail ?x_3 - level ?x_4 - level)
            :precondition (and (holding ?x_0 ?x_1) (contains ?x_1 ?x_2) (shaked ?x_1) (shakerEmptyLevel ?x_1 ?x_4) (shakerLevel ?x_1 ?x_3) )
            :effect (and (empty ?x_1) (unshaked ?x_1) (shakerLevel ?x_1 ?x_4) (not (contains ?x_1 ?x_2)) (not (shakerLevel ?x_1 ?x_3)) (not (shaked ?x_1)) ) )
        (:action empty-shot
            :parameters (?x_0 - hand ?x_1 - shot ?x_2 - beverage)
            :precondition (and (holding ?x_0 ?x_1) (contains ?x_1 ?x_2) )
            :effect (and (empty ?x_1) (not (contains ?x_1 ?x_2)) ) )
        (:action fill-shot
            :parameters (?x_0 - shot ?x_1 - ingredient ?x_2 - hand ?x_3 - hand ?x_4 - dispenser)
            :precondition (and (holding ?x_2 ?x_0) (handEmpty ?x_3) (empty ?x_0) (clean ?x_0) (dispenses ?x_4 ?x_1) )
            :effect (and (contains ?x_0 ?x_1) (used ?x_0 ?x_1) (not (clean ?x_0)) (not (empty ?x_0)) ) )
        (:action grasp
            :parameters (?x_0 - hand ?x_1 - container)
            :precondition (and (ontable ?x_1) (handEmpty ?x_0) )
            :effect (and (holding ?x_0 ?x_1) (not (handEmpty ?x_0)) (not (ontable ?x_1)) ) )
        (:action pour-shaker-to-shot
            :parameters (?x_0 - cocktail ?x_1 - shot ?x_2 - hand ?x_3 - shaker ?x_4 - level ?x_5 - level)
            :precondition (and (holding ?x_2 ?x_3) (contains ?x_3 ?x_0) (shaked ?x_3) (clean ?x_1) (empty ?x_1) (shakerLevel ?x_3 ?x_4) (next ?x_5 ?x_4) )
            :effect (and (contains ?x_1 ?x_0) (used ?x_1 ?x_0) (shakerLevel ?x_3 ?x_5) (not (clean ?x_1)) (not (empty ?x_1)) (not (shakerLevel ?x_3 ?x_4)) ) )
        (:action pour-shot-to-clean-shaker
            :parameters (?x_0 - shot ?x_1 - ingredient ?x_2 - shaker ?x_3 - hand ?x_4 - level ?x_5 - level)
            :precondition (and (contains ?x_0 ?x_1) (empty ?x_2) (clean ?x_2) (holding ?x_3 ?x_0) (shakerLevel ?x_2 ?x_4) (next ?x_4 ?x_5) )
            :effect (and (contains ?x_2 ?x_1) (shakerLevel ?x_2 ?x_5) (unshaked ?x_2) (empty ?x_0) (not (clean ?x_2)) (not (empty ?x_2)) (not (contains ?x_0 ?x_1)) (not (shakerLevel ?x_2 ?x_4)) ) )
        (:action pour-shot-to-used-shaker
            :parameters (?x_0 - shot ?x_1 - ingredient ?x_2 - shaker ?x_3 - hand ?x_4 - level ?x_5 - level)
            :precondition (and (contains ?x_0 ?x_1) (unshaked ?x_2) (holding ?x_3 ?x_0) (shakerLevel ?x_2 ?x_4) (next ?x_4 ?x_5) )
            :effect (and (contains ?x_2 ?x_1) (shakerLevel ?x_2 ?x_5) (empty ?x_0) (not (contains ?x_0 ?x_1)) (not (shakerLevel ?x_2 ?x_4)) ) )
        (:action shake
            :parameters (?x_0 - cocktail ?x_1 - ingredient ?x_2 - ingredient ?x_3 - shaker ?x_4 - hand ?x_5 - hand)
            :precondition (and (handEmpty ?x_5) (holding ?x_4 ?x_3) (contains ?x_3 ?x_1) (contains ?x_3 ?x_2) (unshaked ?x_3) (cocktailPart1 ?x_0 ?x_1) (cocktailPart2 ?x_0 ?x_2) )
            :effect (and (shaked ?x_3) (contains ?x_3 ?x_0) (not (unshaked ?x_3)) (not (contains ?x_3 ?x_1)) (not (contains ?x_3 ?x_2)) ) )
        )
        """,
        # NOTE(review): "(next level1 level1)" / "(next level2 level2)" below
        # look like self-loops — confirm the intended level ordering.
        """
        (define (problem p-1-2-2)
        (:domain barman_htn)
        (:objects
            left right - hand
            shaker1 - shaker
            shot1 shot2 - shot
            ingredient1 ingredient2 - ingredient
            dispenser1 dispenser2 - dispenser
            cocktail1 - cocktail
            level1 level2 level3 - level )
        (:htn :parameters ()
            :subtasks (and (AchieveContainsShotCocktail shot2 cocktail1) ) )
        (:init
            (ontable shaker1) (ontable shot1) (ontable shot2)
            (clean shaker1) (clean shot1) (clean shot2)
            (empty shaker1) (empty shot1) (empty shot2)
            (dispenses dispenser1 ingredient1)
            (dispenses dispenser2 ingredient2)
            (handEmpty left) (handEmpty right)
            (shakerEmptyLevel shaker1 level1)
            (shakerLevel shaker1 level1)
            (next level1 level1)
            (next level2 level2)
            (cocktailPart1 cocktail1 ingredient2)
            (cocktailPart2 cocktail1 ingredient1) ) )
        """,
    ]
    # Earley parser (Lark default) over the module-level HDDL grammar.
    parser = Lark(hddl_grammar_str, start="hddl_file")
    for i, input_ in enumerate(inputs_):
        result = parser.parse(input_)
        print(result)
if exps[1] == '<':
    # Dispatch on the comparison operator token and apply it to the
    # operand values.  NOTE(review): this fragment continues a function
    # whose definition starts before this chunk.
    return exps[0] < exps[2]
elif exps[1] == '>':
    return exps[0] > exps[2]
elif exps[1] == '<=':
    return exps[0] <= exps[2]
elif exps[1] == '>=':
    return exps[0] >= exps[2]


# Sample inputs for manual testing of the expression parser.
s0 = 'age1 > 10 || age2 < 20 && age3 < 30 || aaa == "bbb"'
s1 = "(age > 10 && age < 20)"
s2 = 'nationality in ["Israel"]'

if __name__ == "__main__":
    text = s0
    # Use the first command-line argument as the input, if one was given.
    if len(sys.argv) != 1:
        text = sys.argv[1]
    if len(text) == 0:
        exit(0)
    parser = Lark(grammar, parser='lalr')
    transformer = SNMTransformer()
    tree = parser.parse(text)
    print(transformer.transform(tree))
_OPENQASMPARSER = Lark( r""" ID: /[a-z][A-Za-z0-9_]*/ REAL: /([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([eE][-+]?[0-9]+)?/ NNINTEGER: /[1-9]+[0-9]*|0/ PI: "pi" SIN: "sin" COS: "cos" TAN: "tan" EXP: "EXP" LN: "ln" SQRT: "sqrt" mainprogram: "OPENQASM" REAL ";" program program: statement | program statement statement: decl | gatedecl goplist "}" | gatedecl "}" | "opaque" ID idlist ";" | "opaque" ID "( )" idlist ";" | "opaque" ID "(" idlist ")" idlist ";" | qop | "if (" ID "==" NNINTEGER ")" qop | "barrier" anylist ";" | "include" ESCAPED_STRING ";" | /\/\/+.*/ decl: qreg | creg creg: "creg" ID "[" NNINTEGER "]" ";" qreg: "qreg" ID "[" NNINTEGER "]" ";" gatedecl: "gate" ID idlist "{" | "gate" ID "( )" idlist "{" | "gate" ID "(" idlist ")" idlist "{" goplist: uop | "barrier" idlist ";" | goplist uop | goplist "barrier" idlist ";" qop: uop | measure | reset measure: "measure" argument "->" argument ";" reset: "reset" argument ";" uop: ugate | cxgate | gate gate: ID anylist ";" | ID "( )" anylist ";" | ID "(" explist ")" anylist ";" ugate: "U (" explist ")" argument ";" cxgate: "CX" argument "," argument ";" anylist: idlist | mixedlist idlist: ID | idlist "," ID mixedlist: ID "[" NNINTEGER "]" | mixedlist "," ID | mixedlist "," ID "[" NNINTEGER "]" | idlist "," ID "[" NNINTEGER "]" argument: ID | ID "[" NNINTEGER "]" explist: exp | explist "," exp exp: mulexp ((/\+/ | /\-/) mulexp)* mulexp: primaryexp ((/\*/ | /\//) primaryexp)* usub: "-" exp pow: primaryexp "^" primaryexp parenexp: "(" exp ")" unaryexp: unaryop "(" exp ")" primaryexp: parenexp | REAL | NNINTEGER | PI | ID | pow | usub | unaryexp unaryop: SIN | COS | TAN | EXP | LN | SQRT %import common.ESCAPED_STRING %import common.WS %ignore WS """, parser='lalr', start='mainprogram', )
# Future versions of lark will make it easier to write these kinds of grammars. # # Another approach is to use the contextual lexer with LALR. It is less powerful than Earley, # but it can handle some ambiguity when lexing and it's much faster. # See examples/conf.py for an example of that approach. # from lark import Lark parser = Lark(r""" start: _NL? section+ section: "[" NAME "]" _NL item+ item: NAME "=" VALUE _NL VALUE: /./* %import common.CNAME -> NAME %import common.NEWLINE -> _NL %import common.WS_INLINE %ignore WS_INLINE """, lexer='dynamic') def test(): sample_conf = """ [bla] a=Hello this="that",4 """ r = parser.parse(sample_conf)
modname = items[0].value
# NOTE(review): the lines above/below continue a transformer method whose
# definition starts before this chunk; `items` are parse-tree tokens.
pinname = items[1].value
mod = self.mm.get_mod(modname)
pinnum = mod.ref_to_pinnum(pinname, 'full')
node_on_pin = mod.get_node(pinnum)
# No node attached to the pin yet: return a fresh ModPin reference.
if node_on_pin == None:
    return ModPin(mod, pinnum)
return node_on_pin


# Script entry: parse mod.circ with the grammar from circle.g, transform the
# tree into module factories, and instantiate the first module.
g = open('circle.g').read()
circle_parser = Lark(g, start='top')
res = circle_parser.parse(open("mod.circ").read())
print(res.pretty())
nm = NodeManager()
mm = ModManager()
xfrm = CircleTransformer(nm, mm).transform(res)
modulefactories = [mf for mf in xfrm.children if type(mf) == ModuleFactory]
#print(modulefactories)
m = modulefactories[0].make()
n1 = Node('n1')
DOT: "."
LPARENA: "A"
LPARENC: "C"
LPARENG: "G"
LPARENU: "U"
RPARENA: "a"
RPARENC: "c"
RPARENG: "g"
RPARENU: "u"
'''

# Evaluation setup: dataset/loader for the test split, plus a LALR parser
# whose terminal patterns define the token alphabet for the generative model.
test_data = lalrDataset(lines_s_srprna_test, lines_p_srprna_test)
testloader = torch.utils.data.DataLoader(test_data, batch_size=1,
                                         shuffle=False, num_workers=0)
parser = Lark(grammar, start='e', parser='lalr')
# Strip regex escaping/quoting from each terminal pattern to recover the
# raw characters, then add '_' as the padding/blank symbol.
tokenmap = [
    str(t.pattern).replace(r'\\', '').strip("'") for t in parser.terminals
]
tokenmap.append("_")
assert set(tokenmap) == test_data.get_s_chars()
tokenmap = test_data.get_tokenmap()

from lalrnn_all_lets import SimpleGenerativeLALRNN

#decoder = DecoderRNN(100,4)
decoder = SimpleGenerativeLALRNN(grammar, 'e', tokenmap, '_',
                                 test_data.get_s_char2int())
encoder = EncoderRNN(5, 300)
# Move both models to the first GPU.
encoder.cuda(0)
decoder.cuda(0)
@log
def stmt(self, items):
    # Top-level statement: join the comma-separated expressions back together.
    return self.recursive_join(items)


# LALR parser for asciimath-style nested lists and matrices,
# e.g. "[:[1,3]:]"; L/R cover bracket pairs, DOT_L/DOT_R the matrix fences.
asciimath_parser = Lark(
    r"""
    stmt: _csl
    exp: list
        | mat
        | NUMBER
    mat: "[:" _csl? ":]"
    list: (L _csl? R | DOT_L _csl? R | L _csl? DOT_R) -> list
    L.0: "(" | "[" | "{{"
    R.0: ")" | "]" | "}}"
    DOT_L.1: "[:"
    DOT_R.1: ":]"
    _csl: exp (/,/? exp)* /,/?

    %import common.WS
    %import common.NUMBER
    %ignore WS
    """,
    start="stmt",
    parser="lalr",
    debug=True,
)
text = """[:[1,3,[2,3,[1,[2,7]]]]:]"""
parsed_text = asciimath_parser.parse(text)
print(parsed_text.pretty())
print(Transformer().transform(parsed_text))
def str_to_spn(text, features=None, str_to_spn_lambdas=_str_to_spn):
    """Parse a textual SPN description into an SPN object graph.

    Parameters
    ----------
    text : str
        SPN description, e.g. "(0.3 * (leaf) + 0.7 * (leaf))".
    features : optional
        Feature metadata forwarded to the registered leaf constructors.
    str_to_spn_lambdas : dict
        Registry mapping grammar rule name -> (constructor, grammar
        extension, extra payload); defaults to the module-level registry.

    Returns
    -------
    A valid SPN with scopes rebuilt and node ids assigned.

    Raises
    ------
    Exception
        If the parse tree contains a node type that is not registered.
    """
    from lark import Lark

    # Splice every registered leaf type into the `node` alternation and
    # append its grammar rules to the base grammar.
    ext_name = "\n".join(map(lambda s: " | " + s, str_to_spn_lambdas.keys()))
    ext_grammar = "\n".join([s for _, s, _ in str_to_spn_lambdas.values()])

    grammar = r"""
%import common.DECIMAL -> DECIMAL
%import common.WS
%ignore WS
%import common.WORD -> WORD
%import common.DIGIT -> DIGIT
ALPHANUM: "a".."z"|"A".."Z"|DIGIT
PARAMCHARS: ALPHANUM|"_"
FNAME: ALPHANUM+
PARAMNAME: PARAMCHARS+
NUMBER: DIGIT|DECIMAL
NUMBERS: NUMBER+
list: "[" [NUMBERS ("," NUMBERS)*] "]"
?node: prodnode | sumnode
""" + ext_name + r"""
prodnode: "(" [node ("*" node)*] ")"
sumnode: "(" [NUMBERS "*" node ("+" NUMBERS "*" node)*] ")"
""" + ext_grammar

    parser = Lark(grammar, start='node')
    # print(grammar)
    tree = parser.parse(text)

    def tree_to_spn(tree, features):
        """Recursively convert one parse subtree into an SPN node."""
        tnode = tree.data

        if tnode == "sumnode":
            node = Sum()
            # Children alternate weight, subtree, weight, subtree, ...
            for j in range(0, len(tree.children) - 1, 2):
                w, c = tree.children[j], tree.children[j + 1]
                node.weights.append(float(w))
                node.children.append(tree_to_spn(c, features))
            return node

        if tnode == "prodnode":
            if len(tree.children) == 1:
                # A product with a single factor is just that factor.
                return tree_to_spn(tree.children[0], features)
            node = Product()
            for c in tree.children:
                node.children.append(tree_to_spn(c, features))
            return node

        if tnode in str_to_spn_lambdas:
            # Delegate leaf construction to the registered builder,
            # passing its payload and this converter for nested use.
            builder, _, payload = str_to_spn_lambdas[tnode]
            return builder(tree, features, payload, tree_to_spn)

        raise Exception('Node type not registered: ' + tnode)

    spn = tree_to_spn(tree, features)
    rebuild_scopes_bottom_up(spn)
    assert is_valid(spn)
    # spn = prune(spn)  # pruning disabled; re-validate if re-enabled
    assert is_valid(spn)
    assign_ids(spn)
    return spn
@v_args(inline=True)
class PLUTO(Transformer):
    # NOTE(review): `ff` takes no `self`; with @v_args(inline=True) it is
    # unclear whether this is ever invoked as a rule callback — confirm.
    def ff():
        return 0

    # def step_num(self, STRING):
    #     return "def " + str(STRING) +":"

    # def declare_body(self, *var_declaration):
    #     return var_declaration

    # def var_declaration(self, var_name, var_type):
    #     if str(var_type.children[0]) == "string":
    #         return " " + str(var_name.children[0]) + " = \"\" "

    # def assign_command(self, assigned, assignee):
    #     return " " + str(assigned.children[0]) + " = " + str(assignee.children[0])


# LALR parser that applies the transformer inline during parsing.
parser = Lark(PLUTO_grammar, parser="lalr", transformer=PLUTO())
py_code = parser.parse


def run_print(program):
    # Parse a PLUTO program and pretty-print the resulting tree.
    parse_tree = py_code(program)
    print(parse_tree.pretty())


def test():
    run_print(PLUTO_code)


def main():
    # Simple REPL loop reading source from stdin.
    # NOTE(review): truncated at the chunk boundary.
    while True:
        code = input('> ')
grammar = Lark(r""" start: function+ ?function: VAR_NAME VAR_NAME "(" (declaration ("," declaration)*)? ")" scope // Separation into argument for better organization in parsing // No default initialization added yet ?argument: VAR_NAME VAR_NAME -> argument scope: "{" statements statements* "}" ?statements: s_statement ";" | ns_statement // Semi-colon Statement ?s_statement: declaration | assignment | return | expression // No Semi-colon Statement ?ns_statement: loop | conditional // The only allowed ASSIGN_OP in the declaration should be "=" (treated here) ?declaration: VAR_NAME VAR_NAME (/=/ expression)? ?assignment: VAR_NAME ASSIGN_OP expression | VAR_NAME INCREMENT_OP -> post_increment | INCREMENT_OP VAR_NAME -> pre_increment ?expression: expression PLUS_OP term -> binary_operation | term ?term : expression MUL_OP term -> binary_operation | pow ?pow : atom POW_OP pow -> binary_operation | atom return: "return" expression? -> return_expression ?loop: count | while | for // Declaration cannot have assignment (not treated here) // VALUES need to be FLOAT or INTEGER or result in one of those (not treated here) ?count: "count" (VAR_NAME|declaration)? "from" expression "to" expression scope ?while: "while" ((or_condition)|("(" or_condition ")")) scope -> while_loop // Not all statements are allowed, such as return, loops or conditionals (treated here) // ?for: "for" (declaration|assignment|expression)? ";" or_condition ";" (declaration|assignment|expression)? scope -> for_loop // | "for" "(" (declaration|assignment|expression)? ";" or_condition ";" (declaration|assignment|expression)? 
")" scope -> for_loop ?for: "for" ";" or_condition ";" scope -> minimal_for | "for" "(" ";" or_condition ";" ")" scope -> minimal_for | "for" (declaration|assignment|expression) ";" or_condition ";" scope -> left_for | "for" "(" (declaration|assignment|expression) ";" or_condition ";" ")" scope -> left_for | "for" ";" or_condition ";" (declaration|assignment|expression) scope -> right_for | "for" "(" ";" or_condition ";" (declaration|assignment|expression) ")" scope -> right_for | "for" (declaration|assignment|expression) ";" or_condition ";" (declaration|assignment|expression) scope -> for_loop | "for" "(" (declaration|assignment|expression) ";" or_condition ";" (declaration|assignment|expression) ")" scope -> for_loop // Switch pending ?conditional: if ?if: "if" ((or_condition)|("(" or_condition ")")) scope else? -> if_conditional ?else: "else" (if|scope) -> else_conditional ?or_condition: and_condition ("OR" and_condition)* ?and_condition: not_condition ("AND" not_condition)* not_condition: /NOT/? (condition) ?condition : condition_expression (COMP_OP condition_expression)? | condition_expression COMP_OP condition_expression COMP_OP condition_expression -> composite_condition ?condition_expression : expression | "(" or_condition ")" // (2 + 3) * x not working atom: INTEGER | FLOAT | STRING | CHARACTER | BOOLEAN | VAR_NAME | call | "(" expression ")" ?call: VAR_NAME "(" (expression ("," expression)*)? ")" // VAR_NAME refers to variables, function (which are variables as well) names and types. // Terminals INTEGER : /-?\d+/ FLOAT : /-?\d+\.\d+/ STRING : /"[^"]*"/ CHARACTER : /'[^']([^'])?'/ BOOLEAN : /(TRUE)|(FALSE)/ VAR_NAME: /[A-Za-z]\w*/ // ARITH_OP: /[\+-\*\/%\^]/ PLUS_OP: /[\+-]/ MUL_OP : /[\/*]/ POW_OP : /\^/ COMP_OP : /(==)|(!=)|(>=)|(<=)|(>)|(<)/ ASSIGN_OP : /=|(\+=)|(-=)|(\*=)|(\/=)|(&=)/ INCREMENT_OP: /(\+\+)|(--)/ LOGICAL_OP: /AND|OR/ UNARY_OP : /NOT/ %ignore /\s+/ """)
def __init__(self):
    """Initialize the LTLf parser.

    Reads the grammar from ``ltlf.lark`` next to this module and builds a
    LALR Lark parser plus the transformer applied to parse trees.
    """
    self._transformer = LTLfTransformer()
    # Read the grammar text explicitly so the file handle is closed
    # promptly (the original passed an open file object to Lark and
    # never closed it).
    grammar_text = Path(CUR_DIR, "ltlf.lark").read_text()
    self._parser = Lark(grammar_text, parser="lalr")
def __init__(self):
    """Build a LALR parser over the module-level grammar with
    ``schema`` as the start rule."""
    self.parser = Lark(
        grammar,
        parser='lalr',
        start='schema',
    )
@staticmethod def mirror_mirror_device_row(children): return {"mirror_device_row": MirrorDevice(**DeepChainMap(*children))} @staticmethod def mirror_mirror_device_data(children): devices = [] for child in children: # We might get mirror_device_data because of the manual left recursion if "mirror_device_row" in child: devices.append(child["mirror_device_row"]) if "mirror_device_data" in child: devices.append(child["mirror_device_data"]) return {"mirror_device_data": devices} parser = Lark(DM_GRAMMAR) transformer = DeviceMapperTransformer() # The parser_fast only works on newer lark versions parser_fast = Lark(DM_GRAMMAR, parser="lalr", transformer=transformer) def parse(data: str, event: str): if version.Version(lark.__version__) >= version.Version("1.0.0"): out = parser_fast.parse(event + data) else: out = transformer.transform(parser.parse(event + data)) # TODO: Provide sanity checks for object construction e.g a linear target should always have LinearAttributes return out
JS: /{%.*?%}/s js: JS? NAME: /[a-zA-Z_$]\w*/ COMMENT: /#[^\n]*/ REGEXP: /\[.*?\]/ STRING: /".*?"/ %import common.WS %ignore WS %ignore COMMENT """ nearley_grammar_parser = Lark(nearley_grammar, parser='earley', lexer='standard') def _get_rulename(name): name = {'_': '_ws_maybe', '__': '_ws'}.get(name, name) return 'n_' + name.replace('$', '__DOLLAR__').lower() class NearleyToLark(InlineTransformer): def __init__(self): self._count = 0 self.extra_rules = {} self.extra_rules_rev = {} self.alias_js_code = {}
#
# Example-driven error reporting with the LALR parser.
#
from lark import Lark, UnexpectedInput

from .json_parser import json_grammar  # Using the grammar from the json_parser example

json_parser = Lark(json_grammar, parser='lalr')


class JsonSyntaxError(SyntaxError):
    """Base class for user-friendly JSON syntax errors.

    Subclasses set ``label``; ``args`` carries (context, line, column)
    captured at the point of failure.
    """

    def __str__(self):
        context, line, column = self.args
        template = '%s at line %s, column %s.\n\n%s'
        return template % (self.label, line, column, context)


class JsonMissingValue(JsonSyntaxError):
    label = 'Missing Value'


class JsonMissingOpening(JsonSyntaxError):
    label = 'Missing Opening'


class JsonMissingClosing(JsonSyntaxError):
    label = 'Missing Closing'


class JsonMissingComma(JsonSyntaxError):
    label = 'Missing Comma'


class JsonTrailingComma(JsonSyntaxError):
    label = 'Trailing Comma'
from abc import ABC
from pathlib import Path
import pandas as pd
from lark import Lark, Transformer, v_args
from lark.exceptions import LarkError

# Directory of this module; the grammar path is resolved relative to it.
BASE_DIR = Path(__file__).resolve().parent

# Build the MAD-X sequence-file parser once, at import time.
with (BASE_DIR / "../lark/madx_seq.lark").open() as file:
    MADX_PARSER = Lark(file, parser="lalr", maybe_placeholders=True)
    # Rewind the handle after Lark consumed it — presumably so it can be
    # read again while still open; TODO confirm anything re-reads it.
    file.seek(0)


@v_args(inline=True)
class AbstractSequenceFileTransformer(ABC, Transformer):
    """Base Lark transformer that accumulates sequence data from a parse tree."""

    def transform(self, tree):
        """Transform *tree*, returning (seq, elements, name, length).

        Accumulator attributes are reset on every call so a single
        transformer instance can be reused across files.
        """
        self.elements = []
        self.seq = None
        self.name = None
        self.length = 0.0
        super().transform(tree)
        return self.seq, self.elements, self.name, self.length

    # Rule/terminal callbacks: simple value conversions (v_args(inline=True)
    # passes children as positional arguments).
    int = int
    float = float
    word = str
    neg = lambda self, item: -item
    number = float
    name = lambda self, item: item.value.upper()
    string = lambda self, item: item[1:-1]  # strip the surrounding quotes
from lark import Lark


def tokenize(s):
    """Map every ASCII digit in *s* to a letter ('0' -> 'a', ..., '9' -> 'j').

    Lark rule names cannot be bare numbers, so numeric rule ids in the
    puzzle input are re-spelled as letters; other characters pass through.
    """
    out = []
    for c in s:
        if "0" <= c <= "9":
            out.append(chr(97 + int(c)))
        else:
            out.append(c)
    return "".join(out)


def is_valid(parse_func, line):
    """Return True when *line* parses without raising, else False."""
    try:
        parse_func(line)
    except Exception:
        return False
    return True


with open("input-19", "r") as f:
    rules, candidates = f.read().split("\n\n")

# Part 1 grammar is the input verbatim; part 2 makes rules 8 and 11 recursive.
grammar1 = tokenize("start: 0\n" + rules)
grammar2 = grammar1.replace(
    tokenize("8: 42"), tokenize("8: 42 | 42 8")
).replace(
    tokenize("11: 42 31"), tokenize("11: 42 31 | 42 11 31")
)

parse1 = Lark(grammar1).parse
parse2 = Lark(grammar2).parse

candidate_lines = candidates.splitlines()
print(sum(is_valid(parse1, line) for line in candidate_lines))
print(sum(is_valid(parse2, line) for line in candidate_lines))
OR: "||" comp_op: "<"|">"|"=="|">="|"<="|"/=" NUMBER: /(0|[1-9][0-9]*)/ FUN_NAME: /(?!(if|else|fun)\b)[a-z]\w*/ PARAM_NAME: /(?!(if|else|var)\b)[a-z]\w*/ %import common.WS %import common.NEWLINE %ignore WS %ignore NEWLINE """ p = Lark(grammar) def make_png(parsed_tree, filename): pydot__tree_to_png(parsed_tree, filename) if __name__ == '__main__': with open(input_file, "r") as file: try: code = file.read() print(code) tree = p.parse(code) make_png(tree, output_file) except Exception as e: print(f"Parse error! {e}")
# -*-coding: utf-8 -*- from lark import Lark, Tree, Token, Transformer from tabulate import tabulate from tableGenerator2 import TableGenerator2 from grammar2 import * import sys fileName = sys.argv[1:][0] f = open(fileName, "r") fileContents = f.read() f.close() tableGen = TableGenerator2("Table") parser = Lark(grammar, parser="lalr", transformer=tableGen) #try: parseTree = parser.parse("%s\n" % fileContents) #print(parseTree) def getTokenInstances(tree, type, value): results = "" if isinstance(tree, Tree): for child in tree.children: if isinstance(child, Tree): innerSearch = getTokenInstances(child, type, value)
default="out", help="directory to store the datapack in") args = parser.parse_args() input_path = Path(args.input) if not input_path.exists() or input_path.is_dir(): print(f"No such file: '{input_path}'", file=sys.stderr) exit(1) output_path = Path(args.output) if not output_path.exists(): output_path.mkdir() if not output_path.is_dir(): print(f"'{output_path}' is not a directory", file=sys.stderr) exit(1) grammar = resource_string("mcfunction_compiler.resources", "grammar.lark").decode() l = Lark(grammar, parser="earley") with input_path.open() as file: tree = l.parse(file.read()) t = TreeTransformer() ast = t.transform(tree) ast.accept(NameResolver()) generator = CodeGenerator() ast.accept(generator) generator.write_to_files(output_path)
CURRENT_DATE: "today" | /this (week|month|year)/ LAST_DATE: "yesterday" | /last (week|month|year)/ X_AGO: /\d+ (day|week|month|year)s? ago/ UNIT: /(day|week|month|year)s?/ YEAR: /\d{4}/ MONTH: /\d{4}-\d{2}/ DAY: /\d{4}-\d{2}-\d{2}/ TIME: /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/ %import common.WS %import common.NUMBER %ignore WS """ parser = Lark(grammar) identity = lambda x: x dt_parse_func = dp.parse def date_trunc(dt, unit): dt = { 'year': lambda x: date(x.year, 1, 1), 'month': lambda x: date(x.year, x.month, 1), 'week': lambda x: x.date() - relativedelta(days=x.weekday()), 'day': lambda x: x.date() }.get(unit, identity)(dt) if isinstance(dt, date): dt = datetime(dt.year, dt.month, dt.day) return dt
def main():
    """Evaluate each non-blank stdin line with the calculator grammar and print the total.

    The LALR parser applies the ``Calculator`` transformer during parsing,
    so ``parser.parse`` yields the numeric result of each line directly.
    """
    parser = Lark(grammar, parser='lalr', transformer=Calculator())
    # Generator expression: no need to materialize a list just to sum it.
    print(
        sum(parser.parse(line.strip()) for line in sys.stdin if line.strip()))
add = v_args(inline=True)(operator.add) sub = v_args(inline=True)(operator.sub) mul = v_args(inline=True)(operator.mul) div = v_args(inline=True)(operator.truediv) pow = v_args(inline=True)(operator.pow) neg = v_args(inline=True)(operator.neg) pos = v_args(inline=True)(operator.pos) function = v_args(inline=True)(str) keyword = v_args(inline=True)(str) grammar = pkgutil.get_data("pyquil._parser", "grammar.lark").decode() parser = Lark( grammar, start="quil", parser="lalr", transformer=QuilTransformer(), maybe_placeholders=True, ) def run_parser(program: str) -> List[AbstractInstruction]: """ Parse a raw Quil program and return a corresponding list of PyQuil objects. :param str quil: a single or multiline Quil program :return: list of instructions """ p = parser.parse(program) return p