def optimize_action_block(rules):
    """Flatten the nested optional prefix of the "action_block" rule.

    The body ``( ( statement )? KW_ELSE )? statement_or_null`` is replaced
    by an equivalent selection without the nested optional groups.
    """
    parser = Antlr4parser()
    rule = rule_by_name(rules, "action_block")
    expected = parser.from_str(
        "( ( statement )? KW_ELSE )? statement_or_null")
    assert rule.body.eq_relaxed(expected)
    rule.body = parser.from_str("""
        ( attribute_instance )* SEMI
        | KW_ELSE statement_or_null
        | statement ( KW_ELSE statement_or_null )?
    """)
def optimize_item_rules(rules):
    """Inline several ``*_item`` wrapper rules and patch "generate_item".

    The last alternative of generate_item (checker_or_generate_item) is
    replaced by ``KW_RAND data_declaration`` and a new
    ``program_generate_item`` alternative is appended.
    """
    inlined_rule_names = (
        "package_or_generate_item_declaration",
        "module_or_generate_item",
        "module_or_generate_item_declaration",
        "module_common_item",
        "interface_or_generate_item",
        "checker_or_generate_item_declaration",
    )
    for name in inlined_rule_names:
        inline_rule(rules, name)
    parser = Antlr4parser()
    gen_item = rule_by_name(rules, "generate_item")
    assert gen_item.body[-1].eq_relaxed(
        Antlr4Symbol("checker_or_generate_item", False))
    gen_item.body[-1] = parser.from_str("KW_RAND data_declaration")
    gen_item.body.append(parser.from_str("program_generate_item"))
def test_selection_propagate_optionality1(self):
    """An optional alternative makes the whole selection optional."""
    source = "( a )? | b | ( c )?"
    expected = "( a | b | c)?"
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_propagate_optionality(tree)
    self.assertTextEq(expected, result.toAntlr4())
def test_selection_propagate_optionality3(self):
    """Optionality is propagated through leading optional/star items."""
    source = "b | ( a )? ( a1 )* ( a2 )* | ( c )?"
    expected = "( b | a ( a1 )* ( a2 )* | ( a1 )+ ( a2 )* | ( a2 )+ | c)?"
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_propagate_optionality(tree)
    self.assertTextEq(expected, result.toAntlr4())
def test_common_prefix_empty(self):
    """Sharing a prefix may leave an explicit empty alternative behind."""
    source = "a | a b"
    expected = "a ( | b )"
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_share_prefix(tree)
    self.assertTextEq(expected, result.toAntlr4())
def test_empty_option_to_optional_selection(self):
    """An explicit empty alternative turns the selection into an option."""
    source = "a | | c"
    expected = "( a | c )?"
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_empty_option_to_optional(tree)
    self.assertTextEq(expected, result.toAntlr4())
def _optimize_ps_parameter_identifier(rules):
    """Rebuild "ps_parameter_identifier" on top of package_or_class_scoped_id.

    Original rule:
        ( ( package_scope | class_scope )? | (
          identifier ( LSQUARE_BR constant_expression RSQUARE_BR )? DOT )*
        ) identifier
    """
    rule = rule_by_name(rules, "ps_parameter_identifier")
    rule.body = Antlr4parser().from_str("""
        package_or_class_scoped_id
        ( DOT identifier ( LSQUARE_BR constant_expression RSQUARE_BR )? )*
    """)
def rm_ambiguity(rules):
    """Make ``ASSIGN class_new`` mandatory in "variable_decl_assignment".

    The optional wrapper around it caused an ambiguity; the option is
    unwrapped to its inner body, everything else is left untouched.
    """
    optional_assign = Antlr4parser().from_str("( ASSIGN class_new )?")
    target_rule = rule_by_name(rules, "variable_decl_assignment")

    def unwrap_optional(o):
        # only the matching optional group is replaced by its body
        if o == optional_assign:
            return o.body

    replace_item_by_sequence(target_rule, unwrap_optional)
def optimise_subroutine_call(rules):
    """
    Restructure the "subroutine_call" rule so that chained method calls are
    parsed iteratively through a shared "subroutine_call_args" suffix rule
    instead of the original prefix-qualified alternatives.

    :note: the asserts pin the expected shape of each alternative before it
        is rewritten, so grammar changes upstream fail loudly here.
    """
    r = rule_by_name(rules, "subroutine_call")
    Antlr4GenericOptimizer().optimize([r, ])
    # expected shape of alternative 0 after the generic optimization
    c0 = Antlr4parser().from_str("""
        ( class_qualifier | ( primary | implicit_class_handle ) DOT )?
        ( identifier ( attribute_instance )*
          ( LPAREN list_of_arguments RPAREN )?
          | array_method_name ( attribute_instance )*
            ( LPAREN list_of_arguments RPAREN )?
            ( KW_WITH LPAREN expression RPAREN )?
          | randomize_call )
    """)
    assert r.body[0].eq_relaxed(c0), r.body[0]
    # common argument-list suffix shared by all call forms
    subroutine_call_args = Antlr4Rule(
        "subroutine_call_args", Antlr4parser().from_str("""
            ( attribute_instance )* ( LPAREN list_of_arguments RPAREN )?
            ( KW_WITH LPAREN expression RPAREN )?
        """))
    rules.insert(rules.index(r), subroutine_call_args)
    # alternative 0 rewritten as an iterative DOT-separated chain
    new_c0 = Antlr4parser().from_str("""
        ( primary_no_cast_no_call | cast ) subroutine_call_args
        ( DOT ( array_method_name | randomize_call
                | primary_no_cast_no_call | cast ) subroutine_call_args )*
    """)
    r.body[0] = new_c0
    # primary_no_cast_no_call is now referenced from new_c0; the plain
    # alternative is removed from "primary" to avoid duplication
    primary = rule_by_name(rules, "primary")
    assert primary.body[0].eq_relaxed(
        Antlr4Symbol("primary_no_cast_no_call", False))
    del primary.body[0]
    # system task/function call alternative: simplify the argument variants
    c2 = Antlr4parser().from_str("""
        any_system_tf_identifier ( LPAREN (
            list_of_arguments
            | data_type ( COMMA expression )?
            | expression ( COMMA ( expression )? )*
              ( COMMA ( clocking_event )? )?
        ) RPAREN )?
    """)
    assert r.body[2].eq_relaxed(c2)
    r.body[2] = Antlr4parser().from_str("""
        any_system_tf_identifier ( LPAREN (
            ( data_type )? list_of_arguments ( COMMA clocking_event )?
        ) RPAREN )?
    """)
    # plain hierarchical-identifier call alternative is removed
    # (subsumed by the rewritten alternative 0)
    c1 = Antlr4parser().from_str("""
        ps_or_hierarchical_identifier ( attribute_instance )*
        ( LPAREN list_of_arguments RPAREN )?
    """)
    assert r.body[1].eq_relaxed(c1), r.body[1]
    del r.body[1]
def optimize_primary(rules):
    """Simplify the hierarchical-id alternatives of primary_no_cast_no_call.

    Alternative 5 is switched to the variant with a plain (non-constant)
    select and alternative 8 (let_expression, which is just a call) is
    dropped.
    """
    rule = rule_by_name(rules, "primary_no_cast_no_call")

    def check_alt(index, expected_str):
        expected = Antlr4parser().from_str(expected_str)
        assert rule.body[index].eq_relaxed(expected), rule.body[index]

    check_alt(5, "package_or_class_scoped_hier_id_with_const_select select")
    check_alt(8, "let_expression")  # is just call
    rule.body[5] = Antlr4parser().from_str(
        "package_or_class_scoped_hier_id_with_select")
    del rule.body[8]
def fix_implications(rules):
    """Merge ``( polarity_operator )? IMPLIES`` pairs into a single
    "any_implication" token reference.

    :note: variants of implications are as a independent tokens, otherwise
        lexer parses it as -= > instead of - =>

    Fix: the pattern needs two consecutive sequence items; the original
    indexed ``o[i + 1]`` without a bounds check and raised IndexError when
    an item matching ``( polarity_operator )?`` was the last element of a
    sequence.
    """
    any_impl_rule = Antlr4Rule(
        "any_implication",
        Antlr4parser().from_str("IMPLIES | IMPLIES_P | IMPLIES_N"))
    orig = Antlr4parser().from_str("( polarity_operator )? IMPLIES")

    def apply_rewrite(o):
        if isinstance(o, Antlr4Sequence):
            found_i = None
            for i, o2 in enumerate(o):
                # guard i + 1: the match consumes two consecutive items
                if (i + 1 < len(o)
                        and o2.eq_relaxed(orig[0])
                        and o[i + 1].eq_relaxed(orig[1])):
                    found_i = i
                    break
            if found_i is not None:
                # drop the IMPLIES token and replace the optional polarity
                # operator with the merged any_implication reference
                del o[found_i + 1]
                o[found_i] = Antlr4Symbol(any_impl_rule.name, False)

    for r in rules:
        replace_item_by_sequence(r, apply_rewrite)
    rules.append(any_impl_rule)
def add_predicated_for_CLONE_ID_after_obj(rules, target_lang):
    """Guard every ``( COLON identifier )?`` with a lookahead predicate.

    Each occurrence becomes a selection between the original body and a
    semantic predicate checking that the next token is not COLON.
    """
    optional_label = Antlr4parser().from_str("( COLON identifier )?")
    la1 = target_lang.LA(1)

    def rewrite(o: iAntlr4GramElem):
        if o == optional_label:
            predicate_branch = Antlr4Sequence([
                Antlr4Symbol("{%s != COLON}?" % la1, True, True),
            ])
            return Antlr4Selection([o.body, predicate_branch])

    for r in rules:
        replace_item_by_sequence(r.body, rewrite)
def numbers_add_whitespace_after_base(rules):
    """Allow white space inside based number literals and merge the based
    number tokens into a single ANY_BASED_NUMBER token.

    Fix: removed a stray no-op statement
    (``Antlr4Option(Antlr4Symbol("UNSIGNED_NUMBER", False)),``) that built
    an object/tuple and immediately discarded it.
    """
    number_rules = {
        "DECIMAL_NUMBER_WITH_BASE",
        "DECIMAL_INVALID_NUMBER_WITH_BASE",
        "DECIMAL_TRISTATE_NUMBER_WITH_BASE",
        "BINARY_NUMBER",
        "OCTAL_NUMBER",
        "HEX_NUMBER",
    }
    number_base_rules = {
        "DECIMAL_BASE",
        "BINARY_BASE",
        "OCTAL_BASE",
        "HEX_BASE",
    }
    # used only in integral_number
    inline_rule(rules, "decimal_number")

    def opt_ws():
        # fresh optional WHITE_SPACE node per call (nodes must not be shared)
        return Antlr4Option(Antlr4Symbol("WHITE_SPACE", False))

    for r in rules:
        if r.name in number_rules:
            # ( SIZE )? *_BASE ....
            assert r.body[0].body.symbol == "SIZE", r
            assert r.body[1].symbol.endswith("_BASE"), r
            # the size prefix moves to integral_number; token becomes fragment
            del r.body[0]
            r.is_fragment = True
        elif r.name in number_base_rules:
            # APOSTROPHE ( [sS] )? [dD];
            # insert at the higher index first so the positions keep
            # referring to the original body items
            r.body.insert(2, opt_ws())
            r.body.insert(1, opt_ws())
            r.body.append(opt_ws())
    # NOTE(review): iterating the ``number_rules`` set gives a
    # non-deterministic alternative order in ANY_BASED_NUMBER — confirm
    # whether a stable order is required for reproducible grammars.
    any_based_number = Antlr4Rule(
        "ANY_BASED_NUMBER",
        Antlr4Selection([Antlr4Symbol(n, False) for n in number_rules]))
    rules.insert(rules.index(rule_by_name(rules, "HEX_NUMBER")),
                 any_based_number)
    integral_number = rule_by_name(rules, "integral_number")
    integral_number.body = Antlr4parser().from_str("""
        ( UNSIGNED_NUMBER )? ANY_BASED_NUMBER
        | UNSIGNED_NUMBER
    """)
def test_common_suffix(self):
    """Alternatives ending with the same suffix get it factored out."""
    source = """
        x0 a b c
        | d
        | x1 ( x2 )? a b c
    """
    expected = """
        ( x0 a b | x1 ( x2 )? a b ) c
        | d
    """
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_share_suffix(tree)
    self.assertTextEq(expected, result.toAntlr4())
def test_common_prefix(self):
    """Prefix sharing can be applied repeatedly to nested selections."""
    source = """
        a ( b c d )?
        | a b e
        | a b f
    """
    expected_first = """
        a ( ( b c d )? | b e | b f )
    """
    expected_second = """
        ( b c d )? | b ( e | f )
    """
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_share_prefix(tree)
    self.assertTextEq(expected_first, result.toAntlr4())
    # second pass on the nested selection factors the shared "b"
    result, _ = _selection_share_prefix(result[1])
    self.assertTextEq(expected_second, result.toAntlr4())
def test_sequence_flatten(self):
    """Nested plain sequences are flattened; selections are preserved."""
    source = "a b c ( d e f ) ( ) ( g ) ( h | i )"
    expected = "a b c d e f g ( h | i )"
    tree = Antlr4parser().from_str(source)
    result, _ = _sequence_flatten(tree)
    self.assertTextEq(expected, result.toAntlr4())
def solve_left_recurse_and_op_precedence_for_expression(rules):
    """Replace the left-recursive "expression" rule by a chain of
    precedence-stratified rules (expression_0 ... expression).

    expression_0 receives the non-left-recursive alternatives; then each
    operator precedence group gets its own expression_<i> rule, the last
    one being named "expression" again.
    """
    # split_rule(rules, "expression",
    #            ["inside_expression"],
    #            "expression_no_inside")
    # replace_symbol_in_rule(
    #     rules, "inside_expression",
    #     "expression",
    #     "expression_no_inside")
    # iterate_everything_except_first(
    #     rules, "inside_expression")
    #
    # # cond_predicate starting with expression_no_conditional
    # # instead of expression
    # # expression_no_conditional
    # split_rule(rules, "expression_no_inside",
    #            ["conditional_expression"],
    #            "expression_no_conditional")
    # expression only from rules for highest precedence ops etc.
    # expression:
    #     ( unary_operator ( attribute_instance )* )? primary
    #     | inc_or_dec_expression
    #     | LPAREN operator_assignment RPAREN
    #     | expression binary_operator ( attribute_instance )* expression
    #     | conditional_expression
    #     | expression KW_INSIDE LBRACE open_range_list RBRACE
    #     | tagged_union_expression
    # ;
    p = Antlr4parser()
    # pull the non-left-recursive alternatives out into "expression_0"
    expression_0 = extract_option_as_rule(
        rules, "expression", [
            (0, p.from_str(
                "( unary_operator ( attribute_instance )* )? primary")),
            (1, p.from_str("inc_or_dec_expression")),
            (2, p.from_str("LPAREN operator_assignment RPAREN")),
            (6, p.from_str("tagged_union_expression")),
        ], "expression_0",
    )
    # remaining (left-recursive) alternatives:
    # expression:
    #     | expression binary_operator ( attribute_instance )* expression
    #     | conditional_expression
    #     | expression KW_INSIDE LBRACE open_range_list RBRACE;

    def handle_conditional_fn(bin_op_choices, current_expr_rule):
        # rm left recursion from cond_predicate/conditional_expression
        cond_predicate = rule_by_name(rules, "cond_predicate")
        conditional_expression = rule_by_name(rules,
                                              "conditional_expression")
        rules.remove(conditional_expression)
        _inline_rule([conditional_expression, ], cond_predicate)
        # keep everything after the leading (left-recursive) expression item
        bin_op_choices.append(
            Antlr4Sequence(conditional_expression.body[1:]))

    def handle_inside_fn(bin_op_choices, current_expr_rule):
        # expression (KW_INSIDE LBRACE open_range_list RBRACE)*;
        bin_op_choice = Antlr4parser().from_str(
            "KW_INSIDE LBRACE open_range_list RBRACE")
        bin_op_choices.append(bin_op_choice)

    rules.remove(rule_by_name(rules, "expression"))
    current_expr_rule = expression_0
    op_group = get_operator_precedence_groups()
    for i, prec_group in enumerate(op_group):
        # the rule for the lowest-precedence group takes back the
        # original "expression" name
        is_last = i == len(op_group) - 1
        if is_last:
            new_rule_name = "expression"
        else:
            new_rule_name = "expression_%d" % (i + 1)
        current_expr_rule = extract_bin_ops(
            rules, current_expr_rule, prec_group, new_rule_name,
            handle_conditional_fn, handle_inside_fn)
def test_common_suffix_dual(self):
    """Two independent suffix groups are factored in a single pass."""
    source = "a c | b c | x0 y | x1 y"
    expected = "( a | b ) c | ( x0 | x1 ) y"
    tree = Antlr4parser().from_str(source)
    result, _ = _selection_share_suffix(tree)
    self.assertTextEq(expected, result.toAntlr4())
def handle_inside_fn(bin_op_choices, current_expr_rule):
    """Append the "inside" operator alternative to the choice list.

    Target form: expression (KW_INSIDE LBRACE open_range_list RBRACE)*;
    """
    inside_alt = Antlr4parser().from_str(
        "KW_INSIDE LBRACE open_range_list RBRACE")
    bin_op_choices.append(inside_alt)
def rm_option_from_eps_rules(p): already_eps_rules = [ "tf_port_list", "data_type_or_implicit", "list_of_arguments", "let_list_of_arguments", "list_of_port_connections", "list_of_checker_port_connections", "sequence_list_of_arguments", "property_list_of_arguments", ] # because it already can be eps for r in already_eps_rules: rm_option_on_rule_usage(p.rules, r) # fix optinality on datatypes r = rule_by_name(p.rules, "implicit_data_type") # : (signing)? (packed_dimension)* # -> # : signing (packed_dimension)* # | (packed_dimension)+ # ; r.body = Antlr4parser().from_str("signing ( packed_dimension )* | ( packed_dimension )+") _inline_rules(p.rules, ["variable_port_header", "net_port_header", "interface_port_header"]) # make data_type_or_implicit optional for r in p.rules: to_optional = [ "port", "function_data_type_or_implicit", "var_data_type", "property_formal_type", "let_formal_type", "net_port_type"] if r.name not in ["sequence_formal_type", "let_formal_type", ]: to_optional.append("data_type_or_implicit") if r.name not in ["data_type_or_implicit", "function_data_type_or_implicit"]: to_optional.append("implicit_data_type") if r.name != "property_formal_type": to_optional.append("sequence_formal_type") def match_replace_fn(o): if isinstance(o, Antlr4Symbol) and o.symbol in to_optional: return Antlr4Option(o) replace_item_by_sequence(r, match_replace_fn) if r.name == "net_port_type": # net_port_type: # ( net_type )? data_type_or_implicit # | identifier # | KW_INTERCONNECT implicit_data_type; r.body[1] = Antlr4parser().from_str("data_type_or_implicit") r.body[0] = Antlr4parser().from_str("net_type ( data_type_or_implicit )?") port = rule_by_name(p.rules, "port") # ( port_expression )? # | DOT identifier LPAREN ( port_expression )? 
RPAREN; port.body[0] = Antlr4Symbol("port_expression", False) # var_data_type: data_type | KW_VAR data_type_or_implicit; # var_data_type = rule_by_name(p.rules, "var_data_type") # var_data_type.body = Antlr4parser().from_str("KW_VAR ( data_type_or_implicit )? | data_type_or_implicit") pa = Antlr4parser() data_declaration = rule_by_name(p.rules, "data_declaration") assert data_declaration.body[0].eq_relaxed( pa.from_str("""( KW_CONST )? ( KW_VAR )? ( lifetime )? ( data_type_or_implicit )? list_of_variable_decl_assignments SEMI""")) data_declaration.body[0] = pa.from_str("""( KW_CONST )? ( KW_VAR ( lifetime )? ( data_type_or_implicit )? | ( lifetime )? data_type_or_implicit ) list_of_variable_decl_assignments SEMI""")
def par(s):
    """Shorthand: parse a grammar fragment string into its AST form."""
    parser = Antlr4parser()
    return parser.from_str(s)
def rule_from_str(rule_str):
    """Build an Antlr4Rule from a ``"name: body"`` definition string.

    Fix: split on the first colon only, so the rule body itself may
    contain colons; the original ``rule_str.split(":")`` raised
    ValueError ("too many values to unpack") for such inputs.

    :param rule_str: rule definition in ``name: body`` form
    :return: Antlr4Rule with the stripped name and parsed body
    """
    name, body = rule_str.split(":", 1)
    return Antlr4Rule(name.strip(), Antlr4parser().from_str(body))
def assert_eq(index, s):
    # NOTE: relies on ``primary_no_cast_no_call`` from the enclosing scope
    expected = Antlr4parser().from_str(s)
    actual = primary_no_cast_no_call.body[index]
    assert actual.eq_relaxed(expected), actual
def run_cmp(self, a: str, b: str, can_rename=lambda x: True):
    """Parse both grammar strings and compare them structurally.

    :return: tuple (equality flag, mapping of symbols considered equal)
    """
    parsed_a = Antlr4parser().from_str(a)
    parsed_b = Antlr4parser().from_str(b)
    comparator = Antlr4SyntCmp()
    are_equal = comparator.eq(parsed_a, parsed_b, can_rename=can_rename)
    return are_equal, comparator.eq_symbols
def optimize_constant_expression(rules):
    """Remove left recursion and encode operator precedence for
    "constant_expression", then flatten "constant_primary".

    Original rule:
    constant_expression:
        ( unary_operator ( attribute_instance )* )? constant_primary
        | constant_expression (
              QUESTIONMARK ( attribute_instance )*
              constant_expression COLON
              | binary_operator ( attribute_instance )*
          ) constant_expression
    ;
    """
    p = Antlr4parser()
    constant_expression = rule_by_name(rules, "constant_expression")
    assert constant_expression.body.eq_relaxed(
        p.from_str("""
            ( unary_operator ( attribute_instance )* )? constant_primary
            | constant_expression (
                binary_operator ( attribute_instance )*
                | QUESTIONMARK ( attribute_instance )*
                  constant_expression COLON
              ) constant_expression
        """)), constant_expression
    # base (non-binary) alternatives
    constant_expression.body = p.from_str("""
        constant_primary
        | unary_operator constant_expression
        | constant_expression QUESTIONMARK ( attribute_instance )*
          constant_expression COLON
    """)
    # append one directly-left-recursive alternative per precedence group;
    # ANTLR 4 resolves precedence of direct left recursion from the
    # alternative order
    op_group = get_operator_precedence_groups()
    for g in op_group:
        if g == ["QUESTIONMARK", ]:
            alternative = p.from_str(
                "constant_expression QUESTIONMARK ( attribute_instance )*"
                " constant_expression COLON constant_expression")
        else:
            alternative = p.from_str(
                "constant_expression PLACEHOLDER ( attribute_instance )*"
                " constant_expression")
        # KW_DIST / KW_INSIDE do not apply to constant expressions
        g = [i for i in g if i not in ["KW_DIST", "KW_INSIDE"]]
        o = p.from_str(" | ".join(g))
        # substitute the operator selection at the operator position
        alternative[1] = o
        constant_expression.body.append(alternative)
    constant_expression.optimalizer_keep_out = True
    inline_rule(rules, "simple_type")
    inline_rule(rules, "casting_type")
    inline_rule(rules, "constant_cast")
    constant_primary = rule_by_name(rules, "constant_primary")
    # NOTE(review): the ``constant_primary APOSTROPHE LPAREN
    # constant_expression`` alternative below appears to be missing a
    # closing RPAREN — verify against the grammar before changing
    constant_primary.body = p.from_str("""
        ( LPAREN constant_mintypmax_expression
          | ( KW_STRING | KW_CONST | integer_type | non_integer_type
              | ps_type_identifier | ps_parameter_identifier | signing )
            APOSTROPHE LPAREN constant_expression
        ) RPAREN
        | KW_NULL
        | primary_literal
        | ps_parameter_identifier constant_select
        | identifier ( LSQUARE_BR constant_range_expression RSQUARE_BR
                       | constant_select )
        | package_or_class_scoped_id
        | ( constant_concatenation | constant_multiple_concatenation )
          ( LSQUARE_BR constant_range_expression RSQUARE_BR )?
        | any_system_tf_identifier ( LPAREN ( data_type COMMA )?
            list_of_arguments ( COMMA clocking_event )? RPAREN )?
        | ( KW_STD DOUBLE_COLON )? randomize_call
        | let_expression
        | assignment_pattern_expression
        | type_reference
        | constant_primary APOSTROPHE LPAREN constant_expression
        | constant_primary ( attribute_instance )* LPAREN
          list_of_arguments RPAREN ( KW_WITH LPAREN expression RPAREN )?
        | constant_primary ( attribute_instance )* KW_WITH LPAREN
          expression RPAREN
    """)
    constant_primary.optimalizer_keep_out = True
def _optimize_ps_type_identifier(rules):
    """Rebuild "ps_type_identifier" on top of package_or_class_scoped_id.

    Fix: the original looked up ``"ps_parameter_identifier"`` (copy-paste
    from ``_optimize_ps_parameter_identifier``), so ps_type_identifier was
    never rewritten and ps_parameter_identifier was overwritten with the
    wrong body (to be overwritten again later).
    """
    ps_type_identifier = rule_by_name(rules, "ps_type_identifier")
    # ps_type_identifier:
    #     ( KW_LOCAL DOUBLE_COLON | package_scope | class_scope )? identifier;
    ps_type_identifier.body = Antlr4parser().from_str("""
        ( KW_LOCAL DOUBLE_COLON )? package_or_class_scoped_id
    """)
def test_optimize0(self):
    """Suffix sharing on a data_type-like rule, stepwise and end-to-end."""
    source = """
        a ( b )? ( c )*
        | d ( b )?
        | e
        | f ( kw0 ( b )? )? kw1 f0 ( f0 )* kw2 ( c )*
        | kw3 ( a0 )? kw1 a1 ( kw4 a1 )* kw2 ( c )*
        | kw5
        | kw6
        | kw7 ( kw8 )? a2 ( a3 )? ( kw8 a2 )?
        | ( a4 | a5 )? a2 ( c )*
        | a6
        | kw9
        | a7
        | a8
    """
    expected_pass0 = """
        ( a ( b )?
          | f ( kw0 ( b )? )? kw1 f0 ( f0 )* kw2
          | kw3 ( a0 )? kw1 a1 ( kw4 a1 )* kw2
          | ( a4 | a5 )? a2 ) ( c )*
        | d ( b )?
        | e
        | kw5
        | kw6
        | kw7 ( kw8 )? a2 ( a3 )? ( kw8 a2 )?
        | a6
        | kw9
        | a7
        | a8
    """
    tree = Antlr4parser().from_str(source)
    _selection_options_to_sequnces(tree)
    result, _ = _selection_share_suffix(tree)
    self.assertTextEq(expected_pass0, result.toAntlr4())
    # second pass on the factored group shares the trailing kw2
    expected_pass1 = """
        a ( b )?
        | ( f ( kw0 ( b )? )? kw1 f0 ( f0 )*
            | kw3 ( a0 )? kw1 a1 ( kw4 a1 )* ) kw2
        | ( a4 | a5 )? a2
    """
    _selection_options_to_sequnces(tree[0][0])
    result, _ = _selection_share_suffix(tree[0][0])
    self.assertTextEq(expected_pass1, result.toAntlr4())
    # the generic optimizer should reach the same fully-factored form
    expected_full = """
        ( a ( b )?
          | ( f ( kw0 ( b )? )? kw1 f0 ( f0 )*
              | kw3 ( a0 )? kw1 a1 ( kw4 a1 )* ) kw2
          | ( a4 | a5 )? a2 ) ( c )*
        | d ( b )?
        | e
        | kw5
        | kw6
        | kw7 ( kw8 )? a2 ( a3 )? ( kw8 a2 )?
        | a6
        | kw9
        | a7
        | a8
    """
    tree = Antlr4parser().from_str(source)
    Antlr4GenericOptimizer().optimize([Antlr4Rule("tmp", tree), ])
    self.assertTextEq(expected_full, tree.toAntlr4())
def optimize_class_scope(rules):
    """Merge package_scope/class_scope (+identifier) usages into new
    combined rules (package_or_class_scoped_id/path/hier_id_*) and drop
    the now-unused "class_qualifier" rule.
    """
    p = Antlr4parser()
    to_replace0 = p.from_str("( package_scope | class_scope )? identifier")
    to_replace1 = p.from_str("( class_scope | package_scope )? identifier")
    package_or_class_scoped_id = Antlr4Rule(
        "package_or_class_scoped_id", p.from_str(
            """( identifier ( parameter_value_assignment )?
                | KW_DOLAR_UNIT )
               ( DOUBLE_COLON identifier
                 ( parameter_value_assignment )? )*"""))
    rules.append(package_or_class_scoped_id)

    def match_replace_fn_reduce_1_item_sequence(o):
        # unwrap single-item sequences so the queries below can match
        if isinstance(o, Antlr4Sequence) and len(o) == 1:
            return o[0]

    q0 = Antlr4Query(to_replace0)
    q1 = Antlr4Query(to_replace1)
    for r in rules:
        replace_item_by_sequence(r,
                                 match_replace_fn_reduce_1_item_sequence)
        # if r.name == "net_type_declaration":
        #     print(r.toAntlr4())
        m = q0.match(r.body)
        if not m:
            m = q1.match(r.body)
        if m:
            def apply_to_replace0_and_1(o):
                # matches are keyed by object id; the identifier item is
                # replaced by the combined rule, all other matched items
                # are erased (replaced by an empty sequence)
                for match in m:
                    v = match.get(id(o), None)
                    if v is not None:
                        del match[id(o)]
                        if (v is to_replace0 or v is to_replace1
                                or (isinstance(v, Antlr4Symbol)
                                    and v.symbol == "identifier")):
                            return Antlr4Symbol(
                                package_or_class_scoped_id.name, False)
                        else:
                            return Antlr4Sequence([])

            replace_item_by_sequence(r, apply_to_replace0_and_1)
            for _m in m:
                # assert that all matching items were replaced
                assert not _m
            # print(r.toAntlr4())
            # print(m)
        # else:
        #     if "package_scope | class_scope" in r.toAntlr4() \
        #             or "class_scope | package_scope" in r.toAntlr4():
        #         print("not found " + r.toAntlr4())
    # class_qualifier:
    #     ( KW_LOCAL DOUBLE_COLON )? ( implicit_class_handle DOT
    #                                  | class_scope )?;
    # class_scope:
    #     ps_identifier ( parameter_value_assignment )?
    #     ( DOUBLE_COLON identifier
    #       ( parameter_value_assignment )?
    #     )* DOUBLE_COLON;
    # implicit_class_handle:
    #     KW_THIS ( DOT KW_SUPER )?
    #     | KW_SUPER
    # ;
    # package_scope:
    #     ( KW_DOLAR_UNIT
    #     | identifier
    #     ) DOUBLE_COLON;
    # hierarchical_identifier:
    #     ( KW_DOLAR_ROOT DOT )?
    #     ( identifier constant_bit_select DOT )* identifier;
    to_replace2 = p.from_str(
        "( class_qualifier | package_scope )? hierarchical_identifier")
    package_or_class_scoped_path = Antlr4Rule(
        "package_or_class_scoped_path", p.from_str("""
            ( KW_LOCAL DOUBLE_COLON )?
            ( KW_DOLAR_ROOT
              | implicit_class_handle
              | ( ( KW_DOLAR_UNIT
                    | identifier ( parameter_value_assignment )? )
                  ( DOUBLE_COLON identifier
                    ( parameter_value_assignment )? )* ) )
        """))
    package_or_class_scoped_hier_id_with_const_select = Antlr4Rule(
        "package_or_class_scoped_hier_id_with_const_select", p.from_str("""
            package_or_class_scoped_path ( constant_bit_select )*
            ( DOT identifier ( constant_bit_select )* )*
        """))
    # bit_select:
    #     LSQUARE_BR expression RSQUARE_BR;
    # select:
    #     ( DOT identifier
    #     | bit_select
    #     )* ( LSQUARE_BR part_select_range RSQUARE_BR )?;
    # part_select_range:
    #     constant_range
    #     | indexed_range
    # ;
    # indexed_range:
    #     expression ( PLUS
    #     | MINUS
    #     ) COLON constant_expression;
    # constant_range:
    #     constant_expression COLON constant_expression;
    package_or_class_scoped_hier_id_with_select = Antlr4Rule(
        "package_or_class_scoped_hier_id_with_select", p.from_str("""
            package_or_class_scoped_path ( bit_select )*
            ( DOT identifier ( bit_select )* )*
            ( LSQUARE_BR expression ( PLUS | MINUS )? COLON
              constant_expression RSQUARE_BR )?
        """))
    rules.append(package_or_class_scoped_path)
    rules.append(package_or_class_scoped_hier_id_with_const_select)
    rules.append(package_or_class_scoped_hier_id_with_select)
    primary_no_cast_no_call = rule_by_name(rules, "primary_no_cast_no_call")
    m = Antlr4Query(to_replace2).match(primary_no_cast_no_call.body)

    def apply_to_replace2(o):
        # same scheme as apply_to_replace0_and_1, for the
        # hierarchical-identifier pattern
        for match in m:
            v = match.get(id(o), None)
            if v is not None:
                if (v is to_replace2
                        or (isinstance(v, Antlr4Symbol)
                            and v.symbol == "hierarchical_identifier")):
                    return Antlr4Symbol(
                        package_or_class_scoped_hier_id_with_const_select.name,
                        False)
                else:
                    return Antlr4Sequence([])

    replace_item_by_sequence(primary_no_cast_no_call, apply_to_replace2)
    _optimize_ps_type_identifier(rules)
    _optimize_ps_parameter_identifier(rules)
    rules.remove(rule_by_name(rules, "class_qualifier"))