def test_ModifiersArgument():
    parsed1 = peg.parse("modifiers=LevelFive", xkbparse.ModifiersArgument)
    assert parsed1[0] == "LevelFive"

    parsed2 = peg.parse("modifiers=LevelFive+Shift", xkbparse.ModifiersArgument)
    assert parsed2[0] == "LevelFive"
    assert parsed2[1] == "Shift"

def test_comment(self):
    """Test that simple comments are parsed correctly"""
    p = parse(self.comment, Comment)
    self.assertEqual(p, self.comment.replace("#", ""))
    p = parse(self.extra_white_space, Comment)
    self.assertEqual(p, self.extra_white_space.replace("#", ""))
    p = parse(self.empty_comment, Comment)
    self.assertEqual(p, self.empty_comment.replace("#", ""))

def test_KeySymLevels():
    with open(os.path.join(TEST_DATA_DIR, "list_AE01")) as f:
        parsed = peg.parse(f.read(), xkbparse.KeySymList)
    assert parsed[0].name == "1"
    assert parsed[1].name == "exclam"
    assert parsed[2].name == "onesuperior"
    assert parsed[3].name == "exclamdown"

    with open(os.path.join(TEST_DATA_DIR, "list_empty")) as f:
        parsed = peg.parse(f.read(), xkbparse.KeySymList)
    assert len(parsed) == 0

def test_basics(root):
    parse('=', mkBasic(b'Eq', '='))
    s = b'\r\r'
    print(root.grammar.lineSep)
    LS = mkBasic(b'LS', root.grammar.lineSep)

    class LS(object):
        grammar = (re.compile('\rr'),)

        def __init__(*a, **kw):
            print(repr((a, kw)))

    assert 'xyz' == parse(s, LS, whitespace='')
    assert 'xyz' == parse(s, LS, whitespace='')
    s = b'\n'
    assert b'abc' == parse(s, mkBasic(b'LSL', root.grammar.lineSepLax.pattern))

def get_1_for_type(type_):
    identity_map = {
        'vec2': pypeg2.parse('vec2(1.f)', InvocationExpression),
        'vec3': pypeg2.parse('vec3(1.f)', InvocationExpression),
        'vec4': pypeg2.parse('vec4(1.f)', InvocationExpression),
        'float': '1.0f',
        'int': '1'
    }
    if type_ in identity_map:
        return identity_map[type_]
    else:
        throw_not_implemented_error(type_, 'additives identities for types')

def get_0_for_type(type_):
    identity_map = {
        'vec2': pypeg2.parse('vec2(0.f)', InvocationExpression),
        'vec3': pypeg2.parse('vec3(0.f)', InvocationExpression),
        'vec4': pypeg2.parse('vec4(0.f)', InvocationExpression),
        'float': '0.0f',
        'int': '0'
    }
    if type_ in identity_map:
        return identity_map[type_]
    else:
        throw_not_implemented_error(type_, 'multiplicative identities for types')

def invenio_query(pattern, index='records-hep'):
    # Enhance query first
    # for enhancer in query_enhancers():
    #     enhancer(self, **kwargs)
    try:
        query = pypeg2.parse(pattern, Main, whitespace="")
        for walker in walkers:
            query = query.accept(walker)
    except SyntaxError:
        query = MalformedQuery("")

    try:
        search_walker = ElasticSearchNoKeywordsDSL()
        query.accept(search_walker)
        query = Q('multi_match',
                  query=pattern,
                  fields=get_fields_by_index(index),
                  zero_terms_query="all")
    except QueryHasKeywords:
        query = query.accept(ElasticSearchDSL(
            current_app.config.get(
                "SEARCH_ELASTIC_KEYWORD_MAPPING", {}
            )
        ))
    finally:
        if current_app.debug:
            current_app.logger.info(json.dumps(query.to_dict(), indent=4))
        return query

def invenio_query(pattern, search):
    # Enhance query first
    # for enhancer in query_enhancers():
    #     enhancer(self, **kwargs)
    try:
        query = pypeg2.parse(pattern, Main, whitespace="")
        for walker in walkers:
            query = query.accept(walker)
    except SyntaxError:
        query = MalformedQuery("")

    try:
        search_walker = ElasticSearchNoKeywordsDSL()
        query.accept(search_walker)
        query = Q('multi_match',
                  query=pattern,
                  fields=search.default_fields(),
                  zero_terms_query="all")
    except QueryHasKeywords:
        query = query.accept(
            ElasticSearchDSL(
                current_app.config.get("SEARCH_ELASTIC_KEYWORD_MAPPING", {})))
    finally:
        if current_app.debug:
            current_app.logger.info(json.dumps(query.to_dict(), indent=4))
        return query

def convert_text(input_text):
    input_text = input_text.replace('try_', 'get_')
    input_glsl = pypeg2.parse(input_text, pypeg2glsl.glsl)
    output_glsl = convert_glsl(input_glsl)
    output_text = pypeg2.compose(output_glsl, pypeg2glsl.glsl, autoblank=False)
    output_text = APPENDED + output_text
    return output_text

def parse_headers(content_disposition):
    """Build a ContentDisposition from header values."""
    # https://bitbucket.org/logilab/pylint/issue/492/
    # pylint: disable=no-member

    # We allow non-ascii here (it will only be parsed inside of qdtext, and
    # rejected by the grammar if it appears in other places), although parsing
    # it can be ambiguous. Parsing it ensures that a non-ambiguous filename*
    # value won't get dismissed because of an unrelated ambiguity in the
    # filename parameter. But it does mean we occasionally give
    # less-than-certain values for some legacy senders.
    content_disposition = content_disposition.decode('iso-8859-1')
    log.rfc6266.debug("Parsing Content-Disposition: {}".format(
        content_disposition))

    # Our parsing is relaxed in these regards:
    # - The grammar allows a final ';' in the header;
    # - We do LWS-folding, and possibly normalise other broken
    #   whitespace, instead of rejecting non-lws-safe text.
    #   XXX Would prefer to accept only the quoted whitespace
    #   case, rather than normalising everything.
    content_disposition = normalize_ws(content_disposition)
    try:
        parsed = peg.parse(content_disposition, ContentDispositionValue)
    except (SyntaxError, DuplicateParamError, InvalidISO8859Error):
        log.rfc6266.exception("Error while parsing Content-Disposition")
        return ContentDisposition()
    else:
        return ContentDisposition(disposition=parsed.dtype,
                                  assocs=parsed.params)

def test_LatchModsAction():
    parsed = peg.parse("LatchMods(modifiers=LevelFive,clearLocks,latchToLock)",
                       xkbparse.LatchModsAction)
    assert parsed.get_bool("clearLocks")
    assert parsed.get_bool("latchToLock")
    # parsed.set_bool("clearLocks", False)
    print(peg.compose(parsed, xkbparse.LatchModsAction))

def fromIslevString(bidstring):
    # pattern = re.compile(
    #     "(?P<bidder>V|Ø|N|S) (?P<tricks>[1-7])(?P<strain>UT|SP|HJ|RU|KL) *(?P<dbl>[PDR]*)")
    # match = pattern.match(bidstring)
    # print('bidstring >{}<'.format(bidstring))
    # if match:
    #     player = Seat.fromDKId(match.group("bidder"))
    #     tricks = match.group("tricks")
    #     strain = Strain.fromDKString(match.group("strain"))
    #     if not match.group("dbl"):
    #         dbl = 'P'
    #     else:
    #         dbl = match.group("dbl")
    # else:
    #     raise BaseException("bid exception")

    if len(bidstring) < 3:
        # hack to deal with games that won't get played
        return Bid()

    res = parse(bidstring, parsing.Bid)
    if hasattr(res, 'not_played'):
        return Bid()
    elif hasattr(res, 'passed'):
        return Bid(strain=Strain.fromId('P'))
    else:
        res = res.played
        player = Seat.fromDKId(res.seat)
        tricks = int(res.tricks)
        strain = Strain.fromDKString(res.strain)
        if res.dbl == '':
            dbl = 'P'
        else:
            dbl = res.dbl
        return Bid(player, tricks, strain, dbl)

def parse_command(self, command_string=None):
    class DateTime(str):
        grammar = optional([self.date_time_re, self.date_re, self.time_re,
                            self.date_functions_re, self.user_macros_re])

    class AnniversaryName(str):
        grammar = self.anniversary_name_re

    class Add(str):
        grammar = self.add_command_re

    class Delete(str):
        grammar = self.delete_command_re

    class Edit(str):
        grammar = self.edit_command_re

    class Commands(List):
        grammar = [
            (attr("delete", Delete), attr("anniversaryName", AnniversaryName)),
            (attr("edit", Edit), attr("anniversaryName", AnniversaryName),
             attr("dateTime", DateTime)),
            (attr("add", Add), attr("anniversaryName", AnniversaryName),
             attr("dateTime", DateTime))
        ]

    return parse(command_string, Commands)

def parse_command(self, command_string=None):
    class DateTime(str):
        grammar = optional([
            self.date_time_re, self.date_re, self.time_re,
            self.date_functions_re, self.user_macros_re
        ])

    class AnniversaryName(str):
        grammar = self.anniversary_name_re

    class Add(str):
        grammar = self.add_command_re

    class Delete(str):
        grammar = self.delete_command_re

    class Edit(str):
        grammar = self.edit_command_re

    class Commands(List):
        grammar = [(attr("delete", Delete),
                    attr("anniversaryName", AnniversaryName)),
                   (attr("edit", Edit),
                    attr("anniversaryName", AnniversaryName),
                    attr("dateTime", DateTime)),
                   (attr("add", Add),
                    attr("anniversaryName", AnniversaryName),
                    attr("dateTime", DateTime))]

    return parse(command_string, Commands)

def test_multiline_comment(self):
    p = parse(self.multiline, some(Comment))
    # list() so len() and the equality check see concrete sequences (Python 3).
    expected = list(map(str, self.multiline.split("\n")))
    hashes = len(expected) * ["#"]
    blanks = len(expected) * [""]
    expected = list(map(str.replace, expected, hashes, blanks))
    self.assertEqual(p, expected)

def test_table_2():
    text = '| !:tag\n|foo|bar|baz|\n|banana|apple|mango|'
    res = pypeg2.parse(text, nm.parser.Table, whitespace=None)
    assert len(res.tags) == 1
    assert len(res.content) == 2

def test_code_snippets_with_comments(self):
    for (language, snippet) in self.code_snippets:
        code_snippet = self.gen_code_snippet(snippet, language, self.comments)
        p = parse(code_snippet, CodeSnippet)
        self.assertEqual(p, snippet)
        self.assertEqual(list(map(compose, p.comments)), self.comments)
        self.assertEqual(p.language, language)

def parse_headers(content_disposition):
    """Build a ContentDisposition from header values.

    @param content_disposition contents of the disposition header
    @type bytes
    """
    # We allow non-ascii here (it will only be parsed inside of qdtext, and
    # rejected by the grammar if it appears in other places), although
    # parsing it can be ambiguous. Parsing it ensures that a non-ambiguous
    # filename* value won't get dismissed because of an unrelated ambiguity
    # in the filename parameter. But it does mean we occasionally give
    # less-than-certain values for some legacy senders.
    content_disposition = content_disposition.decode('iso-8859-1')

    # Our parsing is relaxed in these regards:
    # - The grammar allows a final ';' in the header;
    # - We do LWS-folding, and possibly normalise other broken
    #   whitespace, instead of rejecting non-lws-safe text.
    #   XXX Would prefer to accept only the quoted whitespace
    #   case, rather than normalising everything.
    content_disposition = normalize_ws(content_disposition)
    try:
        parsed = peg.parse(content_disposition, ContentDispositionValue)
    except (SyntaxError, DuplicateParamError, InvalidISO8859Error):
        return ContentDisposition()
    else:
        return ContentDisposition(disposition=parsed.dtype,
                                  assocs=parsed.params)

def test_list_2():
    text = ' * !:tag\n * Line 1\n * Line 2\n * Line 3'
    res = pypeg2.parse(text, nm.parser.List, whitespace=None)
    assert len(res.tags) == 1
    assert len(res.content) == 3

def test_program_diff_env(self):
    """Test simple 2 line program with different initial conditions."""
    simple_lines = \
        """
        y = x + 3;
        z = y + 5;
        """
    ast = parse(simple_lines, p.Program)
    prog = ast.to_simple()

    env_expected = dict(x=Number(2), y=Number(5), z=Number(10))
    env = dict(x=Number(2))
    env2 = prog.evaluate(env)
    self.assertEqual(len(env_expected), len(env2))
    for x in env_expected.keys():
        self.assertEqual(env_expected[x], env2[x])

    # show that we can reuse the program
    env_expected = dict(x=Number(9), y=Number(12), z=Number(17))
    env2['x'] = Number(9)
    env3 = prog.evaluate(env2)
    self.assertEqual(len(env_expected), len(env3))
    for x in env_expected.keys():
        self.assertEqual(env_expected[x], env3[x])

def parse_headers(content_disposition):
    """Build a ContentDisposition from header values."""
    # pylint: disable=maybe-no-member

    # We allow non-ascii here (it will only be parsed inside of qdtext, and
    # rejected by the grammar if it appears in other places), although parsing
    # it can be ambiguous. Parsing it ensures that a non-ambiguous filename*
    # value won't get dismissed because of an unrelated ambiguity in the
    # filename parameter. But it does mean we occasionally give
    # less-than-certain values for some legacy senders.
    content_disposition = content_disposition.decode('iso-8859-1')
    log.rfc6266.debug(
        "Parsing Content-Disposition: {}".format(content_disposition))

    # Our parsing is relaxed in these regards:
    # - The grammar allows a final ';' in the header;
    # - We do LWS-folding, and possibly normalise other broken
    #   whitespace, instead of rejecting non-lws-safe text.
    #   XXX Would prefer to accept only the quoted whitespace
    #   case, rather than normalising everything.
    content_disposition = normalize_ws(content_disposition)
    try:
        parsed = peg.parse(content_disposition, ContentDispositionValue)
    except (SyntaxError, DuplicateParamError, InvalidISO8859Error):
        log.rfc6266.exception("Error while parsing Content-Disposition")
        return ContentDisposition()
    else:
        return ContentDisposition(disposition=parsed.dtype,
                                  assocs=parsed.params)

def test_paragraph_6():
    text = 'This is a multiline\nparagraph test.'
    res = pypeg2.parse(text, nm.parser.Paragraph, whitespace=None)
    assert len(res.content) == 1
    assert res.content[0].content == text

def load(cls, text):
    # choose a mode, lax (\n newlines, no MLLP block markers) or mllp
    fs, cs, lax = Root.preparse(text)
    self = cls(fieldSep=fs, componentSep=cs, lax=lax)
    language = (self.grammar.InterfaceMessageLax if self.lax
                else self.grammar.InterfaceMessage)
    obj = parse(text.splitlines()[0], language)
    return obj

def invenio_query(pattern, search):
    try:
        query = pypeg2.parse(pattern, Main, whitespace='')
        for walker in walkers:
            query = query.accept(walker)
    except SyntaxError:
        query = MalformedQuery("")

    try:
        search_walker = ElasticSearchNoKeywordsDSL()
        query.accept(search_walker)
        query = Q('multi_match',
                  query=pattern,
                  fields=search.default_fields(),
                  zero_terms_query="all")
    except QueryHasKeywords:
        query = query.accept(ElasticSearchDSL(
            current_app.config.get(
                "SEARCH_ELASTIC_KEYWORD_MAPPING", {}
            )
        ))
    finally:
        return query

def test_if_tx1yx2y(self):
    """Test parsing if statement if (x > y) {...} else {...}."""
    expected = "if (x > y)\n"
    expected += "{\n"
    expected += " x = 1;\n"
    expected += " y = x * 3;\n"
    expected += "}\n"
    expected += "else\n"
    expected += "{\n"
    expected += " x = 2;\n"
    expected += " y = 3 * x;\n"
    expected += "}"
    ast = parse(expected, p.If)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual(expected, c)
    self.assertEqual(
        If(GreaterThan(Variable('x'), Variable('y')),
           Sequence(Assign('x', Number(1)),
                    Assign('y', Multiply(Variable('x'), Number(3)))),
           Sequence(Assign('x', Number(2)),
                    Assign('y', Multiply(Number(3), Variable('x'))))),
        e)

def invenio_query(pattern, search):
    try:
        query = pypeg2.parse(pattern, Main, whitespace='')
        for walker in walkers:
            query = query.accept(walker)
    except SyntaxError:
        query = MalformedQuery("")

    try:
        search_walker = ElasticSearchNoKeywordsDSL()
        query.accept(search_walker)
        query = Q('multi_match',
                  query=pattern,
                  fields=search.default_fields(),
                  zero_terms_query="all")
    except QueryHasKeywords:
        query = query.accept(
            ElasticSearchDSL(
                current_app.config.get("SEARCH_ELASTIC_KEYWORD_MAPPING", {})))
    finally:
        return query

def parse(self):
    '''Parse target'''
    self.escape()
    result = parse(self.target, Clazz, comment=[comment_doc, comment_line])
    return result.object()

def test_or_nxy(self):
    """Test parsing logical or expression !x || y."""
    ast = parse("!x || y", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("!x || y", c)
    self.assertEqual(Or(Not(Variable('x')), Variable('y')), e)

def test_block_1():
    text = '````\nThis is a block\n````'
    res = pypeg2.parse(text, nm.parser.Block, whitespace=None)
    assert len(res.content) == 1
    assert len(res.content[0].content) == 1
    assert res.content[0].content[0].content == 'This is a block'

def test_assign_nxt(self):
    """Test parsing assignment statement."""
    ast = parse("x = !true;", p.Assign)
    c = compose(ast, indent=" ", autoblank=False)
    s = ast.to_simple()
    self.assertEqual("x = !true;", c)
    self.assertEqual(Assign('x', Not(Boolean(True))), s)

def test_or_xf(self):
    """Test parsing logical or expression x || false."""
    ast = parse("x || false", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("x || false", c)
    self.assertEqual(Or(Variable('x'), Boolean(False)), e)

def test_lessthan_2m1m(self):
    """Test parsing less than expression -2 < -1."""
    ast = parse("-2 < -1", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("-2 < -1", c)
    self.assertEqual(LessThan(Number(-2), Number(-1)), e)

def test_greaterthan_x25(self):
    """Test parsing greater than expression x > 2.5."""
    ast = parse("x > 2.5", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("x > 2.5", c)
    self.assertEqual(GreaterThan(Variable('x'), Number(2.5)), e)

def test_subtraction_xy(self):
    """Test parsing subtraction expression x - y."""
    ast = parse("x - y", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("x - y", c)
    self.assertEqual(Subtract(Variable('x'), Variable('y')), e)

def test_number_int_2(self):
    """Test parsing integer literal value -123."""
    ast = parse("-123", p.term_expression)
    c = compose(ast, indent=" ", autoblank=False)
    n = ast.to_simple()
    self.assertEqual("-123", c)
    self.assertEqual(Number(-123), n)

def test_number_fp_1(self):
    """Test parsing floating point literal value 1.23."""
    ast = parse("1.23", p.term_expression)
    c = compose(ast, indent=" ", autoblank=False)
    n = ast.to_simple()
    self.assertEqual("1.23", c)
    self.assertEqual(Number(1.23), n)

def test_or_ft(self):
    """Test parsing logical or expression false || true."""
    ast = parse("false || true", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("false || true", c)
    self.assertEqual(Or(Boolean(False), Boolean(True)), e)

def test_assign_x12(self):
    """Test parsing assignment statement."""
    ast = parse("x = 1 + 2;", p.Assign)
    c = compose(ast, indent=" ", autoblank=False)
    s = ast.to_simple()
    self.assertEqual("x = 1 + 2;", c)
    self.assertEqual(Assign('x', Add(Number(1), Number(2))), s)

def test_and_tt(self):
    """Test parsing logical and expression true && true."""
    ast = parse("true && true", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("true && true", c)
    self.assertEqual(And(Boolean(True), Boolean(True)), e)

def test_assign_xyz(self):
    """Test parsing assignment statement."""
    ast = parse("x = y + z;", p.Assign)
    c = compose(ast, indent=" ", autoblank=False)
    s = ast.to_simple()
    self.assertEqual("x = y + z;", c)
    self.assertEqual(Assign('x', Add(Variable('y'), Variable('z'))), s)

def test_and_1my(self):
    """Test parsing logical and expression -1 && y."""
    ast = parse("-1 && y", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("-1 && y", c)
    self.assertEqual(And(Number(-1), Variable('y')), e)

def test_and_xny(self):
    """Test parsing logical and expression x && !y."""
    ast = parse("x && !y", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("x && !y", c)
    self.assertEqual(And(Variable('x'), Not(Variable('y'))), e)

def test_not_1p1(self):
    """Test parsing logical negation expression !1.1."""
    ast = parse("!1.1", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("!1.1", c)
    self.assertEqual(Not(Number(1.1)), e)

def test_paragraph_10():
    text = 'foo bar [bold][baz]'
    res = pypeg2.parse(text, nm.parser.Paragraph, whitespace=None)
    assert len(res.content) == 2
    assert res.content[0].content == 'foo bar '
    assert res.content[1].content == 'baz'

def test_not_false(self):
    """Test parsing logical negation expression !false."""
    ast = parse("!false", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("!false", c)
    self.assertEqual(Not(Boolean(False)), e)

def test_not_x(self):
    """Test parsing logical negation expression !x."""
    ast = parse("!x", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("!x", c)
    self.assertEqual(Not(Variable('x')), e)

def test_lessthan_15x(self):
    """Test parsing less than expression 1.5 < x."""
    ast = parse("1.5 < x", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("1.5 < x", c)
    self.assertEqual(LessThan(Number(1.5), Variable('x')), e)

def test_paragraph_8():
    text = '[bold][foo] bar baz'
    res = pypeg2.parse(text, nm.parser.Paragraph, whitespace=None)
    assert len(res.content) == 2
    assert res.content[0].content == 'foo'
    assert res.content[1].content == ' bar baz'

def test_lessthan_xy(self):
    """Test parsing less than expression x < y."""
    ast = parse("x < y", p.Expression)
    c = compose(ast, indent=" ", autoblank=False)
    e = ast.to_simple()
    self.assertEqual("x < y", c)
    self.assertEqual(LessThan(Variable('x'), Variable('y')), e)