def parse_element(cls, indent_stack): """Set to the rule the description.""" doc_double = QuotedString(quoteChar='"""', multiline=True) doc_single = QuotedString(quoteChar="'''", multiline=True) doc_string = indentedBlock((doc_double | doc_single), indent_stack) description = (Keyword('description:').suppress() + doc_string) return (description | doc_double | doc_single)\ .setResultsName('description')
def parse_element(cls, indent_stack): """Set. to the rule the list of producers in ``producer`` attribute.""" producer_body = ( Word(alphanums + "_") + originalTextFor(nestedExpr()) + Suppress(',') + PYTHON_ALLOWED_EXPR).setParseAction(lambda toks: { 'code': toks[0], 'params': eval(toks[1]), 'rule': eval(toks[2]) }) return (Keyword('producer:').suppress() + indentedBlock( OneOrMore(producer_body), indent_stack)).setResultsName('producer')
def parse_element(cls, indent_stack): """Set. to the rule the list of producers in ``producer`` attribute.""" producer_body = (Word(alphanums + "_") + originalTextFor(nestedExpr()) + Suppress(',') + PYTHON_ALLOWED_EXPR ).setParseAction( lambda toks: {'code': toks[0], 'params': eval(toks[1]), 'rule': eval(toks[2])}) return (Keyword('producer:').suppress() + indentedBlock(OneOrMore(producer_body), indent_stack) ).setResultsName('producer')
def parse_element(cls, indent_stack): """Set to the rule the list of aggregations.""" aggregation_body = ( Word(alphanums + "_") + originalTextFor(nestedExpr()) + Suppress(',') + PYTHON_ALLOWED_EXPR).setParseAction(lambda toks: { 'name': toks[0], 'engine': eval(toks[1]), 'rule': eval(toks[2]) }) return (Keyword('aggregation:').suppress() + indentedBlock(OneOrMore(aggregation_body), indent_stack)).setResultsName('aggregation')
def parse_element(cls, indent_stack): """Set ``json_ext`` in the rule.""" json_dumps = (Keyword('dumps').suppress() + Literal(',').suppress() + PYTHON_ALLOWED_EXPR ).setResultsName("dumps")\ .setParseAction(lambda toks: toks[0].strip()) json_loads = (Keyword("loads").suppress() + Literal(",").suppress() + PYTHON_ALLOWED_EXPR ).setResultsName("loads")\ .setParseAction(lambda toks: toks[0].strip()) return (Keyword('json:').suppress() + indentedBlock((json_dumps & json_loads), indent_stack) ).setResultsName('json_ext')
def parse_element(cls, indent_stack): """Set ``json_ext`` in the rule.""" json_dumps = (Keyword('dumps').suppress() + Literal(',').suppress() + PYTHON_ALLOWED_EXPR ).setResultsName("dumps")\ .setParseAction(lambda toks: toks[0].strip()) json_loads = (Keyword("loads").suppress() + Literal(",").suppress() + PYTHON_ALLOWED_EXPR ).setResultsName("loads")\ .setParseAction(lambda toks: toks[0].strip()) return (Keyword('json:').suppress() + indentedBlock( (json_dumps & json_loads), indent_stack)).setResultsName('json_ext')
def parse_element(cls, indent_stack): """Set to the rule the list of aggregations.""" aggregation_body = ( Word(alphanums + "_") + originalTextFor(nestedExpr()) + Suppress(',') + PYTHON_ALLOWED_EXPR ).setParseAction(lambda toks: { 'name': toks[0], 'engine': eval(toks[1]), 'rule': eval(toks[2]) }) return (Keyword('aggregation:').suppress() + indentedBlock(OneOrMore(aggregation_body), indent_stack) ).setResultsName('aggregation')
def parse_element(cls, indent_stack): """Sets ``extensions`` attribute to the rule definition""" import_line = quotedString.setParseAction(removeQuotes) + restOfLine return (Keyword('extensions:').suppress() + indentedBlock(OneOrMore(import_line), indent_stack) ).setResultsName('extensions')
def parse_element(cls, indent_stack): """Set the ``schema`` attribute inside the rule.""" return (Keyword('schema:').suppress() + indentedBlock(DICT_DEF, indent_stack) ).setParseAction(lambda toks: toks[0]).setResultsName('schema')
def parse_element(cls, indent_stack): """Parse ``elasticsearch`` section. This is an example of the content of this section:: @extend title: elasticsearch: mapping: { "properties": { "title": { "index_name": "title", "type": "multi_field", "fields": { "title": { "type": "string", "analyzer": "standard" }, "sort_title": { "type": "string", "analyzer": "simple" } } } } } local_tokenizer: title.title, invenio.ext.elasticsearch.token1 title.subtitle, invenio.ext.elasticsearch.token2 facets: { "authors": { "terms" : { "field" : "facet_authors", "size": 10, "order" : { "_count" : "desc" } } } } highlights: { "number_of_fragments" : 3, "fragment_size" : 70 } """ mapping = (Keyword('mapping:').suppress() + DICT_DEF)\ .setResultsName('mapping')\ .setParseAction(lambda toks: toks[0]) tokenizer_field = Word(alphanums + '_' + '.') local_tokenizer = (Keyword('local_tokenizer:').suppress() + indentedBlock( OneOrMore(tokenizer_field + Suppress(',') + restOfLine), indent_stack) ).setResultsName('local_tokenizer') facets = (Keyword('facets:').suppress() + DICT_DEF)\ .setResultsName('facets')\ .setParseAction(lambda toks: toks[0]) highlights = (Keyword('highlights:').suppress() + DICT_DEF)\ .setResultsName('highlights')\ .setParseAction(lambda toks: toks[0]) return (Keyword('elasticsearch:').suppress() + indentedBlock( Each([Optional(mapping), Optional(local_tokenizer), Optional(facets), Optional(highlights)]), indent_stack) ).setResultsName('elasticsearch')