コード例 #1
0
def test_basic_tokenizer_no_fields():
    """An empty query body tokenizes to just the root-query and close tokens."""
    src = '''
       query Sample {
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #2
0
def test_tokenizer_one_field():
    """A single bare attribute produces one AttributeToken between root and close."""
    src = '''
       query Sample {
         field1
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #3
0
def test_tokenizer_filters2():
    """Two comparisons joined by `and` emit bool / binary-logic / bool tokens
    between the filter start and end markers."""
    src = '''
       query Sample {
         field1 [* == 4 and * <= 3]
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val=4),
        Toks.FilterBinaryLogicToken(logic_op='and'),
        Toks.FilterBoolToken(sel='*', op='<=', val=3),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #4
0
def test_tokenizer_filters1():
    """A single string-equality filter tokenizes the quoted value as a str."""
    src = '''
       query Sample {
         field1 [* == 'hi']
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val='hi'),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #5
0
def test_tokenizer_filters7():
    """An `in` filter over boolean literals yields a Python list [True, False]
    as the token value."""
    src = '''
       query Sample {
         field1 [* in [true, false]]
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='in', val=[True, False]),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #6
0
ファイル: test_parser.py プロジェクト: jaychia/shoedog
from tests.mock_app import db, Sample, Tube

# Registry built once from the mock SQLAlchemy app's `db`; presumably shared by
# the parser tests in this module — TODO confirm against the rest of the file.
mock_registry = build_registry(db)

# Bare string: documents the query text whose token stream `test_token_gen_1`
# (defined immediately below) represents. Kept as a module-level string, not a
# comment, matching the original author's layout.
"""
query Sample {
    id
    tube {
        name
        type [any in ['a', 'b'] or all != 'c']
    }
    date [((* < '3/9/2017') or * == '3/3/2017') and ((* > '3/10/2017'))]
}
"""
test_token_gen_1 = (x for x in (
    Toks.RootQueryToken(query_model='Sample'),
    Toks.AttributeToken(attribute_name='id'),
    Toks.OpenObjectToken(rel='tube'),
    Toks.AttributeToken(attribute_name='name'),
    Toks.AttributeToken(attribute_name='type'),
    Toks.FilterStartToken(),
    Toks.FilterBoolToken(sel='any', op='in', val=['a', 'b']),
    Toks.FilterBinaryLogicToken(logic_op='or'),
    Toks.FilterBoolToken(sel='all', op='!=', val='c'),
    Toks.FilterEndToken(),
    Toks.CloseObjectToken(),
    Toks.AttributeToken(attribute_name='date'),
    Toks.FilterStartToken(),
    Toks.FilterOpenParanToken(),
    Toks.FilterOpenParanToken(),
    Toks.FilterBoolToken(sel='*', op='<', val='3/9/2017'),
コード例 #7
0
def test_tokenizer_filters3():
    """Parenthesized comparisons joined by `and` emit explicit open/close
    paren tokens around each bool token."""
    src = '''
       query Sample {
         field1 [(* == 'hi') and (* != 'bye')]
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.FilterStartToken(),
        Toks.FilterOpenParanToken(),
        Toks.FilterBoolToken(sel='*', op='==', val='hi'),
        Toks.FilterCloseParanToken(),
        Toks.FilterBinaryLogicToken(logic_op='and'),
        Toks.FilterOpenParanToken(),
        Toks.FilterBoolToken(sel='*', op='!=', val='bye'),
        Toks.FilterCloseParanToken(),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #8
0
def test_tokenizer_parser_test_2():
    """Deeply nested sub-objects tokenize depth-first: each `{` opens an
    object, each `}` emits a matching CloseObjectToken."""
    src = '''
        query Sample {
            id
            tube {
                name [* == 'only-this-tube']
                self_tube {
                    name [* == 'only-this-tube-self-tube']
                }
            }
            self_sample {
                name [* == 'only-this-self-sample']
                tube {
                    self_tube {
                        name [* == 'only-this-self-sample-tube-self-tube']
                    }
                }
            }
        }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='id'),
        # tube { name [...] self_tube { name [...] } }
        Toks.OpenObjectToken(rel='tube'),
        Toks.AttributeToken(attribute_name='name'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val='only-this-tube'),
        Toks.FilterEndToken(),
        Toks.OpenObjectToken(rel='self_tube'),
        Toks.AttributeToken(attribute_name='name'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val='only-this-tube-self-tube'),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
        Toks.CloseObjectToken(),
        # self_sample { name [...] tube { self_tube { name [...] } } }
        Toks.OpenObjectToken(rel='self_sample'),
        Toks.AttributeToken(attribute_name='name'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val='only-this-self-sample'),
        Toks.FilterEndToken(),
        Toks.OpenObjectToken(rel='tube'),
        Toks.OpenObjectToken(rel='self_tube'),
        Toks.AttributeToken(attribute_name='name'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*',
                             op='==',
                             val='only-this-self-sample-tube-self-tube'),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
        Toks.CloseObjectToken(),
        Toks.CloseObjectToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #9
0
def test_tokenizer_parser_test_1():
    """A query mixing a nested object, selector filters (`any`/`all`), and a
    nested-parenthesized filter expression tokenizes in source order."""
    src = '''
        query Sample {
            id
            tube {
                name
                type [any in ['a', 'b'] or all != 'c']
            }
            date [((* < '3/9/2017') or * == '3/3/2017') and ((* > '3/10/2017'))]
        }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='id'),
        # tube { name, type [...] }
        Toks.OpenObjectToken(rel='tube'),
        Toks.AttributeToken(attribute_name='name'),
        Toks.AttributeToken(attribute_name='type'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='any', op='in', val=['a', 'b']),
        Toks.FilterBinaryLogicToken(logic_op='or'),
        Toks.FilterBoolToken(sel='all', op='!=', val='c'),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
        # date [((...) or ...) and ((...))] — every paren is its own token
        Toks.AttributeToken(attribute_name='date'),
        Toks.FilterStartToken(),
        Toks.FilterOpenParanToken(),
        Toks.FilterOpenParanToken(),
        Toks.FilterBoolToken(sel='*', op='<', val='3/9/2017'),
        Toks.FilterCloseParanToken(),
        Toks.FilterBinaryLogicToken(logic_op='or'),
        Toks.FilterBoolToken(sel='*', op='==', val='3/3/2017'),
        Toks.FilterCloseParanToken(),
        Toks.FilterBinaryLogicToken(logic_op='and'),
        Toks.FilterOpenParanToken(),
        Toks.FilterOpenParanToken(),
        Toks.FilterBoolToken(sel='*', op='>', val='3/10/2017'),
        Toks.FilterCloseParanToken(),
        Toks.FilterCloseParanToken(),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected
コード例 #10
0
def test_tokenizer_subobject_filters2():
    """A filtered attribute followed by a filtered sub-object: the sub-object's
    close token precedes the root's final CloseObjectToken."""
    src = '''
       query Sample {
         field1 [* in ['foo', 'bar']]
         tube {
           field2 [* == 3]
         }
       }
    '''
    expected = (
        Toks.RootQueryToken(query_model='Sample'),
        Toks.AttributeToken(attribute_name='field1'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='in', val=['foo', 'bar']),
        Toks.FilterEndToken(),
        Toks.OpenObjectToken(rel='tube'),
        Toks.AttributeToken(attribute_name='field2'),
        Toks.FilterStartToken(),
        Toks.FilterBoolToken(sel='*', op='==', val=3),
        Toks.FilterEndToken(),
        Toks.CloseObjectToken(),
        Toks.CloseObjectToken(),
    )
    assert tuple(tokenize(src)) == expected