Code Example #1
def test_parsing_simple_input_object_with_args_should_fail():
    body = '''
input Hello {
  world(foo: Int): String
}
'''
    with raises(GraphQLSyntaxError) as excinfo:
        parse(body)

    assert 'Syntax Error GraphQL (3:8) Expected :, found (' in excinfo.value.message
Code Example #2
def test_cannot_be_used_for_execution():
    ast = parse('''
      extend type Query {
        newField: String
      }
    ''')
    extended_schema = extend_schema(test_schema, ast)
    clientQuery = parse('{ newField }')

    result = execute(extended_schema, clientQuery, object())
    assert result.data['newField'] is None
    assert str(result.errors[0]
               ) == 'Client Schema cannot be used for execution.'
Code Example #3
def test_it_concatenates_two_asts_together():
    source_a = Source('{ a, b, ... Frag }')
    source_b = Source('''
        fragment Frag on T {
            c
        }
    ''')

    ast_a = parse(source_a)
    ast_b = parse(source_b)
    ast_c = concat_ast([ast_a, ast_b])

    assert print_ast(ast_c) == '''{
Code Example #4
    def test_missing_directives_in_schema(self):
        """Ensure that validators properly identifiy missing directives in the schema.

        The schema should contain all directives that are supported by the graphql compiler,
        even if they might not be used in the query. Hence we raise an error when the following
        directive is not declared in the schema: directive @recurse(depth: Int!) on FIELD.
        """
        incomplete_schema_text = '''
            schema {
                query: RootSchemaQuery
            }
            directive @filter(op_name: String!, value: [String!]!) on FIELD | INLINE_FRAGMENT
            directive @tag(tag_name: String!) on FIELD
            directive @output(out_name: String!) on FIELD
            directive @output_source on FIELD
            directive @optional on FIELD
            directive @fold on FIELD
            type Animal {
                name: String
            }
            type RootSchemaQuery {
                Animal: Animal
            }
        '''
        incomplete_schema = build_ast_schema(parse(incomplete_schema_text))
        query = '''{
            Animal {
                name @output(out_name: "animal_name")
            }
        }'''
        with self.assertRaises(GraphQLValidationError):
            graphql_to_ir(incomplete_schema, query)
Code Example #5
 def test_incorrect_directive_locations_in_schema(self):
     """Ensure appropriate errors are raised if nonexistent directive is provided."""
     schema_with_extra_directive = '''
         schema {
             query: RootSchemaQuery
         }
         directive @filter(op_name: String!, value: [String!]!) on FIELD | INLINE_FRAGMENT
         directive @tag(tag_name: String!) on FIELD
         directive @output(out_name: String!) on FIELD
         directive @output_source on FIELD
         directive @optional on FIELD
         directive @fold on FIELD
         directive @recurse(depth: Int!) on FIELD
         directive @nonexistent on FIELD
         type Animal {
             name: String
         }
         type RootSchemaQuery {
             Animal: Animal
         }
     '''
     parsed_schema_with_extra_directive = build_ast_schema(parse(schema_with_extra_directive))
     query = '''{
         Animal {
             name @output(out_name: "animal_name")
         }
     }'''
     with self.assertRaises(GraphQLValidationError):
         graphql_to_ir(parsed_schema_with_extra_directive, query)
Code Example #6
def test_extends_objects_by_adding_implemented_interfaces_2():
    ast = parse('''
      extend type Foo {
        newObject: NewObject
        newInterface: NewInterface
        newUnion: NewUnion
        newScalar: NewScalar
        newEnum: NewEnum
        newTree: [Foo]!
      }
      type NewObject implements NewInterface {
        baz: String
      }
      type NewOtherObject {
        fizz: Int
      }
      interface NewInterface {
        baz: String
      }
      union NewUnion = NewObject | NewOtherObject
      scalar NewScalar
      enum NewEnum {
        OPTION_A
        OPTION_B
      }
    ''')
    original_print = print_schema(test_schema)
    extended_schema = extend_schema(test_schema, ast)
    assert extended_schema != test_schema
    assert print_schema(test_schema) == original_print
    assert print_schema(extended_schema) == \
        '''schema {
Code Example #7
File: __init__.py Project: graphql-python/gql
    def check_gql(self):
        if not self.tree or not self.lines:
            self.load_file()

        visitor = self.visitor_class(self.filename, self.options)
        visitor.visit(self.tree)

        for node in visitor.calls:
            # Lines with the noqa flag are ignored entirely
            if pycodestyle.noqa(self.lines[node.lineno - 1]):
                continue

            query = visitor.node_query(node)
            if not query:
                continue

            try:
                source = Source(query, 'gql query')
                ast = parse(source)
            except Exception as e:
                message = str(e)
                yield self.error(node, GQL_SYNTAX_ERROR, message)
                continue

            validation_errors = self.validation_errors(ast)
            if validation_errors:
                for error in validation_errors:
                    message = str(error)
                    yield self.error(node, GQL_VALIDATION_ERROR, message)
Code Example #8
def test_parses_union_with_two_types():
    body = 'union Hello = Wo | Rld'
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.UnionTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(6, 11)
                ),
                types=[
                    ast.NamedType(
                        name=ast.Name(
                            value='Wo',
                            loc=loc(14, 16)
                        ),
                        loc=loc(14, 16)
                    ),
                    ast.NamedType(
                        name=ast.Name(
                            value='Rld',
                            loc=loc(19, 22)
                        ),
                        loc=loc(19, 22)
                    )
                ],
                loc=loc(0, 22)
            )
        ],
        loc=loc(0, 22)
    )
    assert doc == expected
Code Example #9
def test_big_list_of_containers_with_multiple_fields(benchmark):
    Container = namedtuple('Container', 'x y z o')

    ContainerType = GraphQLObjectType('Container', fields={
        'x': GraphQLField(GraphQLInt),
        'y': GraphQLField(GraphQLInt),
        'z': GraphQLField(GraphQLInt),
        'o': GraphQLField(GraphQLInt),
    })

    big_container_list = [Container(x=x, y=x, z=x, o=x) for x in range(5000)]

    def resolve_all_containers(root, args, context, info):
        return big_container_list

    Query = GraphQLObjectType('Query', fields={
        'allContainers': GraphQLField(
            GraphQLList(ContainerType),
            resolver=resolve_all_containers
        )
    })
    hello_schema = GraphQLSchema(Query)
    source = Source('{ allContainers { x, y, z } }')
    ast = parse(source)
    big_list_query = partial(execute, hello_schema, ast)
    result = benchmark(big_list_query)
    # result = big_list_query()
    assert not result.errors
    assert result.data == {'allContainers': [{'x': c.x, 'y': c.y, 'z': c.z} for c in big_container_list]}
Code Example #10
 def test_directives_with_incorrect_arguments(self):
     """Ensure that proper errors are raised if directives are provided with incorrect args."""
     # Change @filter arg from String! to Int!
     schema_with_incorrect_args = '''
         schema {
             query: RootSchemaQuery
         }
         directive @filter(op_name: Int!, value: [String!]!) on FIELD | INLINE_FRAGMENT
         directive @tag(tag_name: String!) on INLINE_FRAGMENT
         directive @output(out_name: String!) on FIELD
         directive @output_source on FIELD
         directive @optional on FIELD
         directive @fold on FIELD
         directive @recurse(depth: Int!) on FIELD
         type Animal {
             name: String
         }
         type RootSchemaQuery {
             Animal: Animal
         }
     '''
     parsed_incorrect_schema = build_ast_schema(parse(schema_with_incorrect_args))
     query = '''{
         Animal {
             name @output(out_name: "animal_name")
         }
     }'''
     with self.assertRaises(GraphQLValidationError):
         graphql_to_ir(parsed_incorrect_schema, query)
Code Example #11
def cycle_output(body):
    """This function does a full cycle of going from a string with the contents of the DSL,
    parsed in a schema AST, materializing that schema AST into an in-memory GraphQLSchema,
    and then finally printing that GraphQL into the DSL"""
    ast = parse(body)
    schema = build_ast_schema(ast)
    return "\n" + print_schema(schema)
Code Example #12
def test_validates_using_a_custom_type_info():
    type_info = TypeInfo(test_schema, lambda *_: None)

    ast = parse('''
      query {
        catOrDog {
          ... on Cat {
            furColor
          }
          ... on Dog {
            isHousetrained
          }
        }
      }
    ''')

    errors = visit_using_rules(
        test_schema,
        type_info,
        ast,
        specified_rules
    )

    assert len(errors) == 3
    assert errors[0].message == 'Cannot query field "catOrDog" on type "QueryRoot".'
    assert errors[1].message == 'Cannot query field "furColor" on type "Cat".'
    assert errors[2].message == 'Cannot query field "isHousetrained" on type "Dog".'
Code Example #13
def test_parses_simple_field_with_list_arg():
    body = '''
type Hello {
  world(things: [String]): String
}'''
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.ObjectTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(6, 11)
                ),
                interfaces=[],
                directives=[],
                fields=[
                    ast.FieldDefinition(
                        name=ast.Name(
                            value='world',
                            loc=loc(16, 21)
                        ),
                        arguments=[
                            ast.InputValueDefinition(
                                name=ast.Name(
                                    value='things',
                                    loc=loc(22, 28)
                                ),
                                type=ast.ListType(
                                    type=ast.NamedType(
                                        name=ast.Name(
                                            value='String',
                                            loc=loc(31, 37)
                                        ),
                                        loc=loc(31, 37)
                                    ),
                                    loc=loc(30, 38)
                                ),
                                default_value=None,
                                directives=[],
                                loc=loc(22, 38)
                            )
                        ],
                        type=ast.NamedType(
                            name=ast.Name(
                                value='String',
                                loc=loc(41, 47)
                            ),
                            loc=loc(41, 47)
                        ),
                        directives=[],
                        loc=loc(16, 47)
                    )
                ],
                loc=loc(1, 49)
            )
        ],
        loc=loc(1, 49)
    )
    assert doc == expected
Code Example #14
def test_parses_single_value_enum():
    body = 'enum Hello { WORLD }'
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.EnumTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(5, 10)
                ),
                directives=[],
                values=[
                    ast.EnumValueDefinition(
                        name=ast.Name(
                            value='WORLD',
                            loc=loc(13, 18)
                        ),
                        directives=[],
                        loc=loc(13, 18)
                    )
                ],
                loc=loc(0, 20)
            )
        ],
        loc=loc(0, 20)
    )

    assert doc == expected
Code Example #15
def test_parses_simple_type_inheriting_multiple_interfaces():
    body = 'type Hello implements Wo, rld { }'
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.ObjectTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(5, 10)
                ),
                interfaces=[
                    ast.NamedType(
                        name=ast.Name(
                            value='Wo',
                            loc=loc(22, 24)
                        ),
                        loc=loc(22, 24)
                    ),
                    ast.NamedType(
                        name=ast.Name(
                            value='rld',
                            loc=loc(26, 29)
                        ),
                        loc=loc(26, 29)
                    )
                ],
                fields=[],
                loc=loc(0, 33)
            )
        ],
        loc=loc(0, 33)
    )
    assert doc == expected
Code Example #16
def test_parses_double_value_enum():
    body = 'enum Hello { WO, RLD }'
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.EnumTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(5, 10)
                ),
                values=[
                    ast.EnumValueDefinition(
                        name=ast.Name(
                            value='WO',
                            loc=loc(13, 15)
                        ),
                        loc=loc(13, 15)
                    ),
                    ast.EnumValueDefinition(
                        name=ast.Name(
                            value='RLD',
                            loc=loc(17, 20)
                        ),
                        loc=loc(17, 20)
                    )
                ],
                loc=loc(0, 22)
            )
        ],
        loc=loc(0, 22)
    )

    assert doc == expected
Code Example #17
def test_parses_simple_union():
    body = 'union Hello = World'
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.UnionTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(6, 11)
                ),
                types=[
                    ast.NamedType(
                        name=ast.Name(
                            value='World',
                            loc=loc(14, 19)
                        ),
                        loc=loc(14, 19)
                    )
                ],
                loc=loc(0, 19)
            )
        ],
        loc=loc(0, 19)
    )
    assert doc == expected
Code Example #18
def test_ast_to_code_using_kitchen_sink():
    doc = parse(fixtures.KITCHEN_SINK)
    code_ast = ast_to_code(doc)
    source = Source(fixtures.KITCHEN_SINK)
    loc = lambda start, end: Loc(start, end, source)

    parsed_code_ast = eval(code_ast, {}, {'ast': ast, 'loc': loc})
    assert doc == parsed_code_ast
Code Example #19
def test_parses_simple_field_with_arg_with_default_value():
    body = '''
type Hello {
  world(flag: Boolean = true): String
}'''
    loc = create_loc_fn(body)
    doc = parse(body)
    expected = ast.Document(
        definitions=[
            ast.ObjectTypeDefinition(
                name=ast.Name(
                    value='Hello',
                    loc=loc(6, 11)
                ),
                interfaces=[],
                fields=[
                    ast.FieldDefinition(
                        name=ast.Name(
                            value='world',
                            loc=loc(16, 21)
                        ),
                        arguments=[
                            ast.InputValueDefinition(
                                name=ast.Name(
                                    value='flag',
                                    loc=loc(22, 26)
                                ),
                                type=ast.NamedType(
                                    name=ast.Name(
                                        value='Boolean',
                                        loc=loc(28, 35)
                                    ),
                                    loc=loc(28, 35)
                                ),
                                default_value=ast.BooleanValue(
                                    value=True,
                                    loc=loc(38, 42)
                                ),
                                loc=loc(22, 42)
                            )
                        ],
                        type=ast.NamedType(
                            name=ast.Name(
                                value='String',
                                loc=loc(45, 51)
                            ),
                            loc=loc(45, 51)
                        ),
                        loc=loc(16, 51)
                    )
                ],
                loc=loc(1, 53)
            )
        ],
        loc=loc(1, 53)
    )

    assert doc == expected
Code Example #20
    def execute_graphql_request(self, request):
        '''
        THIS IS IMPLEMENTED IN A SUB-OPTIMAL MANNER. DO NOT...
        A.) Judge Me.
        B.) Use unless you accept that the performance here probably is miserable.
        '''
        subqueries = self.parse_body(request)
        results = []
        for subquery in subqueries:
            query, variables, operation_name, _id = self.get_graphql_params(subquery)

            if not query:
                raise HttpError(HttpResponseBadRequest('Must provide query string.'))

            source = Source(query, name='GraphQL request')

            try:
                document_ast = parse(source)
                validation_errors = validate(self.schema, document_ast)
                if validation_errors:
                    # TODO: Do not return here. We should handle this per subquery.
                    return ExecutionResult(
                        errors=validation_errors,
                        invalid=True,
                    )
            except Exception as e:
                return ExecutionResult(errors=[e], invalid=True)

            if request.method.lower() == 'get':
                operation_ast = get_operation_ast(document_ast, operation_name)
                if operation_ast and operation_ast.operation != 'query':
                    raise HttpError(HttpResponseNotAllowed(
                        ['POST'], 'Can only perform a {} operation from a POST request.'.format(operation_ast.operation)
                    ))

            try:
                result = self.execute(
                    document_ast,
                    root_value=self.get_root_value(request),
                    variable_values=variables,
                    operation_name=operation_name,
                    context_value=self.get_context(request)
                )
                # TODO: This is really optimistic.
                # We may have status, we may have errors, etc...
                # payload should be set according to graphql spec, not
                # simply "IT WORKED Spec".
                results.append({
                    "id": _id,
                    "payload": {
                        "data": result.data,
                    }
                })
            except Exception as e:
                return ExecutionResult(errors=[e], invalid=True)

        return results
Code Example #21
def test_does_not_allow_extending_a_scalar():
    ast = parse('''
      extend type String {
        baz: String
      }
    ''')
    with raises(Exception) as exc_info:
        extend_schema(test_schema, ast)

    assert str(exc_info.value) == 'Cannot extend non-object type "String".'
Code Example #22
def test_unicode_error_message():
    ast = parse("query Example { unicode }")

    def resolver(context, *_):
        raise Exception(u"UNIÇODÉ!")

    Type = GraphQLObjectType("Type", {"unicode": GraphQLField(GraphQLString, resolver=resolver)})

    result = execute(GraphQLSchema(Type), ast)
    assert isinstance(result.errors[0], GraphQLLocatedError)
Code Example #23
File: schema.py Project: rmyers/cannula
def load_schema(directory: str) -> typing.List[DocumentNode]:
    assert os.path.isdir(directory), f'Directory not found: {directory}'
    path = pathlib.Path(directory)

    def find_graphql_files():
        # path.glob() already yields paths rooted at `directory`, so the files can be
        # opened directly (re-joining with `directory` would duplicate the prefix).
        for graph in path.glob('**/*.graphql'):
            with open(graph) as graphfile:
                yield graphfile.read()

    return [parse(schema) for schema in find_graphql_files()]
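A hedged follow-up sketch: combining the parsed documents into a single schema. concat_ast and build_ast_schema are standard graphql-core utilities; the directory name is hypothetical.

from graphql import build_ast_schema, concat_ast

documents = load_schema('schemas')  # hypothetical directory containing *.graphql files
schema = build_ast_schema(concat_ast(documents))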
Code Example #24
def test_requires_a_schema_definition():
    body = '''
type Hello {
  bar: Bar
}
'''
    doc = parse(body)
    with raises(Exception) as excinfo:
        build_ast_schema(doc)

    assert 'Must provide a schema definition.' == str(excinfo.value)
Code Example #25
 def total():
     Query = GraphQLObjectType('Query', fields={
         'allInts': GraphQLField(
             GraphQLList(GraphQLInt),
             resolver=resolve_all_ints
         )
     })
     hello_schema = GraphQLSchema(Query)
     source = Source('{ allInts }')
     ast = parse(source)
     return partial(execute, hello_schema, ast)
Code Example #26
File: client.py Project: graphql-python/gql
    def __init__(self, schema=None, introspection=None, type_def=None, transport=None,
                 fetch_schema_from_transport=False, retries=0):
        assert not (type_def and introspection), 'Cannot provide introspection and type definition at the same time'
        if transport and fetch_schema_from_transport:
            assert not schema, 'Cannot fetch the schema from transport if it is already provided'
            introspection = transport.execute(parse(introspection_query)).data
        if introspection:
            assert not schema, 'Cannot provide introspection and schema at the same time'
            schema = build_client_schema(introspection)
        elif type_def:
            assert not schema, 'Cannot provide type definition and schema at the same time'
            type_def_ast = parse(type_def)
            schema = build_ast_schema(type_def_ast)
        elif schema and not transport:
            transport = LocalSchemaTransport(schema)

        self.schema = schema
        self.introspection = introspection
        self.transport = transport
        self.retries = retries
Code Example #27
def pretty_print_graphql(query, use_four_spaces=True):
    """Take a GraphQL query, pretty print it, and return it."""
    # Use our custom visitor, which fixes directive argument order
    # to get the canonical representation
    output = visit(parse(query), CustomPrintingVisitor())

    # Using four spaces for indentation makes it easier to edit in
    # Python source files.
    if use_four_spaces:
        return fix_indentation_depth(output)
    return output
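A hedged usage sketch for the helper above. CustomPrintingVisitor and fix_indentation_depth are assumed to be importable from the same module, and the query text is illustrative only.

compact_query = '{ Animal { name @output(out_name: "animal_name") } }'
# The result should be the same query re-emitted with canonical directive argument
# order and, by default, four-space indentation.
print(pretty_print_graphql(compact_query))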
Code Example #28
 def total():
     Query = GraphQLObjectType('Query', fields={
         'allContainers': GraphQLField(
             GraphQLList(ContainerType),
             resolver=resolve_all_containers
         )
     })
     hello_schema = GraphQLSchema(Query)
     source = Source('{ allContainers { x } }')
     ast = parse(source)
     return partial(execute, hello_schema, ast)
Code Example #29
def test_does_not_consider_fragment_names():
    body = '''schema {
  query: Foo
}

fragment Foo on Type { field } '''
    doc = parse(body)
    with raises(Exception) as excinfo:
        build_ast_schema(doc)

    assert 'Specified query type "Foo" not found in document' in str(excinfo.value)
Code Example #30
File: test_subscribe.py Project: marcosptf/fedora
def test_accepts_an_object_with_named_properties_as_arguments():
    document = parse('''
      subscription {
        importantEmail
      }
  ''')
    result = subscribe(
        email_schema,
        document,
        root_value=None
    )
    assert isinstance(result, Observable)
Code Example #31
def gql(value: str) -> str:
    parse(value)
    return value
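A hedged sketch of how this helper behaves, assuming a graphql-core version where parse raises GraphQLSyntaxError on malformed input:

from graphql import GraphQLSyntaxError

checked = gql('{ hero { name } }')   # valid: the original string is returned unchanged
try:
    gql('{ hero { name }')           # invalid: missing closing brace
except GraphQLSyntaxError:
    pass                             # parse() rejects the query before it is used anywhere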
Code Example #32
    def test_two_edges_on_same_field_in_chain(self):
        query_str = dedent("""\
            {
              Creature {
                age @output(out_name: "age")
                in_Animal_Creature {
                  name @output(out_name: "name")
                  out_Animal_Critter {
                    size @output(out_name: "size")
                  }
                }
              }
            }
        """)
        parent_str = dedent("""\
            {
              Creature {
                age @output(out_name: "age")
                id @output(out_name: "__intermediate_output_0")
              }
            }
        """)
        child1_str = dedent("""\
            {
              Animal {
                name @output(out_name: "name")
                uuid @output(out_name: "__intermediate_output_1")
              }
            }
        """)

        child2_str = dedent("""\
            {
              Critter {
                size @output(out_name: "size")
                ID @output(out_name: "__intermediate_output_2")
              }
            }
        """)
        expected_query_node = ExpectedQueryNode(
            query_str=parent_str,
            schema_id="second",
            child_query_nodes_and_out_names=[
                (
                    ExpectedQueryNode(
                        query_str=child1_str,
                        schema_id="first",
                        child_query_nodes_and_out_names=[(
                            ExpectedQueryNode(
                                query_str=child2_str,
                                schema_id="third",
                                child_query_nodes_and_out_names=[],
                            ),
                            "__intermediate_output_1",
                            "__intermediate_output_2",
                        )],
                    ),
                    "__intermediate_output_0",
                    "__intermediate_output_1",
                ),
            ],
        )
        query_node, intermediate_outputs = split_query(parse(query_str),
                                                       three_merged_schema)
        self._check_query_node_structure(query_node, expected_query_node)
        self.assertEqual(intermediate_outputs,
                         self._get_intermediate_outputs_set(3))
Code Example #33
import graphql
from graphql.language import print_ast


document_node = graphql.parse(
    """
{
  root { name }
}
"""
)
print(f"{document_node.__class__.__module__}.{document_node.__class__.__name__}")
print(print_ast(document_node))
Code Example #34
def test_should_not_allow_two_fields_in_the_subscription(validation_schema):
    sub = 'subscription S3{ test1 test2 }'
    errors = validate(validation_schema, parse(sub),
                      [SubscriptionHasSingleRootField])
    assert len(errors) == 1
    assert errors[0].message == 'Subscription "S3" must have only one field.'
Code Example #35
File: __init__.py Project: avanov/graphql-dsl
def parse(src: str) -> graphql.DocumentNode:
    schema = graphql.parse(src)
    # Bare attribute access: raises if the document does not start with a schema
    # definition declaring at least one operation type.
    schema.definitions[0].operation_types[0].type.name.value
    return schema
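A hedged usage sketch: the chained attribute access above only succeeds when the document's first definition is a schema definition with at least one operation type, so it doubles as a lightweight sanity check.

doc = parse('''
schema {
  query: Query
}

type Query {
  hello: String
}
''')
# A document without a leading schema definition would fail the attribute access
# (AttributeError/IndexError), rejecting the input early.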
Code Example #36
def get_schema_with_macros(macro_registry):
    """Get a new GraphQLSchema with fields where macro edges can be used.

    Preconditions:
    1. No macro in the registry has the same name as a field on the vertex where it applies.
    2. Members of a union type do not have outgoing macros with the same name.

    An easy way to satisfy the preconditions is to create the macro_registry using
    create_macro_registry, and only update it with register_macro_edge, which does all
    the necessary validation.

    Postconditions:
    1. Every GraphQLQuery that uses macros from this registry appropriately should
       successfully type-check against the schema generated from this function.
    2. A GraphQLQuery that uses macros not present in the registry, or uses valid
       macros but on types they are not defined at should fail schema validation with
       the schema generated from this function.
    3. This function is total -- A valid macro registry should not fail to create a
       GraphQL schema with macros.

    Args:
        macro_registry: MacroRegistry object containing a schema and macro descriptors
                        we want to add to the schema.

    Returns:
        GraphQLSchema with additional fields where macro edges can be used.
    """
    # The easiest way to manipulate the schema is through its AST. The easiest
    # way to get an AST is to print it and parse it.
    schema_ast = parse(print_schema(macro_registry.schema_without_macros))

    fields_by_definition_name = {}
    for definition in schema_ast.definitions:
        if isinstance(definition, (ObjectTypeDefinitionNode, InterfaceTypeDefinitionNode)):
            # Cast to list (from FrozenList) to allow for updates.
            fields_by_definition_name[definition.name.value] = list(definition.fields)

    for class_name, macros_for_class in six.iteritems(macro_registry.macro_edges_at_class):
        for macro_edge_name, macro_edge_descriptor in six.iteritems(macros_for_class):
            list_type_at_target = ListTypeNode(
                type=NamedTypeNode(name=NameNode(value=macro_edge_descriptor.target_class_name))
            )
            arguments = []
            directives = [DirectiveNode(name=NameNode(value=MacroEdgeDirective.name))]
            fields_by_definition_name[class_name].append(
                FieldDefinitionNode(
                    name=NameNode(value=macro_edge_name),
                    arguments=arguments,
                    type=list_type_at_target,
                    directives=directives,
                )
            )

    new_definitions = []
    for definition in schema_ast.definitions:
        # Create new (Object)/(Interface)TypeDefinitionNode based on the updated fields.
        if isinstance(definition, ObjectTypeDefinitionNode):
            new_definitions.append(
                ObjectTypeDefinitionNode(
                    interfaces=definition.interfaces,
                    description=definition.description,
                    name=definition.name,
                    directives=definition.directives,
                    loc=definition.loc,
                    fields=FrozenList(fields_by_definition_name[definition.name.value]),
                )
            )
        elif isinstance(definition, InterfaceTypeDefinitionNode):
            new_definitions.append(
                InterfaceTypeDefinitionNode(
                    description=definition.description,
                    name=definition.name,
                    directives=definition.directives,
                    loc=definition.loc,
                    fields=FrozenList(fields_by_definition_name[definition.name.value]),
                )
            )
        else:
            new_definitions.append(definition)

    new_schema_ast = DocumentNode(definitions=new_definitions)
    return build_ast_schema(new_schema_ast)
Code Example #37
def test_gets_an_operation_from_a_document_with_named_subscription_operation():
    doc = parse("subscription Test { field }")
    assert get_operation_ast(doc) == doc.definitions[0]
Code Example #38
    def subscribe(self, query, operation_name, callback, variables, context,
                  format_error, format_response):
        parsed_query = parse(query)
        rules = specified_rules + [SubscriptionHasSingleRootField]
        errors = validate(self.schema, parsed_query, rules=rules)

        if errors:
            return Promise.rejected(ValidationError(errors))

        args = {}

        subscription_name = ''

        for definition in parsed_query.definitions:

            if isinstance(definition, OperationDefinition):
                root_field = definition.selection_set.selections[0]
                subscription_name = root_field.name.value

                fields = self.schema.get_subscription_type().fields

                for arg in root_field.arguments:

                    arg_definition = [
                        arg_def for _, arg_def in fields.get(
                            subscription_name).args.iteritems()
                        if arg_def.out_name == arg.name.value
                    ][0]

                    args[arg_definition.out_name] = value_from_ast(
                        arg.value, arg_definition.type, variables=variables)

        if self.setup_funcs.get(to_snake_case(subscription_name)):
            trigger_map = self.setup_funcs[to_snake_case(subscription_name)](
                query=query,
                operation_name=operation_name,
                callback=callback,
                variables=variables,
                context=context,
                format_error=format_error,
                format_response=format_response,
                args=args,
                subscription_name=subscription_name)
        else:
            trigger_map = {}
            trigger_map[subscription_name] = {}

        external_subscription_id = self.max_subscription_id
        self.max_subscription_id += 1
        self.subscriptions[external_subscription_id] = []
        subscription_promises = []

        for trigger_name in trigger_map.viewkeys():
            try:
                channel_options = trigger_map[trigger_name].get(
                    'channel_options', {})
                filter = trigger_map[trigger_name].get('filter',
                                                       lambda arg1, arg2: True)

            # TODO: Think about this some more... The Apollo library
            # lets all messages through by default, even if the user
            # uses the setup_funcs incorrectly (does not provide the
            # 'filter' or 'channel_options' keys); it would arguably
            # be better to raise an exception here.
            except AttributeError:
                channel_options = {}

                def filter(arg1, arg2):
                    return True

            def on_message(root_value):
                def context_promise_handler(result):
                    if isinstance(context, FunctionType):
                        return context()
                    else:
                        return context

                def filter_func_promise_handler(context):
                    return Promise.all([context, filter(root_value, context)])

                def context_do_execute_handler(result):
                    context, do_execute = result
                    if not do_execute:
                        return
                    else:
                        return execute(self.schema, parsed_query, root_value,
                                       context, variables, operation_name)

                return Promise.resolve(True).then(
                    context_promise_handler).then(
                        filter_func_promise_handler).then(
                            context_do_execute_handler).then(
                                lambda result: callback(None, result)).catch(
                                    lambda error: callback(error, None))

            subscription_promises.append(
                self.pubsub.subscribe(
                    trigger_name, on_message,
                    channel_options).then(lambda id: self.subscriptions[
                        external_subscription_id].append(id)))

        return Promise.all(subscription_promises).then(
            lambda result: external_subscription_id)
Code Example #39
File: test_scalars.py Project: zhangxsgit/prefect
 async def test_json_scalar_as_output(self):
     query = "query { json_output }"
     result = graphql.execute(schema, graphql.parse(query))
     assert result.data["json_output"] == {"x": [1, 2]}
Code Example #40
File: test_scalars.py Project: zhangxsgit/prefect
 async def test_uuid_scalar_output(self):
     query = "query { uuid_output }"
     result = graphql.execute(schema, graphql.parse(query))
     assert result.data[
         "uuid_output"] == "8c9c95c5-30b8-467b-8acb-384c86dc3ab8"
Code Example #41
File: client.py Project: vikramarunBT/gql
    def __init__(
        self,
        schema: Optional[Union[str, GraphQLSchema]] = None,
        introspection=None,
        type_def: Optional[str] = None,
        transport: Optional[Union[Transport, AsyncTransport]] = None,
        fetch_schema_from_transport: bool = False,
        execute_timeout: Optional[int] = 10,
    ):
        """Initialize the client with the given parameters.

        :param schema: an optional GraphQL Schema for local validation
                See :ref:`schema_validation`
        :param transport: The provided :ref:`transport <Transports>`.
        :param fetch_schema_from_transport: Boolean to indicate whether we want to fetch
                the schema from the transport using an introspection query
        :param execute_timeout: The maximum time in seconds for the execution of a
                request before a TimeoutError is raised. Only used for async transports.
        """
        assert not (
            type_def and introspection
        ), "Cannot provide introspection and type definition at the same time."

        if type_def:
            assert (
                not schema
            ), "Cannot provide type definition and schema at the same time."
            warnings.warn(
                "type_def is deprecated; use schema instead",
                category=DeprecationWarning,
            )
            schema = type_def

        if introspection:
            assert (
                not schema
            ), "Cannot provide introspection and schema at the same time."
            schema = build_client_schema(introspection)

        if isinstance(schema, str):
            type_def_ast = parse(schema)
            schema = build_ast_schema(type_def_ast)

        if transport and fetch_schema_from_transport:
            assert (
                not schema
            ), "Cannot fetch the schema from transport if is already provided."

        if schema and not transport:
            transport = LocalSchemaTransport(schema)

        # GraphQL schema
        self.schema: Optional[GraphQLSchema] = schema

        # Answer of the introspection query
        self.introspection = introspection

        # GraphQL transport chosen
        self.transport: Optional[Union[Transport, AsyncTransport]] = transport

        # Flag to indicate that we need to fetch the schema from the transport
        # On async transports, we fetch the schema before executing the first query
        self.fetch_schema_from_transport: bool = fetch_schema_from_transport

        # Enforced timeout of the execute function (only for async transports)
        self.execute_timeout = execute_timeout
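A hedged construction sketch for the client above (the SDL string is illustrative). With a schema and no transport, the constructor falls back to a LocalSchemaTransport, so queries can be validated locally:

client = Client(
    schema='''
        type Query {
            hello: String
        }
    ''',
)
assert client.schema is not None
assert client.transport is not None  # LocalSchemaTransport built from the schema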
Code Example #42
def expect_valid(schema, query_string):
    errors = validate(schema, parse(query_string))
    assert not errors
Code Example #43
    def test_directives_on_wrong_fields(self):
        """Ensure appropriate errors are raised if any directives are on the wrong location."""
        # Change @tag from FIELD to INLINE_FRAGMENT
        schema_with_wrong_directive_on_inline_fragment = '''
            schema {
                query: RootSchemaQuery
            }
            directive @filter(op_name: String!, value: [String!]!) on FIELD | INLINE_FRAGMENT
            directive @tag(tag_name: String!) on INLINE_FRAGMENT
            directive @output(out_name: String!) on FIELD
            directive @output_source on FIELD
            directive @optional on FIELD
            directive @fold on FIELD
            directive @recurse(depth: Int!) on FIELD
            type Animal {
                name: String
            }
            type RootSchemaQuery {
                Animal: Animal
            }
        '''

        # Remove INLINE_FRAGMENT from @filter
        schema_with_directive_missing_location = '''
            schema {
                query: RootSchemaQuery
            }
            directive @filter(op_name: String!, value: [String!]!) on FIELD
            directive @tag(tag_name: String!) on FIELD
            directive @output(out_name: String!) on FIELD
            directive @output_source on FIELD
            directive @optional on FIELD
            directive @fold on FIELD
            directive @recurse(depth: Int!) on FIELD
            type Animal {
                name: String
            }
            type RootSchemaQuery {
                Animal: Animal
            }
        '''

        # Change @output_source from FIELD to FIELD | INLINE_FRAGMENT
        schema_with_extra_directive_location = '''
            schema {
                query: RootSchemaQuery
            }
            directive @filter(op_name: String!, value: [String!]!) on FIELD | INLINE_FRAGMENT
            directive @tag(tag_name: String!) on FIELD
            directive @output(out_name: String!) on FIELD
            directive @output_source on FIELD | INLINE_FRAGMENT
            directive @optional on FIELD
            directive @fold on FIELD
            directive @recurse(depth: Int!) on FIELD
            type Animal {
                name: String
            }
            type RootSchemaQuery {
                Animal: Animal
            }
        '''

        incorrect_schemas = [
            schema_with_wrong_directive_on_inline_fragment,
            schema_with_directive_missing_location,
            schema_with_extra_directive_location,
        ]

        query = '''{
            Animal {
                name @output(out_name: "animal_name")
            }
        }'''

        for schema in incorrect_schemas:
            parsed_incorrect_schema = build_ast_schema(parse(schema))
            with self.assertRaises(GraphQLValidationError):
                graphql_to_ir(parsed_incorrect_schema, query)
Code Example #44
def test_does_not_get_missing_operation():
    doc = parse("{ field } mutation Test { field }")
    assert not get_operation_ast(doc)
Code Example #45
 def test(query):
     graphql.parse(query)
Code Example #46
 async def fetch_schema(self) -> None:
     execution_result = await self.transport.execute(
         parse(get_introspection_query())
     )
     self.client.introspection = execution_result.data
     self.client.schema = build_client_schema(self.client.introspection)
Code Example #47
def _compute_schema_text_fingerprint(schema_text: str):
    """Parse the schema text and compute the fingerprint of the GraphQLSchema."""
    return compute_schema_fingerprint(build_ast_schema(parse(schema_text)))
Code Example #48
 def test_complex_query_structure(self):
     query_str = dedent("""\
         {
           Animal {
             color @output(out_name: "color")
             out_Animal_Creature {
               age @output(out_name: "age")
               in_Animal_Creature {
                 description @output(out_name: "description")
               }
               friend {
                 in_Animal_Creature {
                   description @output(out_name: "friend_description")
                 }
               }
             }
           }
         }
     """)
     query_piece1_str = dedent("""\
         {
           Animal {
             color @output(out_name: "color")
             uuid @output(out_name: "__intermediate_output_0")
           }
         }
     """)
     query_piece2_str = dedent("""\
         {
           Creature {
             age @output(out_name: "age")
             id @output(out_name: "__intermediate_output_1")
             friend {
               id @output(out_name: "__intermediate_output_3")
             }
           }
         }
     """)
     query_piece3_str = dedent("""\
         {
           Animal {
             description @output(out_name: "description")
             uuid @output(out_name: "__intermediate_output_2")
           }
         }
     """)
     query_piece4_str = dedent("""\
         {
           Animal {
             description @output(out_name: "friend_description")
             uuid @output(out_name: "__intermediate_output_4")
           }
         }
     """)
     expected_query_node = ExpectedQueryNode(
         query_str=query_piece1_str,
         schema_id="first",
         child_query_nodes_and_out_names=[
             (
                 ExpectedQueryNode(
                     query_str=query_piece2_str,
                     schema_id="second",
                     child_query_nodes_and_out_names=[
                         (
                             ExpectedQueryNode(
                                 query_str=query_piece3_str,
                                 schema_id="first",
                                 child_query_nodes_and_out_names=[],
                             ),
                             "__intermediate_output_1",
                             "__intermediate_output_2",
                         ),
                         (
                             ExpectedQueryNode(
                                 query_str=query_piece4_str,
                                 schema_id="first",
                                 child_query_nodes_and_out_names=[],
                             ),
                             "__intermediate_output_3",
                             "__intermediate_output_4",
                         ),
                     ],
                 ),
                 "__intermediate_output_0",
                 "__intermediate_output_1",
             ),
         ],
     )
     query_node, intermediate_outputs = split_query(parse(query_str),
                                                    basic_merged_schema)
     self._check_query_node_structure(query_node, expected_query_node)
     self.assertEqual(intermediate_outputs,
                      self._get_intermediate_outputs_set(5))
Code Example #49
    def test_basic_make_query_plan(self):
        query_str = dedent('''\
            {
              Animal {
                out_Animal_Creature {
                  age @output(out_name: "age")
                }
              }
            }
        ''')
        parent_str = dedent('''\
            {
              Animal {
                uuid @output(out_name: "__intermediate_output_0")
              }
            }
        ''')
        child_str_no_filter = dedent('''\
            {
              Creature {
                age @output(out_name: "age")
                id @output(out_name: "__intermediate_output_1")
              }
            }
        ''')
        child_str_with_filter = dedent('''\
            {
              Creature {
                age @output(out_name: "age")
                id @output(out_name: "__intermediate_output_1") \
@filter(op_name: "in_collection", value: ["$__intermediate_output_0"])
              }
            }
        ''')
        query_node, intermediate_outputs = split_query(parse(query_str),
                                                       basic_merged_schema)
        query_plan_descriptor = make_query_plan(query_node,
                                                intermediate_outputs)
        # Check the child ASTs in the input query node are unchanged (@filter not added)
        child_query_node = query_node.child_query_connections[
            0].sink_query_node
        self.assertEqual(print_ast(child_query_node.query_ast),
                         child_str_no_filter)
        # Check the query plan
        parent_sub_query_plan = query_plan_descriptor.root_sub_query_plan
        self.assertEqual(print_ast(parent_sub_query_plan.query_ast),
                         parent_str)
        self.assertEqual(parent_sub_query_plan.schema_id, 'first')
        self.assertIsNone(parent_sub_query_plan.parent_query_plan)
        self.assertEqual(len(parent_sub_query_plan.child_query_plans), 1)
        # Check the child query plan
        child_sub_query_plan = parent_sub_query_plan.child_query_plans[0]
        self.assertEqual(print_ast(child_sub_query_plan.query_ast),
                         child_str_with_filter)
        self.assertEqual(child_sub_query_plan.schema_id, 'second')
        self.assertIs(child_sub_query_plan.parent_query_plan,
                      parent_sub_query_plan)
        self.assertEqual(len(child_sub_query_plan.child_query_plans), 0)
        # Check the output join descriptors
        output_join_descriptors = query_plan_descriptor.output_join_descriptors
        self.assertEqual(len(output_join_descriptors), 1)
        output_join_descriptor = output_join_descriptors[0]
        self.assertEqual(
            output_join_descriptor.output_names,
            ('__intermediate_output_0', '__intermediate_output_1'))
        # Check set of intermediate output names
        self.assertEqual(
            query_plan_descriptor.intermediate_output_names,
            {'__intermediate_output_0', '__intermediate_output_1'})
Code Example #50
def get_schema():
    """Get a schema object for testing."""
    # This schema isn't meant to be a paragon of good schema design.
    # Instead, it aims to capture as many real-world edge cases as possible,
    # without requiring a massive number of types and interfaces.
    schema_text = '''
        schema {
            query: RootSchemaQuery
        }

        directive @recurse(depth: Int!) on FIELD

        directive @filter(op_name: String!, value: [String!]!) on FIELD | INLINE_FRAGMENT

        directive @tag(tag_name: String!) on FIELD

        directive @output(out_name: String!) on FIELD

        directive @output_source on FIELD

        directive @optional on FIELD

        directive @fold on FIELD

        scalar DateTime

        scalar Date

        interface Entity {
            name: String
            description: String
            uuid: ID
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        type Animal implements Entity {
            name: String
            color: String
            description: String
            alias: [String]
            birthday: Date
            uuid: ID
            out_Animal_ParentOf: [Animal]
            in_Animal_ParentOf: [Animal]
            out_Animal_OfSpecies: [Species]
            out_Animal_FedAt: [Event]
            out_Animal_BornAt: [BirthEvent]
            out_Animal_ImportantEvent: [EventOrBirthEvent]
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        type Species implements Entity {
            name: String
            description: String
            alias: [String]
            uuid: ID
            out_Species_Eats: [FoodOrSpecies]
            in_Species_Eats: [Species]
            in_Animal_OfSpecies: [Animal]
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        type Food implements Entity {
            name: String
            origin: String
            description: String
            alias: [String]
            uuid: ID
            in_Species_Eats: [Species]
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        union FoodOrSpecies = Food | Species

        type Event implements Entity {
            name: String
            description: String
            uuid: ID
            event_date: DateTime
            in_Animal_FedAt: [Animal]
            in_Animal_ImportantEvent: [Animal]
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        # Assume that in the database, the below type is actually a subclass of Event.
        type BirthEvent implements Entity {
            name: String
            description: String
            uuid: ID
            event_date: DateTime
            in_Animal_FedAt: [Animal]
            in_Animal_BornAt: [Animal]
            in_Animal_ImportantEvent: [Animal]
            in_Entity_Related: [Entity]
            out_Entity_Related: [Entity]
        }

        # Because of the above, the base type for this union is Event.
        union EventOrBirthEvent = Event | BirthEvent

        type RootSchemaQuery {
            Animal: Animal
            BirthEvent: BirthEvent
            Entity: Entity
            Event: Event
            Food: Food
            Species: Species
        }
    '''

    ast = parse(schema_text)
    return build_ast_schema(ast)
Code Example #51
def test_should_not_allow_inline_fragments(validation_schema):
    sub = 'subscription S4{ ...on Subscription { test1 } }'
    errors = validate(validation_schema, parse(sub),
                      [SubscriptionHasSingleRootField])
    assert len(errors) == 1
    assert errors[0].message == 'Apollo subscriptions do not support\
Code Example #52
 def test_illegal_rename_to_double_underscore(self) -> None:
     with self.assertRaises(InvalidTypeNameError):
         rename_schema(parse(ISS.basic_schema), {"Human": "__Human"})
Code Example #53
def test_should_allow_another_valid_subscription(validation_schema):
    sub = 'subscription S1{ test1 } subscription S2{ test2 }'
    errors = validate(validation_schema, parse(sub),
                      [SubscriptionHasSingleRootField])
    assert len(errors) == 0
Code Example #54
 def test_builtin_rename(self) -> None:
     with self.assertRaises(NotImplementedError):
         rename_schema(
             parse(ISS.list_schema),
             {"String": "NewString"},
         )
Code Example #55
 def cached_parser(query, schema):
     source = Source(query, name='GraphQL request')
     ast = parse(source)
     validation_errors = validate(schema, ast)
     return ast, validation_errors
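A hedged sketch of how such a helper is typically memoized; lru_cache is an assumption here, not necessarily what the surrounding project used, and the wrapper name is hypothetical.

from functools import lru_cache

@lru_cache(maxsize=128)
def memoized_parser(query, schema):
    source = Source(query, name='GraphQL request')
    ast = parse(source)
    return ast, validate(schema, ast)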
Code Example #56
 def test_two_children_stitch_on_same_field(self):
     query_str = dedent("""\
         {
           Animal {
             out_Animal_Creature {
               age @output(out_name: "age1")
             }
             out_Animal_ParentOf {
               out_Animal_Creature {
                 age @output(out_name: "age2")
               }
             }
           }
         }
     """)
     parent_str = dedent("""\
         {
           Animal {
             uuid @output(out_name: "__intermediate_output_0")
             out_Animal_ParentOf {
               uuid @output(out_name: "__intermediate_output_2")
             }
           }
         }
     """)
     child_str1 = dedent("""\
         {
           Creature {
             age @output(out_name: "age1")
             id @output(out_name: "__intermediate_output_1")
           }
         }
     """)
     child_str2 = dedent("""\
         {
           Creature {
             age @output(out_name: "age2")
             id @output(out_name: "__intermediate_output_3")
           }
         }
     """)
     expected_query_node = ExpectedQueryNode(
         query_str=parent_str,
         schema_id="first",
         child_query_nodes_and_out_names=[
             (
                 ExpectedQueryNode(query_str=child_str1,
                                   schema_id="second",
                                   child_query_nodes_and_out_names=[]),
                 "__intermediate_output_0",
                 "__intermediate_output_1",
             ),
             (
                 ExpectedQueryNode(query_str=child_str2,
                                   schema_id="second",
                                   child_query_nodes_and_out_names=[]),
                 "__intermediate_output_2",
                 "__intermediate_output_3",
             ),
         ],
     )
     query_node, intermediate_outputs = split_query(parse(query_str),
                                                    basic_merged_schema)
     self._check_query_node_structure(query_node, expected_query_node)
     self.assertEqual(intermediate_outputs,
                      self._get_intermediate_outputs_set(4))
Code Example #57
 def test_illegal_rename_start_with_number(self) -> None:
     with self.assertRaises(InvalidTypeNameError):
         rename_schema(parse(ISS.basic_schema), {"Human": "0Human"})
Code Example #58
def test_execute_introspection_query(benchmark, big_schema_sdl):  # noqa: F811
    schema = build_schema(big_schema_sdl, assume_valid=True)
    document = parse(get_introspection_query())
    result = benchmark(lambda: execute_sync(schema=schema, document=document))
    assert result.errors is None
Code Example #59
File: test_scalars.py Project: zhangxsgit/prefect
 async def test_datetime_scalar_as_output(self):
     query = "query { datetime_output }"
     result = graphql.execute(schema, graphql.parse(query))
     assert (result.data["datetime_output"] == pendulum.datetime(
         2020, 1, 1, 1, tz="EST").isoformat())
Code Example #60
def test_parse_kitchen_sink(benchmark, kitchen_sink_query):  # noqa: F811
    query = benchmark(lambda: parse(kitchen_sink_query))
    assert isinstance(query, DocumentNode)