def test_add_id_1(self):
    """An `id: ID!` field is added to a type when field_for_id is enabled."""
    source = build_schema('''
        type Human
    ''')
    settings = {'generation': {'field_for_id': True}}
    wanted = build_schema('''
        type Human {
            id: ID!
        }
    ''')
    result = generator.run(source, settings)
    assert compare.is_equals_schema(result, wanted)
def test_add_lastUpdateDate_1(self):
    """A `_lastUpdateDate: DateTime` field (plus the scalar) is generated."""
    source = build_schema('''
        type Test
    ''')
    settings = {'generation': {'field_for_last_update_date': True}}
    wanted = build_schema('''
        scalar DateTime

        type Test {
            _lastUpdateDate: DateTime
        }
    ''')
    result = generator.run(source, settings)
    assert compare.is_equals_schema(result, wanted)
def generate(input_file, output_file):
    """Render the resolver template for the schema in *input_file*.

    Collects object types (with their object/interface/union-typed fields),
    interfaces and unions from the schema and feeds them to the template.
    The rendered text is printed to stdout when *output_file* is None,
    otherwise written to *output_file*.
    """
    # Load and parse the schema.
    with open(input_file, 'r') as f:
        schema_string = f.read()
    schema = build_schema(schema_string)

    data = {'types': [], 'interfaces': [], 'unions': []}

    for type_name, _type in schema.type_map.items():
        # Interfaces and unions are collected identically: the name plus
        # every concrete type that can appear behind them.
        if is_interface_type(_type) or is_union_type(_type):
            entry = {
                'name': type_name,
                'possible_types': [p.name for p in schema.get_possible_types(_type)],
            }
            key = 'interfaces' if is_interface_type(_type) else 'unions'
            data[key].append(entry)
        if is_schema_defined_object_type(_type):
            entry = {'name': type_name, 'fields': []}
            # Only fields whose (unwrapped) type is an object, interface or
            # union need a generated resolver.
            for field_name, field_type in _type.fields.items():
                inner_field_type = get_named_type(field_type.type)
                if is_schema_defined_object_type(inner_field_type) or \
                        is_interface_type(inner_field_type) or \
                        is_union_type(inner_field_type):
                    entry['fields'].append(field_name)
            sort_before_rendering(entry)
            data['types'].append(entry)

    # Sort for deterministic output.
    # BUG FIX: interface/union entries are dicts, so the previous plain
    # .sort() raised TypeError whenever those lists were non-empty; sort
    # them by name, the same way the types list is sorted.
    data['types'].sort(key=lambda x: x['name'])
    data['interfaces'].sort(key=lambda x: x['name'])
    data['unions'].sort(key=lambda x: x['name'])

    # Apply the template (plain string: the old f-string had no placeholders).
    template = Template(filename='resources/resolver.template')
    if output_file is None:
        print(template.render(data=data))
    else:
        with open(output_file, 'w') as f:
            updated_schema_string = template.render(data=data)
            # NOTE(review): this re-validates the *input* schema, not the
            # rendered template output — presumably a sanity check; confirm.
            api_schema = build_schema(schema_string)
            assert_valid_schema(api_schema)
            f.write(updated_schema_string)
def make_schema(
    type_defs: Union[str, List[str]],
    assume_valid: bool = False,
    assume_valid_sdl: bool = False,
    no_location: bool = False,
    experimental_fragment_variables: bool = False,
    federation: bool = False,
    directives: Dict[str, Type[SchemaDirectiveVisitor]] = None,
) -> GraphQLSchema:
    """Build an executable GraphQLSchema from SDL type definitions.

    :param type_defs: SDL string, or a list of SDL strings that are joined
        before parsing.
    :param federation: when True, augment the schema with Apollo Federation
        machinery (`_service`, and `_entities` when entity types exist).
    :param directives: optional mapping of directive name to
        SchemaDirectiveVisitor subclass applied to the built schema.
    """
    if isinstance(type_defs, list):
        type_defs = join_type_defs(type_defs)
    if federation:
        # Remove custom schema directives (to avoid apollo-gateway crashes).
        # `sdl` (directive-free SDL) is what the `_service` resolver reports.
        sdl = purge_schema_directives(type_defs)
        # remove subscription because Apollo Federation not support subscription yet.
        # type_defs = remove_subscription(type_defs)
        type_defs = join_type_defs([type_defs, federation_service_type_defs])
        schema = build_schema(
            type_defs,
            assume_valid,
            assume_valid_sdl,
            no_location,
            experimental_fragment_variables
        )
        # Entity types are those eligible for the `_Entity` union; only
        # extend the schema with `_entities` when at least one exists.
        entity_types = get_entity_types(schema)
        if entity_types:
            schema = extend_schema(schema, parse(federation_entity_type_defs))
            # Add _entities query.
            entity_type = schema.get_type("_Entity")
            if entity_type:
                entity_type = cast(GraphQLUnionType, entity_type)
                entity_type.types = entity_types
            query_type = schema.get_type("Query")
            if query_type:
                query_type = cast(GraphQLObjectType, query_type)
                query_type.fields["_entities"].resolve = resolve_entities
        # Add _service query.
        query_type = schema.get_type("Query")
        if query_type:
            query_type = cast(GraphQLObjectType, query_type)
            query_type.fields["_service"].resolve = lambda _service, info: {"sdl": sdl}
    else:
        schema = build_schema(
            type_defs,
            assume_valid,
            assume_valid_sdl,
            no_location,
            experimental_fragment_variables
        )
    if directives:
        SchemaDirectiveVisitor.visit_schema_directives(schema, directives)
    return schema
def test_get_fake_graphql_schema(self):
    """The fake endpoint module produces exactly its declared GraphQL schema."""
    # use the test module for basic graphQLSchema generation
    module = reflect.namedModule('buildbot.test.fake.endpoint')
    self.data._scanModule(module)
    sdl = self.data.get_graphql_schema()
    self.assertEqual(sdl, module.graphql_schema)
    # parsing raises if the generated SDL is invalid
    graphql.build_schema(sdl)
def from_file(
    file: Union[IO[str], str],
    *,
    app: Any = None,
    base_url: Optional[str] = None,
    data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
    code_sample_style: str = CodeSampleStyle.default().name,
    location: Optional[str] = None,
) -> GraphQLSchema:
    """Load GraphQL schema from a file descriptor or a string.

    :param file: Could be a file descriptor, string or bytes.
    """
    data = file if isinstance(file, str) else file.read()
    document = graphql.build_schema(data)
    execution = graphql.execute(document, INTROSPECTION_QUERY_AST)
    # TYPES: We don't pass `is_awaitable` above, therefore `execution` is of
    # the `ExecutionResult` type
    execution = cast(ExecutionResult, execution)
    # TYPES:
    # - `document` is a valid schema, because otherwise `build_schema` will rise an error;
    # - `INTROSPECTION_QUERY` is a valid query - it is known upfront;
    # Therefore the execution result is always valid at this point and
    # `execution.data` is not `None`
    raw_schema = cast(Dict[str, Any], execution.data)
    return from_dict(
        raw_schema,
        app=app,
        base_url=base_url,
        data_generation_methods=data_generation_methods,
        code_sample_style=code_sample_style,
        location=location,
    )
def test_get_graphql_schema(self):
    """The data connector emits the expected SDL for this test module."""
    if not graphql:
        raise unittest.SkipTest('Test requires graphql')
    # use the test module for basic graphQLSchema generation
    mod = reflect.namedModule('buildbot.test.unit.data.test_connector')
    self.data._scanModule(mod)
    schema = self.data.get_graphql_schema()
    # Exact-text comparison against the known-good SDL.
    self.assertEqual(
        schema,
        textwrap.dedent("""
        # custom scalar types for buildbot data model
        scalar Date   # stored as utc unix timestamp
        scalar Binary  # arbitrary data stored as base85
        scalar JSON  # arbitrary json stored as string, mainly used for properties values
        type Query {
          tests(testid: Int, testid__contains: Int, testid__eq: Int, testid__ge: Int, testid__gt: Int, testid__le: Int, testid__lt: Int, testid__ne: Int, order: String, limit: Int, offset: Int): [Test]!
          test(testid: Int): Test
        }
        type Test {
          testid: Int!
        }
        """))
    # graphql parses the schema and raises if it is invalid
    schema = graphql.build_schema(schema)
def test_scalar_literal_parser_can_be_set_on_initialization():
    """Passing literal_parser= to ScalarType wires parse_literal on bind."""
    scalar = ScalarType("DateInput", literal_parser=parse_date_literal)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.parse_literal is parse_date_literal
def prepare_graphql_query_for(schema_str: str, query_str: str) -> QueryContext:
    """Compile a schema and a query against it, and bundle everything needed
    for low-level testing.

    Args:
        schema_str: GraphQL schema SDL
        query_str: user query string

    Returns:
        QueryContext with the compiled schema, the parsed query, the
        low-level execution context, and a ResolveInfo for resolvers.
    """
    compiled_schema = graphql.build_schema(schema_str)
    parsed_query = graphql.parse(query_str)

    # Fail early on queries that do not match the schema.
    graphql.validate(compiled_schema, parsed_query)

    # Low-level execution context plus ResolveInfo for the top-level (query) object.
    exe_ctx: graphql.ExecutionContext = graphql.ExecutionContext.build(
        schema=compiled_schema, document=parsed_query)  # type: ignore[assignment]
    resolve_info = build_resolve_info_for(compiled_schema, parsed_query, exe_ctx)

    return QueryContext(
        schema=compiled_schema,
        query=parsed_query,
        execution_context=exe_ctx,
        info=resolve_info,
    )
def test_scalar_value_parser_can_be_set_on_initialization():
    """Passing value_parser= to ScalarType wires parse_value on bind."""
    scalar = ScalarType("DateReadOnly", value_parser=parse_date_str)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateReadOnly")
    assert bound.parse_value is parse_date_str
def test_scalar_serializer_can_be_set_on_initialization():
    """Passing serializer= to ScalarType wires serialize on bind."""
    scalar = ScalarType("DateInput", serializer=serialize_date)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.serialize is serialize_date
def test_get_graphql_schema(self):
    """The generated schema must be parseable by graphql-core."""
    if not graphql:
        raise unittest.SkipTest('Test requires graphql')
    sdl = self.data.get_graphql_schema()
    # graphql parses the schema and raises an error if it is incorrect
    # or incoherent (e.g. missing type definition)
    graphql.build_schema(sdl)
def test_scalar_value_parser_can_be_set_with_setter():
    """set_value_parser() wires parse_value on the bound schema scalar."""
    scalar = ScalarType("DateReadOnly")
    scalar.set_value_parser(parse_date_str)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateReadOnly")
    assert bound.parse_value is parse_date_str
def test_scalar_serializer_can_be_set_with_setter():
    """set_serializer() wires serialize on the bound schema scalar."""
    scalar = ScalarType("DateInput")
    scalar.set_serializer(serialize_date)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.serialize is serialize_date
def test_scalar_literal_parser_can_be_set_with_setter():
    """set_literal_parser() wires parse_literal on the bound schema scalar."""
    scalar = ScalarType("DateInput")
    scalar.set_literal_parser(parse_date_literal)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.parse_literal is parse_date_literal
def schema():
    """Fixture: a minimal Query type plus a custom Date scalar."""
    return build_schema(
        """
        type Query {
            hello: String
        }

        scalar Date
        """
    )
def schema():
    """Fixture: Query fields covering the different name casings."""
    return build_schema(
        """
        type Query {
            hello: Boolean
            snake_case: Boolean
            Camel: Boolean
            camelCase: Boolean
        }
        """
    )
def create_schema(extension):
    """Build the graph GraphQL schema, attach all resolvers, then extend it."""
    schema = graphql.build_schema("""
        type Query {
            node (id: String!): Node
            edge (id: String!): Edge
            attr (id: String!, type: String!): Attributes
            types (id: String): [String!]!
        }

        type Mutation {
            addLink (source: String!, link: String!, target: String!): String
        }

        interface Attributes {
            id: String!
        }

        type Node {
            id: String!
            outgoing: [Edge!]
            incoming: [Edge!]
            attr (type: String!): Attributes!
        }

        type Edge {
            id: String!
            source: Node!
            target: Node!
            attr (type: String!): Attributes!
        }
    """)

    # Attributes is an interface: dispatch on the stored 'type' key.
    schema.get_type('Attributes').resolve_type = lambda attr, *_: attr['type']

    # Declarative resolver wiring: (field map, {field name: resolver}).
    bindings = [
        (schema.query_type.fields, {
            'node': resolvers.query_node,
            'edge': resolvers.query_edge,
            'attr': resolvers.query_attr,
            'types': resolvers.query_types,
        }),
        (schema.mutation_type.fields, {
            'addLink': resolvers.create_link,
        }),
        (schema.get_type('Node').fields, {
            'id': resolvers.field_id,
            'outgoing': resolvers.field_outgoing,
            'incoming': resolvers.field_incoming,
            'attr': resolvers.field_attr,
        }),
        (schema.get_type('Edge').fields, {
            'id': resolvers.field_id,
            'source': resolvers.field_source,
            'target': resolvers.field_target,
            'attr': resolvers.field_attr,
        }),
    ]
    for field_map, resolver_map in bindings:
        for field_name, resolver in resolver_map.items():
            field_map[field_name].resolve = resolver

    return update_schema(schema, extension)
def test_add_id_2(self):
    """Running the generator on a type that already has `id` must fail.

    BUG FIX: the old ``try: ...; assert False / except: assert True`` form
    used a bare ``except:`` that caught its own ``AssertionError``, so the
    test passed even when generator.run did NOT raise.  try/except/else
    makes only a raise from generator.run count as success.
    """
    schema_in = build_schema('''
        type Human {
            id: ID!
        }
    ''')
    config = {'generation': {'field_for_id': True}}
    try:
        generator.run(schema_in, config)
    except Exception:
        pass
    else:
        assert False, 'generator.run should raise when id already exists'
def reconstruct(string) -> Tree:
    """Round-trip a GraphQL SDL string: parse it and print it back.

    Returns the re-printed schema text, which normalises formatting.
    Raises if graphql-core is not installed.
    """
    try:
        from graphql import build_schema, DocumentNode
        from .print_graphql_schema import print_schema
    except ImportError as err:
        # Chain the original error so the missing-dependency cause is visible.
        raise Exception('need graphql-core package to reconstruct graphql') from err
    schema = build_schema(string)
    # NOTE(review): the annotation says Tree but this returns print_schema's
    # output directly — confirm Tree is an alias for that text type.
    skema = print_schema(schema)
    return skema
def test_add_input_to_create_1(self):
    """An _InputToCreateHuman input (without the id field) is generated."""
    source = build_schema('''
        type Human {
            id: ID!
            name: String!
        }
    ''')
    settings = {'generation': {'input_to_create_objects': True}}
    wanted = build_schema('''
        type Human {
            id: ID!
            name: String!
        }

        input _InputToCreateHuman {
            name: String!
        }
    ''')
    result = generator.run(source, settings)
    assert compare.is_equals_schema(result, wanted)
def test_datetime_scalar_1(self):
    """generate_datetime on a schema without DateTime must not raise.

    BUG FIX: the old ``try: ...; assert True / except: assert False``
    pattern used a bare ``except:`` that swallowed the real traceback and
    reported only a meaningless ``assert False``.  Calling run() directly
    lets any failure surface with its actual error.
    """
    schema_in = build_schema('''
        type datetime_test
    ''')
    config = {'generation': {'generate_datetime': True}}
    run(schema_in, config)
def test_get_fake_graphql_schema(self):
    """The fake endpoint module yields exactly its declared GraphQL schema."""
    if not graphql:
        raise unittest.SkipTest('Test requires graphql')
    # use the test module for basic graphQLSchema generation
    module = reflect.namedModule('buildbot.test.fake.endpoint')
    self.data._scanModule(module)
    sdl = self.data.get_graphql_schema()
    self.assertEqual(sdl, module.graphql_schema)
    # parsing raises if the generated SDL is invalid
    graphql.build_schema(sdl)
def test_setting_scalar_value_parser_sets_default_literal_parsers_if_none_is_set():
    """Setting only a value parser also installs a default literal parser."""
    scalar = ScalarType("DateInput")
    scalar.set_value_parser(parse_date_value)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.parse_value is parse_date_value
    assert bound.parse_literal
def query(schema: Union[str, graphql.GraphQLSchema]) -> st.SearchStrategy[str]:
    """A strategy for generating valid queries for the given GraphQL schema."""
    # Accept either SDL text or an already-built schema object.
    parsed_schema = graphql.build_schema(schema) if isinstance(schema, str) else schema
    if parsed_schema.query_type is None:
        raise ValueError("Query type is not defined in the schema")
    # Generate field selections, wrap them in a query document, print as text.
    strategy = fields(parsed_schema.query_type)
    return strategy.map(make_query).map(graphql.print_ast)
def test_datetime_scalar_2(self):
    """generate_datetime must not raise when DateTime is already declared.

    BUG FIX: the old ``try: ...; assert True / except: assert False``
    pattern used a bare ``except:`` that swallowed the real traceback.
    Calling generator.run directly lets any failure surface with its
    actual error.
    """
    schema_in = build_schema('''
        scalar DateTime

        type datetime_test {
            date: DateTime!
        }
    ''')
    config = {'generation': {'generate_datetime': True}}
    generator.run(schema_in, config)
def get_schema():
    """Return the GraphQL schema for the current site, building and
    caching it in the module-level per-site dict on first use."""
    global graphql_schemas
    site = frappe.local.site
    if site not in graphql_schemas:
        schema = graphql.build_schema(get_typedefs())
        execute_schema_processors(schema=schema)
        graphql_schemas[site] = schema
    return graphql_schemas[site]
def test_setting_scalar_value_parser_doesnt_override_already_set_literal_parser():
    """An explicitly set literal parser survives a later set_value_parser()."""
    scalar = ScalarType("DateInput")
    scalar.set_literal_parser(parse_date_literal)
    scalar.set_value_parser(parse_date_str)
    schema = build_schema(type_defs)
    scalar.bind_to_schema(schema)
    bound = schema.type_map.get("DateInput")
    assert bound.parse_value is parse_date_str
    assert bound.parse_literal is parse_date_literal
def get_schema():
    """Return the process-wide GraphQL schema, building it on first use."""
    global graphql_schema
    if graphql_schema is None:
        schema = graphql.build_schema(get_typedefs())
        execute_schema_processors(schema=schema)
        graphql_schema = schema
    return graphql_schema
def schema():
    """Fixture: a schema with both a Query and a Subscription type."""
    return build_schema(
        """
        type Query {
            hello: String
        }

        type Subscription {
            message: String!
        }
        """
    )