async def test_tartiflette_non_introspectable_execution_directive():
    # SDL under test: one plain field and one field hidden from introspection
    # via the @nonIntrospectable directive.
    # NOTE(review): "fieldHiddendToIntrospactable" is misspelled, but the
    # spelling is used consistently, so the test still holds together.
    schema = """
    type Query {
        fieldNormal: Int
        fieldHiddendToIntrospactable: Int @nonIntrospectable
    }
    """

    @Resolver(
        "Query.fieldNormal",
        schema_name="test_tartiflette_non_introspectable_execution_directive",
    )
    async def func_field_resolver4(parent, arguments, request_ctx, info):
        return 42

    @Resolver(
        "Query.fieldHiddendToIntrospactable",
        schema_name="test_tartiflette_non_introspectable_execution_directive",
    )
    async def func_field_resolver5(parent, arguments, request_ctx, info):
        return 42

    ttftt = await create_engine(
        schema,
        schema_name="test_tartiflette_non_introspectable_execution_directive",
    )

    # The directive must be known to the baked schema and must carry a
    # (builtin) implementation.
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_non_introspectable_execution_directive").
            find_directive("nonIntrospectable") is not None)
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_non_introspectable_execution_directive").
            find_directive("nonIntrospectable").implementation is not None)

    result = await ttftt.execute(
        """
        query Test{
            __type(name: "Query") {
                fields {
                    name
                    isDeprecated
                    deprecationReason
                }
            }
        }
        """,
        operation_name="Test",
    )

    # Only the non-hidden field shows up in the introspection result.
    assert {
        "data": {
            "__type": {
                "fields": [{
                    "name": "fieldNormal",
                    "isDeprecated": False,
                    "deprecationReason": None,
                }]
            }
        }
    } == result
def __call__(self, implementation):
    """Register the decorated directive implementation on the schema.

    Classes are instantiated before being stored; ready-made objects or
    plain callables are stored as-is. The original decorated object is
    returned unchanged so the decorator is transparent.
    """
    self._implementation = (
        implementation() if isclass(implementation) else implementation
    )
    SchemaRegistry.register_directive(self._schema_name, self)
    return implementation
async def test_tartiflette_deprecated_execution_directive():
    # SDL under test: @deprecated must not prevent the fields from executing.
    schema = """
    type Query {
        fieldNormal: Int
        fieldDeprecatedDefault: Int @deprecated
        fieldDeprecatedCustom: Int @deprecated(reason: "Unused anymore")
    }
    """

    @Resolver(
        "Query.fieldNormal",
        schema_name="test_tartiflette_deprecated_execution_directive",
    )
    async def func_field_resolver4(parent, arguments, request_ctx, info):
        return 42

    @Resolver(
        "Query.fieldDeprecatedDefault",
        schema_name="test_tartiflette_deprecated_execution_directive",
    )
    async def func_field_resolver5(parent, arguments, request_ctx, info):
        return 42

    @Resolver(
        "Query.fieldDeprecatedCustom",
        schema_name="test_tartiflette_deprecated_execution_directive",
    )
    async def func_field_resolver6(parent, arguments, request_ctx, info):
        return 42

    ttftt = await create_engine(
        schema, schema_name="test_tartiflette_deprecated_execution_directive")

    # The builtin @deprecated directive must be registered with an
    # implementation.
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_deprecated_execution_directive").find_directive(
            "deprecated") is not None)
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_deprecated_execution_directive").find_directive(
            "deprecated").implementation is not None)

    result = await ttftt.execute(
        """
        query Test{
            fieldNormal
            fieldDeprecatedDefault
            fieldDeprecatedCustom
        }
        """,
        operation_name="Test",
    )

    # All three fields resolve normally, deprecated or not.
    assert {
        "data": {
            "fieldNormal": 42,
            "fieldDeprecatedDefault": 42,
            "fieldDeprecatedCustom": 42,
        }
    } == result
def __call__(self, implementation: Callable) -> Callable:
    """Validate and register a subscription implementation.

    :param implementation: async generator implementing the subscription
    :return: the unchanged implementation
    :raises NonAsyncGeneratorSubscription: when the decorated object is not
        an async generator function
    """
    if isasyncgenfunction(implementation):
        SchemaRegistry.register_subscription(self._schema_name, self)
        self._implementation = implementation
        return implementation
    raise NonAsyncGeneratorSubscription(
        "The subscription `{}` given is not an awaitable "
        "generator.".format(repr(implementation)))
def test_schema_registry_register(clean_registry, schema_name, where, obj):
    """First registration succeeds; a duplicate raises ImproperlyConfigured."""
    SchemaRegistry._register(schema_name, where, obj)

    with pytest.raises(ImproperlyConfigured) as excinfo:
        SchemaRegistry._register(schema_name, where, obj)

    expected = (
        "Can't register < %s > to < %s > %s because it's already registered"
        % (obj.name, schema_name, where))
    assert str(excinfo.value) == expected
def __call__(self, implementation):
    """Register a directive implementation whose hook must be a coroutine.

    :raises NonAwaitableDirective: when `on_field_execution` is not a
        coroutine function
    """
    if iscoroutinefunction(implementation.on_field_execution):
        SchemaRegistry.register_directive(self._schema_name, self)
        self._implementation = implementation
        return implementation
    raise NonAwaitableDirective("%s is not awaitable" % repr(implementation))
async def bake(
    self,
    custom_default_resolver: Optional[Callable] = None,
    custom_default_type_resolver: Optional[Callable] = None,
) -> None:
    """
    Bake the final schema (it should not change after this) used for
    execution.
    :param custom_default_resolver: callable that will replace the builtin
    default_resolver
    :param custom_default_type_resolver: callable that will replace the
    tartiflette `default_type_resolver` (will be called on abstract types to
    deduct the type of a result)
    :type custom_default_resolver: Optional[Callable]
    :type custom_default_type_resolver: Optional[Callable]
    """
    self.default_type_resolver = (custom_default_type_resolver
                                  or default_type_resolver)
    self._inject_introspection_fields()
    self._validate_extensions()  # Validate this before bake
    # TODO maybe a pre_bake/post_bake thing
    try:
        self._bake_extensions()
    except Exception:  # pylint: disable=broad-except
        # Exceptions should be collected at validation time
        pass
    SchemaRegistry.bake_registered_objects(self)
    try:
        await self._bake_types(custom_default_resolver)
    except Exception:  # pylint: disable=broad-except
        # Exceptions should be collected at validation time
        pass
    self._validate()
    # Bake introspection attributes
    self._operation_types = {
        "query": self.type_definitions.get(self.query_operation_name),
        "mutation": self.type_definitions.get(self.mutation_operation_name),
        "subscription": self.type_definitions.get(
            self.subscription_operation_name),
    }
    # camelCase attribute names below mirror the GraphQL introspection
    # schema (__Schema.queryType etc.), hence the non-PEP8 style.
    self.queryType = self._operation_types["query"]
    self.mutationType = self._operation_types["mutation"]
    self.subscriptionType = self._operation_types["subscription"]
    self.directives = list(self._directive_definitions.values())
    # Dunder-prefixed types are introspection internals and stay hidden.
    for type_name, type_definition in self.type_definitions.items():
        if not type_name.startswith("__"):
            self.types.append(type_definition)
def __call__(self, resolver: Callable) -> Callable:
    """Register a type resolver on the schema and return it unchanged.

    :param resolver: implementation of the type resolver
    :type resolver: Callable
    :return: the implementation of the type resolver
    :rtype: Callable
    """
    # Store the implementation first, then make it discoverable.
    self._implementation = resolver
    SchemaRegistry.register_type_resolver(self._schema_name, self)
    return resolver
async def cook(
    self,
    sdl: Union[str, List[str]] = None,
    error_coercer: Callable[[Exception], dict] = None,
    custom_default_resolver: Optional[Callable] = None,
    modules: Optional[Union[str, List[str]]] = None,
    schema_name: str = None,
):
    """
    Cook the tartiflette, i.e. prepare the engine by binding it to given
    modules using the schema_name as a key. You won't be able to execute a
    request if the engine hasn't been cooked. Has no effect if the engine
    has already been cooked.
    Keyword Arguments:
        sdl {Union[str, List[str]]} -- The SDL to work with.
        schema_name {str} -- The name of the SDL (default: {"default"})
        error_coercer {Callable[[Exception, dict], dict]} -- An optional
        callable in charge of transforming a couple Exception/error into an
        error dict (default: {default_error_coercer})
        custom_default_resolver {Optional[Callable]} -- An optional callable
        that will replace the tartiflette default_resolver (Will be called
        like a resolver for each UNDECORATED field) (default: {None})
        modules {Optional[Union[str, List[str]]]} -- An optional list of
        string containing the name of the modules you want the engine to
        import, usually this modules contains your Resolvers, Directives,
        Scalar or Subscription code (default: {None})
    """
    # Cooking is idempotent: a second call is a no-op.
    if self._cooked:
        return

    # Arguments fall back on the values captured at construction time.
    if modules is None:
        modules = self._modules or []
    if isinstance(modules, str):
        modules = [modules]

    sdl = sdl or self._sdl
    if not sdl:
        raise Exception("Please provide a SDL")

    schema_name = schema_name or self._schema_name or "default"

    custom_default_resolver = (custom_default_resolver
                               or self._custom_default_resolver)
    if custom_default_resolver and not iscoroutinefunction(
            custom_default_resolver):
        raise Exception(f"Given custom_default_resolver "
                        f"{custom_default_resolver} "
                        f"is not a coroutine function")

    self._error_coercer = error_coercer_factory(error_coercer
                                                or self._error_coercer
                                                or default_error_coercer)
    # Importing modules may contribute extra SDL chunks (modules_sdl).
    self._modules, modules_sdl = await _import_modules(
        modules, schema_name)
    SchemaRegistry.register_sdl(schema_name, sdl, modules_sdl)
    self._schema = SchemaBakery.bake(schema_name, custom_default_resolver)
    self._cooked = True
def __call__(self, resolver: Callable) -> Callable:
    """Validate that *resolver* is a coroutine and register it on the schema.

    :param resolver: the resolver implementation being decorated
    :return: the unchanged resolver
    :raises NonAwaitableResolver: when the resolver is not a coroutine
        function
    """
    if iscoroutinefunction(resolver):
        SchemaRegistry.register_resolver(self._schema_name, self)
        self._implementation = resolver
        return resolver
    raise NonAwaitableResolver(
        "The resolver `{}` given is not awaitable.".format(repr(resolver)))
def __call__(self, implementation: type) -> Any:
    """
    Registers the directive into the schema.
    :param implementation: implementation of the directive
    :type implementation: type
    :return: the implementation of the directive
    :rtype: Any
    """
    # Classes get instantiated; ready-made objects are stored as-is.
    self._implementation = (
        implementation() if isclass(implementation) else implementation
    )
    SchemaRegistry.register_directive(self._schema_name, self)
    return implementation
async def test_tartiflette_engine_initialization_with_sdl_folder(path):
    """An engine built from an SDL folder exposes every type defined in it."""
    schema_name = (
        f"{path}_test_tartiflette_engine_initialization_with_sdl_folder")
    engine = await create_engine(path, schema_name=schema_name)

    schema = SchemaRegistry.find_schema(schema_name)
    for type_name in ("Author", "Blog", "Post"):
        assert schema.find_type(type_name) is not None
    assert schema.find_type("Query").find_field("blogs") is not None
async def test_tartiflette_engine_initialization_with_string_schema():
    """An engine built from an inline SDL string exposes every declared type."""
    engine = await create_engine(
        """
        directive @relation(name: String!) on FIELD_DEFINITION
        directive @default(value: Int!) on FIELD_DEFINITION

        type Post {
            id: ID!
            title: String!
            publishedAt: Int!
            likes: Int! @default(value: 0)
            author: Author! @relation(name: "Posts")
            blog: Blog @relation(name: "Posts")
        }

        type Author {
            id: ID!
            name: String!
            posts: [Post!]! @relation(name: "Author")
        }

        type Blog {
            id: ID!
            name: String!
            description: String,
            authors: [Author!]!
            posts: [Post!]! @relation(name: "Posts")
        }

        type Query {
            authors: [Author!]!
            blogs: [Blog!]!
        }
        """,
        schema_name="test_tartiflette_engine_initialization_with_string_schema",
    )

    schema = SchemaRegistry.find_schema(
        "test_tartiflette_engine_initialization_with_string_schema")
    for type_name in ("Author", "Blog", "Post"):
        assert schema.find_type(type_name) is not None
    assert schema.find_type("Query").find_field("blogs") is not None
def __call__(self, implementation: Callable) -> Callable:
    """
    Registers the subscription generator into the schema.
    :param implementation: implementation of the subscription generator
    :type implementation: Callable
    :return: the implementation of the subscription generator
    :rtype: Callable
    """
    if is_valid_async_generator(implementation):
        SchemaRegistry.register_subscription(self._schema_name, self)
        self._implementation = implementation
        return implementation
    raise NonAsyncGeneratorSubscription(
        "The subscription < {} > given is not an awaitable "
        "generator.".format(repr(implementation)))
async def test_engine(clean_registry):
    """create_engine registers schemas under their name ("default" if omitted)."""
    from tartiflette.schema.registry import SchemaRegistry

    default_engine = await create_engine("type Query { a:String }")
    default_schema = SchemaRegistry.find_schema()
    assert default_schema is not None
    assert default_schema.name == "default"

    named_engine = await create_engine("type Query { a:String }", "Bob")
    bob_schema = SchemaRegistry.find_schema("Bob")
    assert bob_schema is not None
    assert bob_schema.name == "Bob"
    # The two names map to two distinct schema instances.
    assert bob_schema != default_schema
async def test_tartiflette_engine_initialization_with_single_sdl_file():
    """An engine can be built from a single .sdl file path."""
    engine = await create_engine(
        _curr_path + "/data/simple_full_sdl/simple_full.sdl",
        schema_name=(
            "test_tartiflette_engine_initialization_with_single_sdl_file"),
    )

    schema = SchemaRegistry.find_schema(
        "test_tartiflette_engine_initialization_with_single_sdl_file")
    for type_name in ("Author", "Blog", "Post"):
        assert schema.find_type(type_name) is not None
    assert schema.find_type("Query").find_field("blogs") is not None
def __call__(self, implementation: Callable) -> Callable:
    """
    Registers the resolver into the schema.
    :param implementation: implementation of the resolver
    :type implementation: Callable
    :return: the implementation of the resolver
    :rtype: Callable
    """
    if not is_valid_coroutine(implementation):
        raise NonAwaitableResolver(
            f"The resolver `{repr(implementation)}` given is not awaitable."
        )

    # An optional type_resolver may accompany the resolver; when provided it
    # must at least be callable.
    has_type_resolver = self._type_resolver is not None
    if has_type_resolver and not callable(self._type_resolver):
        raise NonCallable(
            "The < type_resolver > parameter of the resolver "
            f"`{repr(implementation)}` has to be a callable.")

    SchemaRegistry.register_resolver(self._schema_name, self)
    self._implementation = implementation
    return implementation
def _preheat(schema_name: str) -> "GraphQLSchema":
    """
    Loads the SDL and converts it to a GraphQLSchema instance before baking
    each registered objects of this schema.
    :param schema_name: name of the schema to treat
    :type schema_name: str
    :return: a pre-baked GraphQLSchema instance
    :rtype: GraphQLSchema
    """
    schema_info = SchemaRegistry.find_schema_info(schema_name)
    # Convert the registered SDL and cache the instance on the registry entry.
    schema_info["inst"] = schema_from_sdl(
        schema_info["sdl"], schema_name=schema_name)
    return schema_info["inst"]
def _preheat(schema_name: str) -> GraphQLSchema:
    """Build (or extend) the GraphQLSchema for *schema_name* and bake every
    registered object onto it.

    :param schema_name: name of the schema to treat
    :return: the (cached) GraphQLSchema instance
    """
    schema_info = SchemaRegistry.find_schema_info(schema_name)
    # Reuse a previously built instance when one is cached.
    schema = schema_info.get("inst", GraphQLSchema(name=schema_name))
    build_graphql_schema_from_sdl(schema_info["sdl"], schema=schema)
    for object_ids in _SCHEMA_OBJECT_IDS:
        for registered in schema_info.get(object_ids, {}).values():
            registered.bake(schema)
    schema_info["inst"] = schema
    return schema
def __init__(
    self,
    sdl: Union[str, List[str]],
    schema_name: str = "default",
    error_coercer: Callable[[Exception], dict] = default_error_coercer,
    custom_default_resolver: Optional[Callable] = None,
    exclude_builtins_scalars: Optional[List[str]] = None,
    modules: Optional[Union[str, List[str]]] = None,
) -> None:
    """Create an engine by analyzing the SDL and connecting it with the
    imported Resolver, Mutation, Subscription, Directive and Scalar linking
    them through the schema_name. Then using `await an_engine.execute(query)`
    will resolve your GQL requests.

    Arguments:
        sdl {Union[str, List[str]]} -- The SDL to work with.

    Keyword Arguments:
        schema_name {str} -- The name of the SDL (default: {"default"})
        error_coercer {Callable[[Exception], dict]} -- An optional callable
        in charge of transforming an Exception into an error dict
        (default: {default_error_coercer})
        custom_default_resolver {Optional[Callable]} -- An optional callable
        that will replace the tartiflette default_resolver (Will be called
        like a resolver for each UNDECORATED field) (default: {None})
        exclude_builtins_scalars {Optional[List[str]]} -- An optional list
        of string containing the names of the builtin scalar you don't want
        to be automatically included, usually it's Date, DateTime or Time
        scalars (default: {None})
        modules {Optional[Union[str, List[str]]]} -- An optional list of
        string containing the name of the modules you want the engine to
        import, usually this modules contains your Resolvers, Directives,
        Scalar or Subscription code (default: {None})
    """
    # A single module name behaves as a one-element list.
    if isinstance(modules, str):
        modules = [modules]
    self._modules = _import_modules(modules)

    self._error_coercer = error_coercer
    self._parser = TartifletteRequestParser()
    # Registering the SDL then baking wires every decorated object to the
    # schema keyed by schema_name.
    SchemaRegistry.register_sdl(schema_name, sdl, exclude_builtins_scalars)
    self._schema = SchemaBakery.bake(
        schema_name, custom_default_resolver, exclude_builtins_scalars)
async def test_tartiflette_engine_initialization_with_sdl_file_list():
    """An engine can be built from an explicit list of .sdl file paths."""
    sdl_files = [
        _curr_path + "/data/splitted_sdl/" + file_name
        for file_name in (
            "directives.sdl",
            "author.sdl",
            "blog.sdl",
            "post.sdl",
            "query.sdl",
        )
    ]
    engine = await create_engine(
        sdl_files,
        schema_name="test_tartiflette_engine_initialization_with_sdl_file_list",
    )

    schema = SchemaRegistry.find_schema(
        "test_tartiflette_engine_initialization_with_sdl_file_list")
    for type_name in ("Author", "Blog", "Post"):
        assert schema.find_type(type_name) is not None
    assert schema.find_type("Query").find_field("blogs") is not None
def _preheat(
        schema_name: str,
        exclude_builtins_scalars: Optional[List[str]]) -> GraphQLSchema:
    """Build the GraphQLSchema for *schema_name*, inject default objects and
    bake every registered object onto it.

    :param schema_name: name of the schema to treat
    :param exclude_builtins_scalars: names of builtin scalars to skip
    :return: the (cached) GraphQLSchema instance
    """
    schema_info = SchemaRegistry.find_schema_info(schema_name)
    # Reuse a previously built instance when one is cached.
    schema = schema_info.get("inst", GraphQLSchema(name=schema_name))
    build_graphql_schema_from_sdl(schema_info["sdl"], schema=schema)
    # Inject builtin scalars/objects, minus the explicitly excluded ones.
    SchemaBakery._inject_default_object(schema_name, exclude_builtins_scalars)
    for object_ids in _SCHEMA_OBJECT_IDS:
        for registered in schema_info.get(object_ids, {}).values():
            registered.bake(schema)
    schema_info["inst"] = schema
    return schema
def clean_registry():
    """Pytest fixture: yield a freshly-cleaned SchemaRegistry, then wipe it
    again once the test is done so state never leaks between tests."""
    SchemaRegistry.clean()
    yield SchemaRegistry
    SchemaRegistry.clean()
async def cook(
    self,
    sdl: Union[str, List[str]] = None,
    error_coercer: Callable[[Exception, Dict[str, Any]],
                            Dict[str, Any]] = None,
    custom_default_resolver: Optional[Callable] = None,
    custom_default_type_resolver: Optional[Callable] = None,
    modules: Optional[Union[str, List[str], List[Dict[str, Any]]]] = None,
    query_cache_decorator: Optional[Callable] = UNDEFINED_VALUE,
    json_loader: Optional[Callable[[str], Dict[str, Any]]] = None,
    custom_default_arguments_coercer: Optional[Callable] = None,
    coerce_list_concurrently: Optional[bool] = None,
    schema_name: Optional[str] = None,
) -> None:
    """
    Cook the tartiflette, basically prepare the engine by binding it to
    given modules using the schema_name as a key. You wont be able to
    execute a request if the engine wasn't cooked.
    :param sdl: path or list of path to the files / directories containing
    the SDL
    :param error_coercer: callable in charge of transforming a couple
    Exception/error into an error dictionary
    :param custom_default_resolver: callable that will replace the builtin
    default_resolver (called as resolver for each UNDECORATED field)
    :param custom_default_type_resolver: callable that will replace the
    tartiflette `default_type_resolver` (will be called on abstract types
    to deduct the type of a result)
    :param modules: list of string containing the name of the modules you
    want the engine to import, usually this modules contains your
    Resolvers, Directives, Scalar or Subscription code
    :param query_cache_decorator: callable that will replace the
    tartiflette default lru_cache decorator to cache query parsing
    :param json_loader: A callable that will replace default python json
    module.loads for ast_json loading
    :param custom_default_arguments_coercer: callable that will replace the
    tartiflette `default_arguments_coercer`
    :param coerce_list_concurrently: whether or not list will be coerced
    concurrently
    :param schema_name: name of the SDL
    :type sdl: Union[str, List[str]]
    :type error_coercer: Callable[[Exception, Dict[str, Any]], Dict[str, Any]]
    :type custom_default_resolver: Optional[Callable]
    :type custom_default_type_resolver: Optional[Callable]
    :type modules: Optional[Union[str, List[str], List[Dict[str, Any]]]]
    :type query_cache_decorator: Optional[Callable]
    :type json_loader: Optional[Callable[[str], Dict[str, Any]]]
    :type custom_default_arguments_coercer: Optional[Callable]
    :type coerce_list_concurrently: Optional[bool]
    :type schema_name: Optional[str]
    """
    # pylint: disable=too-many-arguments,too-many-locals
    # Cooking is idempotent: once cooked, later calls are no-ops.
    if self._cooked:
        return

    # Arguments fall back on the values captured at construction time.
    if modules is None:
        modules = self._modules or []
    if isinstance(modules, str):
        modules = [modules]

    sdl = sdl or self._sdl
    if not sdl:
        raise Exception("Please provide a SDL")

    schema_name = schema_name or self._schema_name or "default"

    # Validate every user-supplied callable before any registration happens.
    custom_error_coercer = error_coercer or self._error_coercer
    if custom_error_coercer and not is_valid_coroutine(
            custom_error_coercer):
        raise NonCoroutine(
            "Given < error_coercer > is not a coroutine callable.")

    custom_default_resolver = (custom_default_resolver
                               or self._custom_default_resolver)
    if custom_default_resolver and not is_valid_coroutine(
            custom_default_resolver):
        raise NonCoroutine(
            "Given < custom_default_resolver > is not a coroutine callable."
        )

    custom_default_type_resolver = (custom_default_type_resolver
                                    or self._custom_default_type_resolver)
    if custom_default_type_resolver and not callable(
            custom_default_type_resolver):
        raise NonCallable(
            "Given < custom_default_type_resolver > is not a coroutine callable."
        )

    custom_default_arguments_coercer = (
        custom_default_arguments_coercer
        or self._custom_default_arguments_coercer)
    if custom_default_arguments_coercer and not is_valid_coroutine(
            custom_default_arguments_coercer):
        raise NonCoroutine(
            "Given < custom_default_arguments_coercer > is not a "
            "coroutine callable.")

    self._error_coercer = error_coercer_factory(custom_error_coercer
                                                or default_error_coercer)
    # Importing modules may contribute extra SDL chunks (modules_sdl).
    self._modules, modules_sdl = await _import_modules(
        modules, schema_name)
    SchemaRegistry.register_sdl(schema_name, sdl, modules_sdl)
    self._schema = await SchemaBakery.bake(
        schema_name,
        custom_default_resolver,
        custom_default_type_resolver,
        custom_default_arguments_coercer,
        (coerce_list_concurrently if coerce_list_concurrently is not None
         else self._coerce_list_concurrently),
    )
    self._build_response = partial(build_response,
                                   error_coercer=self._error_coercer)
    (
        self._query_executor,
        self._subscription_executor,
    ) = self._schema.bake_execute(self._perform_query,
                                  self._perform_subscription)

    # UNDEFINED_VALUE sentinel distinguishes "not passed" from an explicit
    # None (which disables query caching).
    if query_cache_decorator is UNDEFINED_VALUE:
        query_cache_decorator = self._query_cache_decorator

    self._cached_parse_and_validate_query = (
        query_cache_decorator(parse_and_validate_query)
        if callable(query_cache_decorator) else parse_and_validate_query)

    self._schema.json_loader = json_loader or self._json_loader

    self._cooked = True
async def test_tartiflette_deprecated_introspection_directive():
    # SDL: one normal field plus two deprecated ones (default and custom
    # reason) — introspection must surface both deprecation states.
    schema = """
    type Query {
        fieldNormal: Int
        fieldDeprecatedDefault: Int @deprecated
        fieldDeprecatedCustom: Int @deprecated(reason: "Unused anymore")
    }
    """

    @Resolver(
        "Query.fieldNormal",
        schema_name="test_tartiflette_deprecated_introspection_directive",
    )
    async def func_field_resolver4(parent, arguments, request_ctx, info):
        return 42

    @Resolver(
        "Query.fieldDeprecatedDefault",
        schema_name="test_tartiflette_deprecated_introspection_directive",
    )
    async def func_field_resolver5(parent, arguments, request_ctx, info):
        return 42

    @Resolver(
        "Query.fieldDeprecatedCustom",
        schema_name="test_tartiflette_deprecated_introspection_directive",
    )
    async def func_field_resolver6(parent, arguments, request_ctx, info):
        return 42

    ttftt = await create_engine(
        schema,
        schema_name="test_tartiflette_deprecated_introspection_directive",
    )

    # The builtin @deprecated directive must be registered with an
    # implementation.
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_deprecated_introspection_directive").find_directive(
            "deprecated") is not None)
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_deprecated_introspection_directive").find_directive(
            "deprecated").implementation is not None)

    result = await ttftt.execute(
        """
        query Test{
            __type(name: "Query") {
                fields(includeDeprecated: true) {
                    name
                    isDeprecated
                    deprecationReason
                }
            }
        }
        """,
        operation_name="Test",
    )

    # includeDeprecated: true lists all three fields; default reason is
    # "No longer supported", the custom one is echoed back verbatim.
    assert {
        "data": {
            "__type": {
                "fields": [
                    {
                        "name": "fieldNormal",
                        "isDeprecated": False,
                        "deprecationReason": None,
                    },
                    {
                        "name": "fieldDeprecatedDefault",
                        "isDeprecated": True,
                        "deprecationReason": "No longer supported",
                    },
                    {
                        "name": "fieldDeprecatedCustom",
                        "isDeprecated": True,
                        "deprecationReason": "Unused anymore",
                    },
                ]
            }
        }
    } == result
def pytest_runtest_setup(item):
    # Pytest hook: for tests carrying the ttftt-engine marker, re-binds the
    # marker-provided resolvers/type_resolvers/subscriptions/directives onto
    # the pre-built engine for that schema, then re-bakes the schema.
    marker = _get_ttftt_engine_marker(item)
    if not marker or not getattr(item, "allow_schema_bake", True):
        return

    resolvers = marker.kwargs.get("resolvers") or {}
    type_resolvers = marker.kwargs.get("type_resolvers") or {}
    subscriptions = marker.kwargs.get("subscriptions") or {}
    directives = marker.kwargs.get("directives") or {}
    if not (resolvers or subscriptions or directives):
        # NOTE(review): type_resolvers alone do not trigger a re-bake here —
        # confirm this asymmetry is intentional.
        return

    schema_name = _get_schema_name_from_marker(marker)

    # Init schema definitions
    SchemaRegistry._schemas.setdefault(schema_name, {})

    # Reset schema resolvers
    if resolvers:
        SchemaRegistry._schemas[schema_name]["resolvers"] = {}

    # Reset schema type_resolvers
    if type_resolvers:
        SchemaRegistry._schemas[schema_name]["type_resolvers"] = {}

    if subscriptions:
        SchemaRegistry._schemas[schema_name]["subscriptions"] = {}

    if directives:
        SchemaRegistry._schemas[schema_name]["directives"] = {}

    # Apply "Resolver" decorators to resolvers functions
    for name, implementation in resolvers.items():
        Resolver(name, schema_name=schema_name)(implementation)

    # Apply "TypeResolver" decorators to type resolvers functions
    for name, implementation in type_resolvers.items():
        TypeResolver(name, schema_name=schema_name)(implementation)

    # Apply "Subscription" decorators to resolvers functions
    for name, implementation in subscriptions.items():
        Subscription(name, schema_name=schema_name)(implementation)

    # Apply "Directive" decorators to resolvers functions
    for name, implementation in directives.items():
        Directive(name, schema_name=schema_name)(implementation)

    # Bake resolvers
    for resolver in (SchemaRegistry._schemas[schema_name].get("resolvers")
                     or {}).values():
        resolver.bake(_TTFTT_ENGINES[schema_name]._schema)

    # Bake type resolvers
    for type_resolver in (
            SchemaRegistry._schemas[schema_name].get("type_resolvers")
            or {}).values():
        type_resolver.bake(_TTFTT_ENGINES[schema_name]._schema)

    # Bake subscriptions
    for subscription in (
            SchemaRegistry._schemas[schema_name].get("subscriptions")
            or {}).values():
        subscription.bake(_TTFTT_ENGINES[schema_name]._schema)

    # Bake directives
    for directive in (SchemaRegistry._schemas[schema_name].get("directives")
                      or {}).values():
        directive.bake(_TTFTT_ENGINES[schema_name]._schema)

    # Re-bake engine schema
    SchemaRegistry.find_schema_info(
        schema_name=schema_name)["inst"] = _TTFTT_ENGINES[schema_name]._schema
    # A fresh event loop drives the async bake from this sync pytest hook.
    loop = asyncio.new_event_loop()
    loop.run_until_complete(_TTFTT_ENGINES[schema_name]._schema.bake())
def __call__(self, implementation):
    """Store the scalar implementation and register it under the schema name.

    :param implementation: the scalar implementation being decorated
    :return: the unchanged implementation
    """
    self._implementation = implementation
    SchemaRegistry.register_scalar(self._schema_name, self)
    return implementation
async def cook(
    self,
    sdl: Union[str, List[str]] = None,
    error_coercer: Callable[[Exception, Dict[str, Any]],
                            Dict[str, Any]] = None,
    custom_default_resolver: Optional[Callable] = None,
    custom_default_type_resolver: Optional[Callable] = None,
    modules: Optional[Union[str, List[str], List[Dict[str, Any]]]] = None,
    schema_name: str = None,
) -> None:
    """
    Cook the tartiflette, basically prepare the engine by binding it to
    given modules using the schema_name as a key. You wont be able to
    execute a request if the engine wasn't cooked.
    :param sdl: path or list of path to the files / directories containing
    the SDL
    :param error_coercer: callable in charge of transforming a couple
    Exception/error into an error dictionary
    :param custom_default_resolver: callable that will replace the builtin
    default_resolver (called as resolver for each UNDECORATED field)
    :param custom_default_type_resolver: callable that will replace the
    tartiflette `default_type_resolver` (will be called on abstract types
    to deduct the type of a result)
    :param modules: list of string containing the name of the modules you
    want the engine to import, usually this modules contains your
    Resolvers, Directives, Scalar or Subscription code
    :param schema_name: name of the SDL
    :type sdl: Union[str, List[str]]
    :type error_coercer: Callable[[Exception, Dict[str, Any]], Dict[str, Any]]
    :type custom_default_resolver: Optional[Callable]
    :type custom_default_type_resolver: Optional[Callable]
    :type modules: Optional[Union[str, List[str], List[Dict[str, Any]]]]
    :type schema_name: str
    """
    # Cooking is idempotent: once cooked, later calls are no-ops.
    if self._cooked:
        return

    # Arguments fall back on the values captured at construction time.
    if modules is None:
        modules = self._modules or []
    if isinstance(modules, str):
        modules = [modules]

    sdl = sdl or self._sdl
    if not sdl:
        raise Exception("Please provide a SDL")

    schema_name = schema_name or self._schema_name or "default"

    # Validate every user-supplied callable before any registration happens.
    custom_error_coercer = error_coercer or self._error_coercer
    if custom_error_coercer and not is_valid_coroutine(
            custom_error_coercer):
        raise NonCoroutine(
            "Given < error_coercer > is not a coroutine callable.")

    custom_default_resolver = (custom_default_resolver
                               or self._custom_default_resolver)
    if custom_default_resolver and not is_valid_coroutine(
            custom_default_resolver):
        raise NonCoroutine(
            "Given < custom_default_resolver > is not a coroutine callable."
        )

    custom_default_type_resolver = (custom_default_type_resolver
                                    or self._custom_default_type_resolver)
    if custom_default_type_resolver and not callable(
            custom_default_type_resolver):
        raise NonCallable(
            "Given < custom_default_type_resolver > is not a coroutine callable."
        )

    self._error_coercer = error_coercer_factory(custom_error_coercer
                                                or default_error_coercer)
    # Importing modules may contribute extra SDL chunks (modules_sdl).
    self._modules, modules_sdl = await _import_modules(
        modules, schema_name)
    SchemaRegistry.register_sdl(schema_name, sdl, modules_sdl)
    self._schema = await SchemaBakery.bake(schema_name,
                                           custom_default_resolver,
                                           custom_default_type_resolver)
    self._build_response = partial(build_response,
                                   error_coercer=self._error_coercer)

    self._cooked = True
async def test_tartiflette_directive_declaration():
    # SDL with two custom directives (@lol, @lol2) stacked together with the
    # builtin @deprecated on the same fields.
    schema_sdl = """
    directive @lol on FIELD_DEFINITION
    directive @lol2( value: Int ) on FIELD_DEFINITION

    type Query {
        fieldLoled1: Int @lol
        fieldLoled2: Int @lol @deprecated @lol2(value:2)
        fieldLoled3: Int @deprecated @lol @lol2(value:6)
    }
    """

    # Execute directive
    @Directive("lol2", schema_name="test_tartiflette_directive_declaration")
    class Loled2:
        @staticmethod
        async def on_field_execution(
            directive_args: Dict[str, Any],
            next_resolver: Callable,
            parent: Optional[Any],
            args: Dict[str, Any],
            ctx: Optional[Any],
            info: "ResolveInfo",
        ):
            # Adds the directive's `value` argument to the wrapped result.
            return (await next_resolver(parent, args, ctx, info)) + int(
                directive_args["value"])

    @Resolver(
        "Query.fieldLoled1",
        schema_name="test_tartiflette_directive_declaration",
    )
    async def func_field_resolver4(_parent, _arguments, _request_ctx, _info):
        return 42

    @Resolver(
        "Query.fieldLoled2",
        schema_name="test_tartiflette_directive_declaration",
    )
    async def func_field_resolver5(_parent, _arguments, _request_ctx, _info):
        return 42

    @Resolver(
        "Query.fieldLoled3",
        schema_name="test_tartiflette_directive_declaration",
    )
    async def func_field_resolver6(_parent, _arguments, _request_ctx, _info):
        return 42

    @Directive("lol", schema_name="test_tartiflette_directive_declaration")
    class Loled:
        @staticmethod
        async def on_field_execution(
            directive_args: Dict[str, Any],
            next_resolver: Callable,
            parent: Optional[Any],
            args: Dict[str, Any],
            ctx: Optional[Any],
            info: "ResolveInfo",
        ):
            # Increments the wrapped result by one.
            return (await next_resolver(parent, args, ctx, info)) + 1

    ttftt = await create_engine(
        schema_sdl, schema_name="test_tartiflette_directive_declaration")

    # The custom @lol directive must be registered with its implementation.
    assert (SchemaRegistry.find_schema(
        "test_tartiflette_directive_declaration").find_directive("lol")
            is not None)
    assert (SchemaRegistry.find_schema("test_tartiflette_directive_declaration"
                                       ).find_directive("lol").implementation
            is not None)

    result = await ttftt.execute(
        """
        query Test{
            fieldLoled1
            fieldLoled2
            fieldLoled3
        }
        """,
        operation_name="Test",
    )

    # Base 42 plus stacked directive effects:
    # fieldLoled1: +1 (@lol); fieldLoled2: +1 +2; fieldLoled3: +1 +6.
    assert {
        "data": {
            "fieldLoled1": 43,
            "fieldLoled2": 45,
            "fieldLoled3": 49
        }
    } == result