def graphql_match(query, template):
    # Normalize the incoming query with a parse/print round-trip
    query = graphql.parse(query)
    query = graphql.print_ast(query).strip()
    logging.info("Normalized query:\n%s", query)

    # Replace Handlebars placeholders so the template can be parsed as GraphQL
    mapper = TemplateMapper()
    template = re.sub(r'({{[^{}]*}})', mapper, template)
    logging.info("Template after replace:\n%s", template)

    # Normalize the template the same way as the query
    template = graphql.parse(template)
    template = graphql.print_ast(template).strip()
    logging.info("Template after normalized:\n%s", template)

    # Build a regex from the normalized template and restore the placeholders
    template = '^' + re.escape(template) + '$'
    for k, v in mapper.matches.items():
        template = template.replace(k, v)
    template = template.replace("\\ ", " ")
    template = template.replace("\\\n", "\n")

    task = RenderingTask(
        PYBARS,
        template,
        inject_methods=[reg_ex],
        inject_objects={"scope": None, 'key': None},
    )
    template, context = task.render_handlebars()
    logging.info("Template after render:\n%s", template)

    return re.match(template, query)
def test_fragments(ds):
    query = """fragment NameAndAppearances on Character {
  name
  appearsIn
}

{
  hero {
    ...NameAndAppearances
  }
}"""

    name_and_appearances = (
        DSLFragment("NameAndAppearances")
        .on(ds.Character)
        .select(ds.Character.name, ds.Character.appearsIn)
    )

    query_dsl = DSLQuery(ds.Query.hero.select(name_and_appearances))

    document = dsl_gql(name_and_appearances, query_dsl)

    print(print_ast(document))
    assert query == print_ast(document)
def test_get_introspection_query_ast(option):
    introspection_query = get_introspection_query(
        descriptions=option,
        specified_by_url=option,
        directive_is_repeatable=option,
        schema_description=option,
    )

    dsl_introspection_query = get_introspection_query_ast(
        descriptions=option,
        specified_by_url=option,
        directive_is_repeatable=option,
        schema_description=option,
    )

    assert print_ast(gql(introspection_query)) == print_ast(dsl_introspection_query)
def print_query_plan(query_plan_descriptor: QueryPlanDescriptor, indentation_depth: int = 4) -> str:
    """Return a string describing query plan."""
    query_plan_strings = [""]
    plan_and_depth = _get_plan_and_depth_in_dfs_order(query_plan_descriptor.root_sub_query_plan)

    for query_plan, depth in plan_and_depth:
        line_separation = "\n" + " " * indentation_depth * depth
        query_plan_strings.append(line_separation)

        query_str = 'Execute in schema named "{}":\n'.format(query_plan.schema_id)
        query_str += print_ast(query_plan.query_ast)
        query_str = query_str.replace("\n", line_separation)
        query_plan_strings.append(query_str)

    query_plan_strings.append("\n\nJoin together outputs as follows: ")
    query_plan_strings.append(str(query_plan_descriptor.output_join_descriptors))
    query_plan_strings.append("\n\nRemove the following outputs at the end: ")
    query_plan_strings.append(str(query_plan_descriptor.intermediate_output_names) + "\n")

    return "".join(query_plan_strings)
def test_dsl_root_type_not_default():
    from graphql import parse, build_ast_schema

    schema_str = """
schema {
  query: QueryNotDefault
}

type QueryNotDefault {
  version: String
}
"""

    type_def_ast = parse(schema_str)
    schema = build_ast_schema(type_def_ast)

    ds = DSLSchema(schema)
    query = dsl_gql(DSLQuery(ds.QueryNotDefault.version))

    expected_query = """
{
  version
}
"""
    assert print_ast(query) == expected_query.strip()

    with pytest.raises(GraphQLError) as excinfo:
        DSLSubscription(ds.QueryNotDefault.version)

    assert (
        "Invalid field for <DSLSubscription>: <DSLField QueryNotDefault::version>"
    ) in str(excinfo.value)
async def _send_query(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, str]] = None,
    operation_name: Optional[str] = None,
) -> int:
    """Send a query to the provided websocket connection.

    We use an incremented id to reference the query.

    Returns the used id for this query.
    """
    query_id = self.next_query_id
    self.next_query_id += 1

    payload: Dict[str, Any] = {"query": print_ast(document)}
    if variable_values:
        payload["variables"] = variable_values
    if operation_name:
        payload["operationName"] = operation_name

    query_str = json.dumps({
        "id": str(query_id),
        "type": "start",
        "payload": payload,
    })

    await self._send(query_str)

    return query_id
async def _send_query(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, str]] = None,
    operation_name: Optional[str] = None,
) -> int:
    """Send a query to the provided websocket connection.

    We use an incremented id to reference the query.

    Returns the used id for this query.
    """
    query_id = self.next_query_id
    self.next_query_id += 1

    query_str = json.dumps({
        "topic": self.channel_name,
        "event": "doc",
        "payload": {
            "query": print_ast(document),
            "variables": variable_values or {},
        },
        "ref": query_id,
    })

    await self._send(query_str)

    return query_id
def test_use_variable_definition_multiple_times(ds):
    var = DSLVariableDefinitions()

    # the `episode` variable is used in both fields
    op = DSLMutation(
        ds.Mutation.createReview.alias("badReview")
        .args(review=var.badReview, episode=var.episode)
        .select(ds.Review.stars, ds.Review.commentary),
        ds.Mutation.createReview.alias("goodReview")
        .args(review=var.goodReview, episode=var.episode)
        .select(ds.Review.stars, ds.Review.commentary),
    )
    op.variable_definitions = var
    query = dsl_gql(op)

    assert (
        print_ast(query)
        == """mutation ($badReview: ReviewInput, $episode: Episode, $goodReview: ReviewInput) {
  badReview: createReview(review: $badReview, episode: $episode) {
    stars
    commentary
  }
  goodReview: createReview(review: $goodReview, episode: $episode) {
    stars
    commentary
  }
}"""
    )
def test_multiple_operations(ds):
    query = dsl_gql(
        GetHeroName=DSLQuery(ds.Query.hero.select(ds.Character.name)),
        CreateReviewMutation=DSLMutation(
            ds.Mutation.createReview.args(
                episode=6, review={"stars": 5, "commentary": "This is a great movie!"}
            ).select(ds.Review.stars, ds.Review.commentary)
        ),
    )

    assert (
        print_ast(query)
        == """query GetHeroName {
  hero {
    name
  }
}

mutation CreateReviewMutation {
  createReview(episode: JEDI, review: {stars: 5, \
commentary: "This is a great movie!"}) {
    stars
    commentary
  }
}
"""
    )
async def execute(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, str]] = None,
    operation_name: Optional[str] = None,
    extra_args: Dict[str, Any] = {},
) -> ExecutionResult:
    """Execute the provided document AST against the configured remote server.

    This uses the aiohttp library to perform an HTTP POST request
    asynchronously to the remote server.

    The result is sent as an ExecutionResult object.
    """
    query_str = print_ast(document)
    payload: Dict[str, Any] = {
        "query": query_str,
    }

    if variable_values:
        payload["variables"] = variable_values
    if operation_name:
        payload["operationName"] = operation_name

    post_args = {
        "json": payload,
    }

    # Pass post_args to aiohttp post method
    post_args.update(extra_args)

    if self.session is None:
        raise TransportClosed("Transport is not connected")

    async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp:
        try:
            result = await resp.json()
        except Exception:
            # We raise a TransportServerError if the status code is 400 or higher
            # We raise a TransportProtocolError in the other cases
            try:
                # Raise a ClientResponseError if response status is 400 or higher
                resp.raise_for_status()
            except ClientResponseError as e:
                raise TransportServerError from e

            raise TransportProtocolError("Server did not return a GraphQL result")

        if "errors" not in result and "data" not in result:
            raise TransportProtocolError("Server did not return a GraphQL result")

        return ExecutionResult(errors=result.get("errors"), data=result.get("data"))
def test_operation_name(ds):
    query = dsl_gql(
        GetHeroName=DSLQuery(ds.Query.hero.select(ds.Character.name)),
    )

    assert (
        print_ast(query)
        == """query GetHeroName {
  hero {
    name
  }
}
"""
    )
def set_query_info(self, insert_var, query):
    insert_var["query"] = {
        "data": {
            "name": query.name.value,
            "query": graphql.print_ast(query),
        },
        "on_conflict": {
            "constraint": "gql_query_query_key",
            "update_columns": "query",
        },
    }
def build_func(provider, definition, schema, is_async):
    """
    Builds a python function from a GraphQL AST definition
    """
    name = definition.name.value
    source = graphql.print_ast(definition)
    assert definition.operation != graphql.OperationType.SUBSCRIPTION
    params = [build_param(var) for var in definition.variable_definitions]
    query_func = '__aquery__' if is_async else '__query__'
    # TODO: Line numbers

    if sys.version_info >= (3, 8):
        py38 = {
            'posonlyargs': [],
        }
    else:
        py38 = {}

    return ast.FunctionDef(
        name=name,
        args=ast.arguments(
            args=[],
            defaults=[],
            kwonlyargs=[ast.arg(arg=name, annotation=None) for name, _ in params],
            kw_defaults=[val for _, val in params],
            vararg=None,
            kwarg=None,
            **py38,
        ),
        body=[
            ast.Return(
                value=ast.Call(
                    func=ast.Name(id=query_func, ctx=ast.Load()),
                    args=[
                        value2pyliteral(provider),
                        value2pyliteral(source),
                    ],
                    keywords=[
                        ast.keyword(arg=name, value=ast.Name(id=name, ctx=ast.Load()))
                        for name, _ in params
                    ] + [
                        ast.keyword(
                            arg=name,
                            value=(val if isinstance(val, ast.AST) else value2pyliteral(val)),
                        )
                        for name, val in get_additional_kwargs(provider, definition, schema).items()
                    ],
                ),
            ),
        ],
        decorator_list=[],
    )
def test_subscription(ds):
    query = dsl_gql(
        DSLSubscription(
            ds.Subscription.reviewAdded(episode=6).select(
                ds.Review.stars, ds.Review.commentary
            )
        )
    )

    assert (
        print_ast(query)
        == """subscription {
  reviewAdded(episode: JEDI) {
    stars
    commentary
  }
}
"""
    )
def tokenize_graphql_parameters(query):
    """
    Parses a GraphQL query and replaces parameter values with tokens
    to be used for targeted injections
    """
    document = parse(query)
    for arg in document.definitions[0].selection_set.selections[0].arguments:
        try:
            if arg.value:
                arg.value.value = "*"
        except AttributeError:
            error("Node did not have a parameter value to replace")

    return print_ast(document)
def print_with_reduced_whitespace(ast: Document) -> str:
    """
    Like the graphql-js print function, but deleting whitespace wherever
    feasible.

    Specifically, all whitespace (outside of string literals) is reduced to at
    most one space, and even that space is removed anywhere except for between
    two alphanumerics.
    """
    visit(ast, _HEX_CONVERSION_VISITOR)
    val = re.sub(r'\s+', ' ', print_ast(ast))
    val = re.sub(r'([^_a-zA-Z0-9]) ', _replace_with_first_group, val)
    val = re.sub(r' ([^_a-zA-Z0-9])', _replace_with_first_group, val)
    val = re.sub(r'"([a-f0-9]+)"', _from_hex, val)
    return val
def input_value_info(graph, fields, ids):
    nodes_map = _nodes_map(graph)
    for ident in ids:
        node = nodes_map[ident.node]
        field = node.fields_map[ident.field]
        option = field.options_map[ident.name]
        if option.default is Nothing:
            default = None
        elif option.default is None:
            # graphql-core currently can't parse/print "null" values
            default = 'null'
        else:
            default = print_ast(ast_from_value(option.default))
        info = {
            'id': ident,
            'name': option.name,
            'description': option.description,
            'defaultValue': default,
        }
        yield [info[f.name] for f in fields]
def execution_node_for_group(
    context: 'QueryPlanningContext',
    group: 'FetchGroup',
    parent_type: Optional[GraphQLCompositeType] = None,
) -> PlanNode:
    selection_set = selection_set_from_field_set(group.fields, parent_type)
    requires = (
        selection_set_from_field_set(group.required_fields)
        if len(group.required_fields) > 0
        else None
    )
    variable_usages = context.get_variable_usages(selection_set, group.internal_fragments)

    operation = (
        operation_for_entities_fetch(selection_set, variable_usages, group.internal_fragments)
        if requires is not None
        else operation_for_root_fetch(
            selection_set, variable_usages, group.internal_fragments, context.operation.operation
        )
    )

    fetch_node = FetchNode(
        service_name=group.service_name,
        requires=trim_selection_nodes(requires.selections) if requires is not None else None,
        variable_usages=list(variable_usages.keys()),
        operation=strip_ignored_characters(print_ast(operation)),
    )

    node: PlanNode = (
        FlattenNode(path=group.merge_at, node=fetch_node)
        if group.merge_at is not None and len(group.merge_at) > 0
        else fetch_node
    )

    if len(group.dependent_groups) > 0:
        dependent_nodes = [
            execution_node_for_group(context, dependent_group)
            for dependent_group in group.dependent_groups
        ]
        return flat_wrap('Sequence', [node, flat_wrap('Parallel', dependent_nodes)])
    else:
        return node
def test_add_variable_definitions_with_default_value_enum(ds):
    var = DSLVariableDefinitions()
    op = DSLMutation(
        ds.Mutation.createReview.args(
            review=var.review, episode=var.episode.default(4)
        ).select(ds.Review.stars, ds.Review.commentary)
    )
    op.variable_definitions = var
    query = dsl_gql(op)

    assert (
        print_ast(query)
        == """mutation ($review: ReviewInput, $episode: Episode = NEWHOPE) {
  createReview(review: $review, episode: $episode) {
    stars
    commentary
  }
}"""
    )
async def _send_query(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, Any]] = None,
    operation_name: Optional[str] = None,
) -> int:
    query_id = self.next_query_id
    self.next_query_id += 1

    data: Dict = {"query": print_ast(document)}
    if variable_values:
        data["variables"] = variable_values
    if operation_name:
        data["operationName"] = operation_name

    serialized_data = json.dumps(data, separators=(",", ":"))

    payload = {"data": serialized_data}

    message: Dict = {
        "id": str(query_id),
        "type": "start",
        "payload": payload,
    }

    assert self.auth is not None

    message["payload"]["extensions"] = {
        "authorization": self.auth.get_headers(serialized_data)
    }

    await self._send(json.dumps(message, separators=(",", ":")))

    return query_id
def test_add_variable_definitions_with_default_value_input_object(ds):
    var = DSLVariableDefinitions()
    op = DSLMutation(
        ds.Mutation.createReview.args(
            review=var.review.default({"stars": 5, "commentary": "Wow!"}),
            episode=var.episode,
        ).select(ds.Review.stars, ds.Review.commentary)
    )
    op.variable_definitions = var
    query = dsl_gql(op)

    assert (
        print_ast(query)
        == """
mutation ($review: ReviewInput = {stars: 5, commentary: "Wow!"}, $episode: Episode) {
  createReview(review: $review, episode: $episode) {
    stars
    commentary
  }
}""".strip()
    )
def test_type_hero_query(ds):
    query = """{
  __type(name: "Hero") {
    kind
    name
    ofType {
      kind
      name
    }
  }
}"""

    type_hero = DSLMetaField("__type")(name="Hero")
    type_hero.select(
        ds.__Type.kind,
        ds.__Type.name,
        ds.__Type.ofType.select(ds.__Type.kind, ds.__Type.name),
    )
    query_dsl = DSLQuery(type_hero)

    assert query == str(print_ast(dsl_gql(query_dsl))).strip()
def test_add_variable_definitions_in_input_object(ds):
    var = DSLVariableDefinitions()
    op = DSLMutation(
        ds.Mutation.createReview.args(
            review={"stars": var.stars, "commentary": var.commentary},
            episode=var.episode,
        ).select(ds.Review.stars, ds.Review.commentary)
    )
    op.variable_definitions = var
    query = dsl_gql(op)

    assert (
        print_ast(query)
        == """mutation ($stars: Int, $commentary: String, $episode: Episode) {
  createReview(
    review: {stars: $stars, commentary: $commentary}
    episode: $episode
  ) {
    stars
    commentary
  }
}"""
    )
def max_rps_test(self, query):
    query_str = graphql.print_ast(query)
    print(
        Fore.GREEN
        + "(Compute maximum Request per second) Running wrk benchmark for query\n",
        query_str + Style.RESET_ALL)
    self.hge.graphql_q(query_str)  # Test query once for errors
    bench_script = os.path.join(self.lua_dir + '/bench-wrk.lua')
    graphql_url = self.hge.url + '/v1/graphql'
    params = self.get_wrk2_params()
    duration = 30
    wrk_command = [
        'wrk',
        '-t', str(params['threads']),
        '-c', str(params['connections']),
        '-d', str(duration),
        '--latency',
        '-s', bench_script,
        graphql_url,
        query_str
    ]
    self.docker_client = docker.from_env()
    result = self.docker_client.containers.run(
        self.wrk_docker_image,
        detach=False,
        stdout=False,
        stderr=True,
        command=wrk_command,
        network_mode='host',
        environment=self.get_lua_env(),
        volumes=self.get_scripts_vol(),
        remove=True,
        user=self.get_current_user())
    summary = json.loads(result)['summary']
    # TODO explain this calculation. Why aren't we using wrk's reported 'max'?
    # Should we call this avg_sustained_rps or something?
    max_rps = round(summary['requests'] / float(duration))
    self.insert_max_rps_result(query, max_rps)
    print("Max RPS", max_rps)
    return max_rps
def print_query_plan(query_plan_descriptor, indentation_depth=4):
    """Return a string describing query plan."""
    query_plan_strings = [u'']
    plan_and_depth = _get_plan_and_depth_in_dfs_order(query_plan_descriptor.root_sub_query_plan)

    for query_plan, depth in plan_and_depth:
        line_separation = u'\n' + u' ' * indentation_depth * depth
        query_plan_strings.append(line_separation)

        query_str = u'Execute in schema named "{}":\n'.format(query_plan.schema_id)
        query_str += print_ast(query_plan.query_ast)
        query_str = query_str.replace(u'\n', line_separation)
        query_plan_strings.append(query_str)

    query_plan_strings.append(u'\n\nJoin together outputs as follows: ')
    query_plan_strings.append(str(query_plan_descriptor.output_join_descriptors))
    query_plan_strings.append(u'\n\nRemove the following outputs at the end: ')
    query_plan_strings.append(str(query_plan_descriptor.intermediate_output_names) + u'\n')

    return ''.join(query_plan_strings)
def _check_query_node_structure_helper(self, query_node, expected_query_node):
    """Check query_node has the same structure as expected_query_node."""
    # Check AST and id of the parent
    self.assertEqual(print_ast(query_node.query_ast), expected_query_node.query_str)
    self.assertEqual(query_node.schema_id, expected_query_node.schema_id)
    # Check number of children matches
    child_query_connections = query_node.child_query_connections
    expected_child_data = expected_query_node.child_query_nodes_and_out_names
    self.assertEqual(len(child_query_connections), len(expected_child_data))

    for i, (child_query_connection, expected_child_data_piece) in enumerate(
        six.moves.zip(child_query_connections, expected_child_data)
    ):
        # Check child and parent connections
        child_query_node = child_query_connection.sink_query_node
        child_expected_query_node, parent_out_name, child_out_name = expected_child_data_piece
        self._check_query_node_edge(query_node, i, child_query_node, parent_out_name,
                                    child_out_name)
        # Recurse
        self._check_query_node_structure_helper(child_query_node, child_expected_query_node)
def root_resolver(obj, info, **inputs):
    query = graphql.print_ast(info.operation)
    # TODO: add graphql context
    delegated_result = client.execute(query=query, variables=info.variable_values)
    return delegated_result.data[info.field_name]
def __str__(self):
    return print_ast(self.ast_field)
async def execute(
    self,
    document: DocumentNode,
    variable_values: Optional[Dict[str, str]] = None,
    operation_name: Optional[str] = None,
    extra_args: Dict[str, Any] = None,
    upload_files: bool = False,
) -> ExecutionResult:
    """Execute the provided document AST against the configured remote server
    using the current session.

    This uses the aiohttp library to perform an HTTP POST request
    asynchronously to the remote server.

    Don't call this coroutine directly on the transport, instead use
    :code:`execute` on a client or a session.

    :param document: the parsed GraphQL request
    :param variable_values: An optional Dict of variable values
    :param operation_name: An optional Operation name for the request
    :param extra_args: additional arguments to send to the aiohttp post method
    :param upload_files: Set to True if you want to put files in the variable values
    :returns: an ExecutionResult object.
    """
    query_str = print_ast(document)

    payload: Dict[str, Any] = {
        "query": query_str,
    }

    if operation_name:
        payload["operationName"] = operation_name

    if upload_files:
        # If the upload_files flag is set, then we need variable_values
        assert variable_values is not None

        # If we upload files, we will extract the files present in the
        # variable_values dict and replace them by null values
        nulled_variable_values, files = extract_files(
            variables=variable_values,
            file_classes=self.file_classes,
        )

        # Save the nulled variable values in the payload
        payload["variables"] = nulled_variable_values

        # Prepare aiohttp to send multipart-encoded data
        data = aiohttp.FormData()

        # Generate the file map
        # path is nested in a list because the spec allows multiple pointers
        # to the same file. But we don't support that.
        # Will generate something like {"0": ["variables.file"]}
        file_map = {str(i): [path] for i, path in enumerate(files)}

        # Enumerate the file streams
        # Will generate something like {'0': <_io.BufferedReader ...>}
        file_streams = {str(i): files[path] for i, path in enumerate(files)}

        # Add the payload to the operations field
        operations_str = json.dumps(payload)
        log.debug("operations %s", operations_str)
        data.add_field("operations", operations_str, content_type="application/json")

        # Add the file map field
        file_map_str = json.dumps(file_map)
        log.debug("file_map %s", file_map_str)
        data.add_field("map", file_map_str, content_type="application/json")

        # Add the extracted files as remaining fields
        for k, v in file_streams.items():
            data.add_field(k, v, filename=k)

        post_args: Dict[str, Any] = {"data": data}

    else:
        if variable_values:
            payload["variables"] = variable_values

        if log.isEnabledFor(logging.INFO):
            log.info(">>> %s", json.dumps(payload))

        post_args = {"json": payload}

    # Pass post_args to aiohttp post method
    if extra_args:
        post_args.update(extra_args)

    if self.session is None:
        raise TransportClosed("Transport is not connected")

    async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp:
        try:
            result = await resp.json()

            if log.isEnabledFor(logging.INFO):
                result_text = await resp.text()
                log.info("<<< %s", result_text)

        except Exception:
            # We raise a TransportServerError if the status code is 400 or higher
            # We raise a TransportProtocolError in the other cases
            try:
                # Raise a ClientResponseError if response status is 400 or higher
                resp.raise_for_status()
            except ClientResponseError as e:
                raise TransportServerError(str(e)) from e

            result_text = await resp.text()
            raise TransportProtocolError(
                f"Server did not return a GraphQL result: {result_text}"
            )

        if "errors" not in result and "data" not in result:
            result_text = await resp.text()
            raise TransportProtocolError(
                "Server did not return a GraphQL result: "
                'No "data" or "error" keys in answer: '
                f"{result_text}"
            )

        return ExecutionResult(errors=result.get("errors"), data=result.get("data"))
def wrk2_test(self, query, rps):
    def upload_files(files):
        if self.upload_root_uri:
            p = urlparse(self.upload_root_uri)
            if p.scheme == 's3':
                bucket = p.netloc
                key = p.path.lstrip('/')
                s3_client = boto3.client('s3')
                for (f, f_key) in files:
                    s3_client.upload_file(f, bucket, os.path.join(key, f_key))

    query_str = graphql.print_ast(query)
    params = self.get_wrk2_params()
    print(
        Fore.GREEN
        + "Running benchmark wrk2 for at {} req/s (duration: {}) for query\n"
          .format(rps, params['duration']),
        query_str + Style.RESET_ALL)
    bench_script = os.path.join(self.lua_dir, 'bench-wrk2.lua')
    graphql_url = self.hge.url + '/v1/graphql'
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    results_dir = self.results_root_dir
    tests_path = [str(rps), timestamp]
    results_dir = os.path.join(results_dir, *tests_path)
    os.makedirs(results_dir, exist_ok=True)
    wrk2_command = [
        'wrk2',
        '-R', str(rps),
        '-t', str(params['threads']),
        '-c', str(params['connections']),
        '-d', str(params['duration']),
        '--latency',
        '-s', bench_script,
        graphql_url,
        query_str,
        results_dir
    ]
    volumes = self.get_scripts_vol()
    volumes[results_dir] = {'bind': results_dir, 'mode': 'rw'}
    self.docker_client = docker.from_env()
    result = self.docker_client.containers.run(
        self.wrk_docker_image,
        detach=False,
        stdout=True,
        stderr=False,
        command=wrk2_command,
        network_mode='host',
        environment=self.get_lua_env(),
        volumes=volumes,
        remove=True,
        user=self.get_current_user()).decode('ascii')

    histogram_file = os.path.join(results_dir, 'latencies.hgrm')
    histogram = self.get_latency_histogram(result, histogram_file)

    summary_file = os.path.join(results_dir, 'summary.json')
    with open(summary_file) as f:
        summary = json.load(f)
    latencies_file = os.path.join(results_dir, 'latencies')

    def extract_data(v):
        return v['data'] if isinstance(v, dict) and 'data' in v else v

    tests_info = {k: extract_data(v) for (k, v) in self.gen_test_info(query, rps).items()}
    tests_setup_file = os.path.join(results_dir, 'test_setup.json')
    with open(tests_setup_file, 'w') as f:
        f.write(json.dumps(tests_info, indent=2))

    upload_files(
        [(x, os.path.join(*tests_path, y)) for (x, y) in [
            (summary_file, 'summary.json'),
            (latencies_file, 'latencies'),
            (histogram_file, 'latencies.hgrm'),
            (tests_setup_file, 'test_setup.json')]])

    if self.upload_root_uri:
        latencies_uri = uri_path_join(self.upload_root_uri, *tests_path, 'latencies')
    else:
        latencies_uri = pathlib.Path(latencies_file).as_uri()

    self.insert_result(query, rps, summary, histogram, latencies_uri)
    return (summary, histogram)