def _get_query_document(root_vertex_field_name, root_selections):
    """Return a Document representing a query with the specified name and selections."""
    return DocumentNode(
        definitions=[
            OperationDefinitionNode(
                operation=OperationType.QUERY,
                selection_set=SelectionSetNode(
                    selections=[
                        FieldNode(
                            name=NameNode(value=root_vertex_field_name),
                            selection_set=SelectionSetNode(selections=root_selections),
                            directives=[],
                        )
                    ]
                ),
            )
        ]
    )

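# Illustrative usage sketch (not part of the library). Assumes graphql-core 3.x is installed;
# the root field "Animal" and the property field "name" are hypothetical schema names.
def _example_get_query_document_usage():
    """Build and print a minimal query document using _get_query_document."""
    from graphql import print_ast
    from graphql.language.ast import FieldNode, NameNode

    root_selections = [FieldNode(name=NameNode(value="name"), arguments=[], directives=[])]
    document = _get_query_document("Animal", root_selections)
    # Prints a query equivalent to:  { Animal { name } }
    print(print_ast(document))
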
def _split_query_ast_one_level_recursive(
    query_node, ast, type_info, edge_to_stitch_fields, name_assigner
):
    """Return an AST node with which to replace the input AST in the selections that contain it.

    This function examines the selections of the input AST, and recursively calls either
    _split_query_ast_one_level_recursive_type_coercion or
    _split_query_ast_one_level_recursive_normal_fields depending on whether the selections
    contain a single InlineFragment or a number of normal fields.

    Args:
        query_node: SubQueryNode, whose list of child query connections may be modified to
                    include new children
        ast: Field, InlineFragment, or OperationDefinition, the AST that we are trying to split
             into child components. It is not modified by this function
        type_info: TypeInfo, used to get information about the types of fields while traversing
                   the query AST
        edge_to_stitch_fields: Dict[Tuple(str, str), Tuple(str, str)], mapping
                               (type name, vertex field name) to
                               (source field name, sink field name) used in the @stitch
                               directive for each cross-schema edge
        name_assigner: IntermediateOutNameAssigner, object used to generate and keep track of
                       names of newly created @output directives

    Returns:
        Field, InlineFragment, or OperationDefinition, the AST with which to replace the input
        AST in the selections that contain it
    """
    type_info.enter(ast.selection_set)
    selections = ast.selection_set.selections

    type_coercion = try_get_inline_fragment(selections)
    if type_coercion is not None:
        # Case 1: type coercion
        type_info.enter(type_coercion)
        new_type_coercion = _split_query_ast_one_level_recursive(
            query_node, type_coercion, type_info, edge_to_stitch_fields, name_assigner
        )
        type_info.leave(type_coercion)

        if new_type_coercion is type_coercion:
            new_selections = selections
        else:
            new_selections = [new_type_coercion]
    else:
        # Case 2: normal fields
        new_selections = _split_query_ast_one_level_recursive_normal_fields(
            query_node, selections, type_info, edge_to_stitch_fields, name_assigner
        )

    type_info.leave(ast.selection_set)

    # Return the input unchanged, or make a copy if the selections changed.
    if new_selections is not selections:
        new_ast = copy(ast)
        new_ast.selection_set = SelectionSetNode(selections=new_selections)
        return new_ast
    else:
        return ast

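# Note on the edge_to_stitch_fields argument above (illustrative only; type, field, and column
# names below are hypothetical): per the docstring, it maps (type name, vertex field name) to
# the (source field name, sink field name) pair declared via @stitch for that cross-schema edge:
#
#     edge_to_stitch_fields = {
#         ("Person", "out_Person_LivesIn"): ("location_id", "id"),
#     }
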
def _get_minimal_query_ast_from_macro_ast(macro_ast):
    """Get a query that should successfully compile to IR if the macro is valid."""
    ast_without_macro_directives = remove_directives_from_ast(
        macro_ast, DIRECTIVES_REQUIRED_IN_MACRO_EDGE_DEFINITION
    )

    # We will add this output directive to make the ast a valid query.
    output_directive = DirectiveNode(
        name=NameNode(value="output"),
        arguments=[
            ArgumentNode(
                name=NameNode(value="out_name"),
                value=StringValueNode(value="dummy_output_name"),
            )
        ],
    )

    # Shallow copy everything on the path to the first level selection list.
    query_ast = copy(ast_without_macro_directives)
    root_level_selection = copy(get_only_selection_from_ast(query_ast, GraphQLInvalidMacroError))
    first_level_selections = list(root_level_selection.selection_set.selections)

    # Add an output to a new or existing __typename field.
    existing_typename_field = None
    for idx, selection in enumerate(first_level_selections):
        if isinstance(selection, FieldNode):
            if selection.name.value == "__typename":
                # We have a copy of the list, but the elements are references to objects
                # in macro_ast that we don't want to mutate. So the following copy is necessary.
                existing_typename_field = copy(selection)
                existing_typename_field.directives = copy(existing_typename_field.directives)
                existing_typename_field.directives.append(output_directive)
                first_level_selections[idx] = existing_typename_field
    if existing_typename_field is None:
        first_level_selections.insert(
            0, FieldNode(name=NameNode(value="__typename"), directives=[output_directive])
        )

    # Propagate the changes back to the result_ast.
    root_level_selection.selection_set = SelectionSetNode(selections=first_level_selections)
    query_ast.selection_set = SelectionSetNode(selections=[root_level_selection])

    return DocumentNode(definitions=[query_ast])

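# Illustrative effect (hypothetical type and field names; the macro-definition directive syntax
# shown is an assumption): given a macro edge definition body roughly like
#
#     { Animal @macro_edge_definition(name: "out_Animal_RelatedEntity") { out_Animal_ParentOf { uuid } } }
#
# the macro-definition-only directives are stripped and a first-level __typename field carrying
# a dummy @output is ensured, producing a standalone compilable query roughly like
#
#     { Animal { __typename @output(out_name: "dummy_output_name") out_Animal_ParentOf { uuid } } }
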
def find_target_and_copy_path_to_it(ast):
    """Copy the AST objects on the path to the target, returning the copied AST and the target AST.

    This function makes it easy to make changes to the AST at the macro edge target directive
    while using structural sharing, i.e. without mutating the original object while doing the
    minimum amount of copying necessary:
    - If called with an AST that does not contain a macro edge target directive, it is
      guaranteed to produce the original AST input object as part of the result, instead of
      making a copy.
    - If called with an AST that does contain that directive, it will return a new AST object
      that has copies for all AST objects on the traversal path toward the AST containing the
      directive, together with a shallow copy of the AST object that contains the directive
      itself.

    Args:
        ast: GraphQL library AST object

    Returns:
        tuple containing:
        - GraphQL library AST object equivalent to the input AST. Objects on the path to the
          macro edge target directive are shallow-copied.
        - GraphQL library AST object at the macro edge target directive of the resulting AST,
          or None if there was no such directive in the AST.
    """
    # Base case
    for directive in ast.directives:
        if directive.name.value == MacroEdgeTargetDirective.name:
            target_ast = copy(ast)
            return target_ast, target_ast

    # Recurse
    new_selections = []
    target_ast = None
    if isinstance(ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        if ast.selection_set is not None:
            for selection in ast.selection_set.selections:
                new_selection, possible_target_ast = find_target_and_copy_path_to_it(selection)
                new_selections.append(new_selection)
                if possible_target_ast is not None:
                    target_ast = possible_target_ast
    else:
        raise AssertionError("Unexpected AST type received: {} {}".format(type(ast), ast))

    if target_ast is None:
        return ast, None
    else:
        new_ast = copy(ast)
        new_ast.selection_set = SelectionSetNode(selections=new_selections)
        return new_ast, target_ast

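# Illustrative sketch (hypothetical type/field names; assumes MacroEdgeTargetDirective.name is
# the macro system's @macro_edge_target directive): given a definition AST for
#
#     { Animal { out_Animal_ParentOf @macro_edge_target { uuid } } }
#
# the call below returns (copied_ast, target_ast), where copied_ast shares all unchanged
# subtrees with the input, and target_ast is the shallow-copied out_Animal_ParentOf field:
#
#     copied_ast, target_ast = find_target_and_copy_path_to_it(definition_ast)
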
def replace_tag_names(name_change_map, ast):
    """Return a new AST with tag names replaced according to the name_change_map.

    Args:
        name_change_map: Dict[str, str] mapping all tag names in the AST to new names
        ast: GraphQL library AST object, such as a Field, InlineFragment, or
             OperationDefinition. This AST is not mutated.

    Returns:
        GraphQL library AST object, equivalent to the input one, with all tag names replaced
        according to the name_change_map. If no changes were made, this is the same object
        as the input.
    """
    if not isinstance(ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        return ast

    made_changes = False

    # Recurse into selections.
    new_selection_set = None
    if ast.selection_set is not None:
        new_selections = []
        for selection_ast in ast.selection_set.selections:
            new_selection_ast = replace_tag_names(name_change_map, selection_ast)

            if selection_ast is not new_selection_ast:
                # Since we did not get the exact same object as the input, changes were made.
                # That means this call will also need to make changes and return a new object.
                made_changes = True

            new_selections.append(new_selection_ast)
        new_selection_set = SelectionSetNode(selections=new_selections)

    # Process the current node's directives.
    directives = ast.directives
    new_directives = _replace_tag_names_in_directives(name_change_map, directives)
    made_changes = made_changes or (directives is not new_directives)

    if not made_changes:
        # We didn't change anything, return the original input object.
        return ast

    new_ast = copy(ast)
    new_ast.selection_set = new_selection_set
    new_ast.directives = new_directives
    return new_ast

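# Illustrative sketch (hypothetical tag names; assumes the compiler's @tag(tag_name: "...")
# syntax): renaming the tag "parent_name" to "parent_name_macro_0" in
#
#     { Animal { name @tag(tag_name: "parent_name") } }
#
# yields an equivalent AST whose directive reads @tag(tag_name: "parent_name_macro_0"):
#
#     renamed_ast = replace_tag_names({"parent_name": "parent_name_macro_0"}, definition_ast)
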
def expand_macros_in_query_ast(macro_registry, query_ast, query_args):
    """Return (new_query_ast, new_query_args) containing the GraphQL after macro expansion.

    Args:
        macro_registry: MacroRegistry, the registry of macro descriptors used for expansion
        query_ast: GraphQL query AST object that potentially requires macro expansion
        query_args: dict mapping strings to any type, containing the arguments for the query

    Returns:
        tuple (new_query_ast, new_query_args) containing a potentially-rewritten GraphQL query
        AST and its new args. If the input GraphQL query AST did not make use of any macros,
        the returned values are guaranteed to be the exact same objects as the input ones.
    """
    definition_ast = get_only_query_definition(query_ast, GraphQLInvalidMacroError)
    base_ast = get_only_selection_from_ast(definition_ast, GraphQLInvalidMacroError)

    base_start_type_name = get_ast_field_name(base_ast)
    query_type = macro_registry.schema_without_macros.query_type
    base_start_type = query_type.fields[base_start_type_name].type
    tag_names = get_all_tag_names(base_ast)

    # Allow list types at the query root in the schema.
    base_start_type = strip_non_null_and_list_from_type(base_start_type)

    new_base_ast, new_query_args = _expand_macros_in_inner_ast(
        macro_registry, base_start_type, base_ast, query_args, tag_names
    )

    if new_base_ast is base_ast:
        # No macro expansion happened.
        if new_query_args != query_args:
            raise AssertionError(
                "No macro expansion happened, but the query args object changed: "
                "{} vs {}. This should be impossible. GraphQL query AST: {}".format(
                    query_args, new_query_args, query_ast
                )
            )

        new_query_ast = query_ast
        new_query_args = query_args
    else:
        new_definition = copy(definition_ast)
        new_definition.selection_set = SelectionSetNode(selections=[new_base_ast])

        new_query_ast = copy(query_ast)
        new_query_ast.definitions = [new_definition]

    return new_query_ast, new_query_args

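# Typical usage sketch (not part of the library; `macro_registry` is assumed to be a
# MacroRegistry built elsewhere, and the query below uses hypothetical schema names):
#
#     from graphql import parse, print_ast
#
#     query_ast = parse('{ Animal { name @output(out_name: "name") } }')
#     new_query_ast, new_args = expand_macros_in_query_ast(macro_registry, query_ast, {})
#     print(print_ast(new_query_ast))  # identical to the input if no macro edges were used
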
def remove_directives_from_ast(ast, directive_names_to_omit):
    """Return an equivalent AST to the input, but with instances of the named directives omitted.

    Args:
        ast: GraphQL library AST object, such as a Field, InlineFragment, or OperationDefinition
        directive_names_to_omit: set of strings describing the names of the directives to omit

    Returns:
        GraphQL library AST object, equivalent to the input one, with all instances of the
        named directives omitted. If the specified directives do not appear in the input AST,
        the returned object is the exact same object as the input.
    """
    if not isinstance(ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        return ast

    made_changes = False

    new_selection_set = None
    if ast.selection_set is not None:
        new_selections = []
        for selection_ast in ast.selection_set.selections:
            new_selection_ast = remove_directives_from_ast(selection_ast, directive_names_to_omit)

            if selection_ast is not new_selection_ast:
                # Since we did not get the exact same object as the input, changes were made.
                # That means this call will also need to make changes and return a new object.
                made_changes = True

            new_selections.append(new_selection_ast)
        new_selection_set = SelectionSetNode(selections=new_selections)

    directives_to_keep = FrozenList(
        [
            directive
            for directive in ast.directives
            if directive.name.value not in directive_names_to_omit
        ]
    )
    if len(directives_to_keep) != len(ast.directives):
        made_changes = True

    if not made_changes:
        # We didn't change anything, return the original input object.
        return ast

    new_ast = copy(ast)
    new_ast.selection_set = new_selection_set
    new_ast.directives = directives_to_keep
    return new_ast

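# Illustrative usage sketch (not part of the library). Assumes graphql-core 3.x; @optional is
# used here simply as an example of a directive name to strip, with hypothetical field names.
def _example_remove_directives_usage():
    """Strip @optional directives from a small parsed query and print the result."""
    from graphql import parse, print_ast

    definition = parse(
        '{ Animal { out_Animal_ParentOf @optional { name @output(out_name: "parent_name") } } }'
    ).definitions[0]
    stripped = remove_directives_from_ast(definition, {"optional"})
    print(print_ast(stripped))  # the @optional directive is gone; @output is preserved
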
def _add_pagination_filter_recursively(
    query_analysis: QueryPlanningAnalysis,
    node_ast: DocumentNode,
    full_query_path: VertexPath,
    query_path: VertexPath,
    pagination_field: str,
    directive_to_add: DirectiveNode,
    extended_parameters: Dict[str, Any],
) -> Tuple[DocumentNode, Dict[str, Any]]:
    """Add the filter to the target field, returning a query and its new parameters.

    Args:
        query_analysis: the entire query with any query analysis needed for pagination
        node_ast: part of the entire query, the location where we are adding a filter
        full_query_path: path to the pagination vertex from the root
        query_path: path to the pagination vertex from this node (node_ast)
        pagination_field: the field on which we are adding a filter
        directive_to_add: the filter directive to add
        extended_parameters: the original parameters of the query along with the parameter
                             used in directive_to_add

    Returns:
        tuple (new_ast, new_parameters)
        new_ast: a query with the filter inserted, and any filters on the same location with
                 the same operation removed.
        new_parameters: the parameters to use with the new_ast
    """
    if not isinstance(node_ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        raise AssertionError(
            f'Input AST is of type "{type(node_ast).__name__}", which should not be a selection.'
        )

    if len(query_path) == 0:
        return _add_pagination_filter_at_node(
            query_analysis,
            full_query_path,
            node_ast,
            pagination_field,
            directive_to_add,
            extended_parameters,
        )

    if node_ast.selection_set is None:
        raise AssertionError(f"Invalid query path {query_path} {node_ast}.")

    found_field = False
    new_selections = []
    for selection_ast in node_ast.selection_set.selections:
        new_selection_ast = selection_ast
        field_name = get_ast_field_name(selection_ast)
        if field_name == query_path[0]:
            found_field = True
            new_selection_ast, new_parameters = _add_pagination_filter_recursively(
                query_analysis,
                selection_ast,
                full_query_path,
                query_path[1:],
                pagination_field,
                directive_to_add,
                extended_parameters,
            )
        new_selections.append(new_selection_ast)

    if not found_field:
        raise AssertionError(f"Invalid query path {query_path} {node_ast}.")

    new_ast = copy(node_ast)
    new_ast.selection_set = SelectionSetNode(selections=new_selections)
    return new_ast, new_parameters

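# Note on the VertexPath arguments above (illustrative; vertex names are hypothetical): both
# paths are tuples of vertex field names, and query_path loses its first element on each
# recursive step, e.g. when paginating on the out_Animal_ParentOf neighbor of the Animal root:
#
#     full_query_path = ("Animal", "out_Animal_ParentOf")
#     query_path      = ("Animal", "out_Animal_ParentOf")   # at the top-level call
#     query_path      = ("out_Animal_ParentOf",)            # after recursing into the Animal field
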
def _add_pagination_filter_at_node(
    query_analysis: QueryPlanningAnalysis,
    node_vertex_path: VertexPath,
    node_ast: DocumentNode,
    pagination_field: str,
    directive_to_add: DirectiveNode,
    extended_parameters: Dict[str, Any],
) -> Tuple[DocumentNode, Dict[str, Any]]:
    """Add the filter to the target field, returning a query and its new parameters.

    Args:
        query_analysis: the entire query with any query analysis needed for pagination
        node_vertex_path: path to the node_ast from the query root
        node_ast: part of the entire query, rooted at the location where we are adding a filter
        pagination_field: field on which we are adding a filter
        directive_to_add: filter directive to add
        extended_parameters: original parameters of the query along with the parameter used
                             in directive_to_add

    Returns:
        tuple (new_ast, new_parameters)
        new_ast: a query with the filter inserted, and any filters on the same location with
                 the same operation removed.
        new_parameters: the parameters to use with the new_ast
    """
    if not isinstance(node_ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        raise AssertionError(
            f'Input AST is of type "{type(node_ast).__name__}", which should not be a selection.'
        )

    new_directive_operation = _get_filter_node_operation(directive_to_add)
    new_directive_parameter_name = _get_binary_filter_node_parameter(directive_to_add)
    new_directive_parameter_value = extended_parameters[new_directive_parameter_name]

    # If the field exists, add the new filter and remove redundant filters.
    new_parameters = dict(extended_parameters)
    new_selections = []
    found_field = False
    for selection_ast in node_ast.selection_set.selections:
        new_selection_ast = selection_ast
        field_name = get_ast_field_name(selection_ast)
        if field_name == pagination_field:
            found_field = True
            new_selection_ast = copy(selection_ast)
            new_selection_ast.directives = copy(selection_ast.directives)

            new_directives = []
            for directive in selection_ast.directives:
                operation = _get_filter_node_operation(directive)
                if _are_filter_operations_equal_and_possible_to_eliminate(
                    new_directive_operation, operation
                ):
                    parameter_name = _get_binary_filter_node_parameter(directive)
                    parameter_value = new_parameters[parameter_name]
                    if not _is_new_filter_stronger(
                        query_analysis,
                        PropertyPath(node_vertex_path, pagination_field),
                        operation,
                        new_directive_parameter_value,
                        parameter_value,
                    ):
                        logger.error(
                            "Pagination filter %(new_filter)s on %(pagination_field)s with param "
                            "%(new_filter_param)s is not stronger than existing filter "
                            "%(existing_filter)s with param %(existing_param)s. This is either a "
                            "bug in parameter generation, or this assertion is outdated. "
                            "Query: %(query)s",
                            {
                                "new_filter": print_ast(directive_to_add),
                                "pagination_field": pagination_field,
                                "new_filter_param": new_directive_parameter_value,
                                "existing_filter": print_ast(directive),
                                "existing_param": parameter_value,
                                "query": query_analysis.query_string_with_parameters,
                            },
                        )
                    del new_parameters[parameter_name]
                else:
                    new_directives.append(directive)
            new_directives.append(directive_to_add)
            new_selection_ast.directives = new_directives
        new_selections.append(new_selection_ast)

    # If the field didn't exist, create it and add the new directive to it.
    if not found_field:
        new_selections.insert(
            0, FieldNode(name=NameNode(value=pagination_field), directives=[directive_to_add])
        )

    new_ast = copy(node_ast)
    new_ast.selection_set = SelectionSetNode(selections=new_selections)
    return new_ast, new_parameters

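# Illustrative effect (hypothetical field and parameter names, written in the compiler's
# @filter(op_name: ..., value: [...]) syntax): if the pagination field already carries a filter
# with the same operation, that filter and its parameter are dropped and the new, stronger
# filter produced by the paginator is appended instead:
#
#     before:  uuid @filter(op_name: ">=", value: ["$old_lower_bound"])
#     after:   uuid @filter(op_name: ">=", value: ["$new_lower_bound"])
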
def _merge_selection_into_target(subclass_sets, target_ast, target_class_name, selection_ast):
    """Add the selections, directives, and coercions from the selection_ast to the target_ast.

    Mutate the target_ast, merging into it everything from the selection_ast. If the target is
    at a type coercion and the selection_ast starts with a type coercion, combine them into one
    coercion that preserves the semantics while avoiding nested coercions, which are disallowed.

    For details on how fields and directives are merged, see merge_selection_sets().

    Args:
        subclass_sets: dict mapping class names to the set of names of their subclasses
        target_ast: AST at the @macro_edge_target directive
        target_class_name: str, the name of the GraphQL type to which the macro edge points
        selection_ast: AST to merge inside the target. Required to have a nonempty selection set.
    """
    if selection_ast.selection_set is None or not selection_ast.selection_set.selections:
        raise AssertionError(
            "Precondition violated. selection_ast is expected to be nonempty {}".format(
                selection_ast
            )
        )

    # Remove the @macro_edge_target directive.
    new_target_directives = [
        directive
        for directive in target_ast.directives
        if directive.name.value != MacroEdgeTargetDirective.name
    ]
    if len(target_ast.directives) != len(new_target_directives) + 1:
        raise AssertionError(
            "Expected the target_ast to contain a single @macro_edge_target "
            "directive, but that was unexpectedly not the case: "
            "{} {}".format(target_ast, new_target_directives)
        )
    target_ast.directives = new_target_directives

    # See if there's a type coercion in the selection_ast.
    coercion = None
    for selection in selection_ast.selection_set.selections:
        if isinstance(selection, InlineFragmentNode):
            if len(selection_ast.selection_set.selections) != 1:
                raise GraphQLCompilationError(
                    "Found selections outside type coercion. "
                    "Please move them inside the coercion. "
                    "Error near field name: {}".format(get_ast_field_name(selection_ast))
                )
            else:
                coercion = selection

    continuation_ast = selection_ast

    # Deal with type coercions immediately within the macro edge, if any.
    if coercion is not None:
        coercion_class = coercion.type_condition.name.value

        # Ensure the coercion is valid. It may only go to a subtype of the type of the vertex
        # field created by the macro edge, where we allow subtypes to be defined by
        # subclass_sets to work around the limitations of the GraphQL type system. If the
        # user's coercion is to a subtype of the macro edge target's type, then this is a
        # narrowing conversion and we simply add the user's coercion, or replace any existing
        # coercion if one is present.
        if coercion_class != target_class_name:
            if coercion_class not in subclass_sets.get(target_class_name, set()):
                raise GraphQLCompilationError(
                    "Attempting to use a type coercion to coerce a value of type {field_type} "
                    "(from field named {field_name}) to incompatible type {coercion_type}, "
                    "which is not a subtype of {field_type}. Only coercions "
                    "to a subtype are allowed.".format(
                        field_type=target_class_name,
                        coercion_type=coercion_class,
                        field_name=get_ast_field_name(selection_ast),
                    )
                )

        continuation_ast = coercion
        if isinstance(target_ast, InlineFragmentNode):
            # The macro edge definition has a type coercion as well; replace it with the
            # user's one.
            target_ast.type_condition = coercion.type_condition
        else:
            # No coercion in the macro edge definition,
            # slip the user's type coercion inside the target AST.
            new_coercion = InlineFragmentNode(
                type_condition=coercion.type_condition,
                selection_set=target_ast.selection_set,
                directives=[],
            )
            target_ast.selection_set = SelectionSetNode(selections=[new_coercion])
            target_ast = new_coercion

    # Merge the continuation into the target.
    target_ast.directives = list(target_ast.directives) + list(continuation_ast.directives)
    target_ast.selection_set = merge_selection_sets(
        target_ast.selection_set, continuation_ast.selection_set
    )

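# Illustrative effect (hypothetical types and fields; @macro_edge_target per the macro system):
# merging a user selection that starts with a type coercion into a non-coercion target
#
#     target_ast (in the macro definition):  out_Entity_Related @macro_edge_target { uuid }
#     selection_ast (from the user's query): out_Animal_RelatedTo { ... on Food { name @output(out_name: "food") } }
#
# removes @macro_edge_target from the target, wraps the target's existing selections inside a
# new "... on Food" inline fragment, and then merges the coercion's fields and directives into
# that fragment via merge_selection_sets().
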
def _expand_macros_in_inner_ast(macro_registry, current_schema_type, ast, query_args, tag_names):
    """Return (new_ast, new_query_args) containing the AST after macro expansion.

    Args:
        macro_registry: MacroRegistry, the registry of macro descriptors used for expansion
        current_schema_type: GraphQL type object describing the current type at the given AST node
        ast: GraphQL AST object that potentially requires macro expansion
        query_args: dict mapping strings to any type, containing the arguments for the query
        tag_names: set of names of tags currently in use. The set is mutated in this function.

    Returns:
        tuple (new_ast, new_query_args) containing a potentially-rewritten GraphQL AST object
        and its new args. If the input GraphQL AST did not make use of any macros, the returned
        values are guaranteed to be the exact same objects as the input ones.
    """
    if ast.selection_set is None:
        # No macro expansion happens at this level if there are no selections.
        return ast, query_args

    schema = macro_registry.schema_without_macros

    made_changes = False

    new_selection_set = None
    new_query_args = query_args

    for selection_ast in ast.selection_set.selections:
        new_selection_ast = selection_ast

        # Selections from macro expansion to be added before and after this selection.
        prefix_selections = []
        suffix_selections = []

        if isinstance(selection_ast, InlineFragmentNode):
            vertex_field_type = schema.get_type(selection_ast.type_condition.name.value)
            new_selection_ast, new_query_args = _expand_macros_in_inner_ast(
                macro_registry, vertex_field_type, selection_ast, new_query_args, tag_names
            )
        else:
            field_name = get_ast_field_name(selection_ast)
            if is_vertex_field_name(field_name):
                (
                    new_selection_ast,
                    new_query_args,
                    prefix_selections,
                    suffix_selections,
                ) = expand_potential_macro_edge(
                    macro_registry, current_schema_type, selection_ast, new_query_args, tag_names
                )

                if new_selection_ast is not selection_ast:
                    # We expanded a macro edge, make sure the field name stays in sync.
                    field_name = get_ast_field_name(new_selection_ast)

                # Recurse on the new_selection_ast, to expand any macros
                # that exist at a deeper level.
                # TODO(predrag): Move get_vertex_field_type() to the top-level schema.py file,
                #                instead of reaching into the compiler.helpers module.
                vertex_field_type = get_vertex_field_type(current_schema_type, field_name)
                new_selection_ast, new_query_args = _expand_macros_in_inner_ast(
                    macro_registry, vertex_field_type, new_selection_ast, new_query_args, tag_names
                )

        if new_selection_ast is selection_ast and (prefix_selections or suffix_selections):
            raise AssertionError(
                "No macro expansion happened but unexpectedly there are "
                "prefix or suffix selections to expand: {} {}. "
                "current_schema_type: {}, ast: {}, field_name: {}".format(
                    prefix_selections,
                    suffix_selections,
                    current_schema_type,
                    ast,
                    field_name,
                )
            )

        if new_selection_ast is not selection_ast:
            made_changes = True

        new_selection_set = merge_selection_sets(
            new_selection_set,
            SelectionSetNode(
                selections=list(chain(prefix_selections, [new_selection_ast], suffix_selections))
            ),
        )

    if made_changes:
        result_ast = copy(ast)
        result_ast.selection_set = new_selection_set
        result_query_args = new_query_args
    else:
        if new_query_args is not query_args:
            raise AssertionError(
                "No changes made during macro expansion, but query args changed: "
                "{} vs {}. AST: {}".format(query_args, new_query_args, ast)
            )

        result_ast = ast
        result_query_args = query_args

    return result_ast, result_query_args

def _add_filter_at_field_with_output(ast, field_out_name, input_filter_name):
    """Return an AST with @filter added at the field with the specified @output, if found.

    Args:
        ast: Field, InlineFragment, or OperationDefinition, an AST Node type that occurs in
             the selections of a SelectionSet. It is not modified by this function
        field_out_name: str, the out_name of an @output directive. This function will create
                        a new @filter directive on the field that has an @output directive
                        with this out_name
        input_filter_name: str, the name of the local variable in the new @filter directive
                           created

    Returns:
        Field, InlineFragment, or OperationDefinition, identical to the input ast except with
        an @filter added at the specified field if such a field is found. If no changes were
        made, this is the same object as the input
    """
    if not isinstance(ast, (FieldNode, InlineFragmentNode, OperationDefinitionNode)):
        raise AssertionError(
            u'Input AST is of type "{}", which should not be a selection.'
            u"".format(type(ast).__name__)
        )

    if isinstance(ast, FieldNode):
        # Check whether this field has the expected directive; if so, modify and return.
        if ast.directives is not None and any(
            _is_output_directive_with_name(directive, field_out_name)
            for directive in ast.directives
        ):
            new_directives = list(ast.directives)
            new_directives.append(_get_in_collection_filter_directive(input_filter_name))
            new_ast = copy(ast)
            new_ast.directives = new_directives
            return new_ast

    if ast.selection_set is None:  # Nothing to recurse on
        return ast

    # Otherwise, recurse and look for the field with the desired out_name.
    made_changes = False
    new_selections = []
    for selection in ast.selection_set.selections:
        new_selection = _add_filter_at_field_with_output(
            selection, field_out_name, input_filter_name
        )
        if new_selection is not selection:  # Changes made somewhere down the line
            if not made_changes:
                made_changes = True
            else:
                # A change has already been made, but there is a new change. This implies that
                # multiple fields have the @output directive with the desired name.
                raise GraphQLValidationError(
                    u'There are multiple @output directives with the out_name "{}"'.format(
                        field_out_name
                    )
                )
        new_selections.append(new_selection)

    if made_changes:
        new_ast = copy(ast)
        new_ast.selection_set = SelectionSetNode(selections=new_selections)
        return new_ast
    else:
        return ast

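# Illustrative effect (hypothetical names; the exact directive produced by
# _get_in_collection_filter_directive is assumed to be an in_collection @filter over the given
# local variable): given
#
#     { Animal { uuid @output(out_name: "animal_uuid") } }
#
# calling _add_filter_at_field_with_output(ast, "animal_uuid", "__intermediate_uuids") returns
# an equivalent AST where the uuid field additionally carries roughly
#
#     @filter(op_name: "in_collection", value: ["$__intermediate_uuids"])
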
def merge_selection_sets(selection_set_a, selection_set_b):
    """Merge selection sets, merging directives on name conflict.

    Create a selection set that contains the selections of both inputs. If there is a name
    collision on a property field, we take the directives from both inputs on that field and
    merge them. We disallow name collision on a vertex field.

    The value None represents an empty SelectionSet.

    The order of selections in the resulting SelectionSet has the following properties:
    - property fields are before vertex fields.
    - property fields in selection_set_b come later than other property fields.
    - vertex fields in selection_set_b come later than other vertex fields.
    - ties are resolved by respecting the ordering of fields in the input arguments.

    Args:
        selection_set_a: SelectionSet or None to be merged with the other
        selection_set_b: SelectionSet or None to be merged with the other

    Returns:
        SelectionSet or None with contents from both input selection sets
    """
    if selection_set_a is None:
        return selection_set_b
    if selection_set_b is None:
        return selection_set_a

    # Convert to dicts keyed by field name.
    selection_dict_a = get_uniquely_named_objects_by_name(selection_set_a.selections)
    selection_dict_b = get_uniquely_named_objects_by_name(selection_set_b.selections)

    # Compute the intersection by name.
    common_selection_dict = dict()
    common_fields = set(selection_dict_a.keys()) & set(selection_dict_b.keys())
    for field_name in common_fields:
        field_a = selection_dict_a[field_name]
        field_b = selection_dict_b[field_name]
        if field_a.selection_set is not None or field_b.selection_set is not None:
            raise GraphQLCompilationError(
                "Macro edge expansion results in a query traversing the "
                "same edge {} twice, which is disallowed.".format(field_name)
            )

        # TODO(predrag): Find a way to avoid this situation by making the rewriting smarter.
        field_a_has_tag_directive = any(
            directive.name.value == TagDirective.name for directive in field_a.directives
        )
        field_b_has_tag_directive = any(
            directive.name.value == TagDirective.name for directive in field_b.directives
        )
        if field_a_has_tag_directive and field_b_has_tag_directive:
            raise GraphQLCompilationError(
                "Macro edge expansion results in field {} having two "
                "@tag directives, which is disallowed.".format(field_name)
            )

        merged_field = copy(field_a)
        merged_field.directives = list(chain(field_a.directives, field_b.directives))
        common_selection_dict[field_name] = merged_field

    # Merge the dicts, using common_selection_dict for keys present in both selection sets.
    merged_selection_dict = copy(selection_dict_a)
    merged_selection_dict.update(selection_dict_b)
    merged_selection_dict.update(common_selection_dict)  # Overwrite keys in the intersection.

    # The macro or the user code could have an unused (pro-forma) field for the sake of not
    # having an empty selection in a vertex field. We remove pro-forma fields if they are
    # no longer necessary.
    if len(merged_selection_dict) > 1:  # Otherwise we need a pro-forma field
        non_pro_forma_fields = {
            name: ast
            for name, ast in six.iteritems(merged_selection_dict)
            # If there are selections or directives under the field, it is not pro-forma.
            if ast.selection_set is not None or len(ast.directives) > 0
        }
        if non_pro_forma_fields:
            merged_selection_dict = non_pro_forma_fields
        else:
            # There are multiple pro-forma fields. Pick the one with the lexicographically
            # smallest name.
            lexicographically_first_name = min(merged_selection_dict.keys())
            merged_selection_dict = {
                lexicographically_first_name: merged_selection_dict[lexicographically_first_name]
            }

    # Get a deterministic ordering of the merged selections.
    selection_name_order = list(
        chain(
            (
                ast.name.value
                for ast in selection_set_a.selections
                if ast.name.value not in selection_dict_b
            ),
            (ast.name.value for ast in selection_set_b.selections),
        )
    )

    # Make sure that all property fields come before all vertex fields. Note that sort is stable.
    merged_selections = [
        merged_selection_dict[name]
        for name in selection_name_order
        if name in merged_selection_dict
    ]
    return SelectionSetNode(
        selections=sorted(merged_selections, key=lambda ast: ast.selection_set is not None)
    )

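# Illustrative merge (hypothetical fields): merging the selection sets of
#
#     { name @output(out_name: "name") out_Animal_ParentOf { uuid } }     (selection_set_a)
#     { name @tag(tag_name: "n") color @output(out_name: "color") }       (selection_set_b)
#
# combines the directives on the colliding property field "name", keeps the remaining fields,
# and orders the property fields (name, color) before the vertex field out_Animal_ParentOf:
#
#     { name @output(out_name: "name") @tag(tag_name: "n")
#       color @output(out_name: "color")
#       out_Animal_ParentOf { uuid } }
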