def generate_parameterized_queries(
    query_analysis: QueryPlanningAnalysis,
    vertex_partition: VertexPartitionPlan,
    parameter_value: Any,
) -> Tuple[ASTWithParameters, ASTWithParameters]:
    """Generate two parameterized queries that can be used to paginate over a given query.

    The first query is produced by adding a "<" filter to the pagination field of the
    original query, and the second by adding a ">=" filter to the same field. The given
    parameter value is used in both filters. This function may also remove existing
    filters that are made redundant by the newly inserted filter.

    If the parameter_value is set such that the newly produced query is equivalent to the
    original query, an AssertionError is raised. Therefore, the parameter_value should be
    a value inside the range of initial possible values for that field.

    Args:
        query_analysis: the query with any query analysis needed for pagination
        vertex_partition: pagination plan dictating where to insert the filter
        parameter_value: the value of the parameter used for pagination

    Returns:
        tuple (next_page, remainder)
        next_page: AST and params for next page.
        remainder: AST and params for the remainder query that returns all results
                   not on the next page.
    """
    query = query_analysis.ast_with_parameters
    query_root = get_only_query_definition(query.query_ast, GraphQLError)

    # Create extended parameters that include the pagination parameter value
    param_name = _generate_new_name("__paged_param",
                                    set(query.parameters.keys()))
    extended_parameters = dict(query.parameters)
    extended_parameters[param_name] = parameter_value

    next_page_root, next_page_parameters = _add_pagination_filter_recursively(
        query_analysis,
        query_root,
        vertex_partition.query_path,
        vertex_partition.query_path,
        vertex_partition.pagination_field,
        _make_binary_filter_directive_node("<", param_name),
        extended_parameters,
    )
    remainder_root, remainder_parameters = _add_pagination_filter_recursively(
        query_analysis,
        query_root,
        vertex_partition.query_path,
        vertex_partition.query_path,
        vertex_partition.pagination_field,
        _make_binary_filter_directive_node(">=", param_name),
        extended_parameters,
    )

    next_page = ASTWithParameters(DocumentNode(definitions=[next_page_root]),
                                  next_page_parameters)
    remainder = ASTWithParameters(DocumentNode(definitions=[remainder_root]),
                                  remainder_parameters)
    return next_page, remainder
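# Illustration (hypothetical query; QueryPlanningAnalysis and VertexPartitionPlan
# come from the paginator's planning step). Paginating on Animal.uuid splits
#
#   { Animal { uuid @output(out_name: "uuid") } }
#
# into a next-page query and a remainder query that differ only in the inserted
# filter, where "__paged_param_0" stands in for whatever fresh parameter name
# _generate_new_name produces:
#
#   next_page:  { Animal { uuid @filter(op_name: "<", value: ["$__paged_param_0"])
#                               @output(out_name: "uuid") } }
#   remainder:  { Animal { uuid @filter(op_name: ">=", value: ["$__paged_param_0"])
#                               @output(out_name: "uuid") } }
#
# Both queries' parameters include {"__paged_param_0": parameter_value}.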
def _get_basic_schema_ast(query_type: str) -> DocumentNode:
    """Create a basic AST Document representing a nearly blank schema.

    The output AST contains a single query type, whose name is the input string. The query type
    is guaranteed to be the second entry of Document definitions, after the schema definition.
    The query type has no fields.

    Args:
        query_type: name of the query type for the schema

    Returns:
        DocumentNode, representing a nearly blank schema
    """
    blank_ast = DocumentNode(definitions=[
        SchemaDefinitionNode(
            operation_types=[
                OperationTypeDefinitionNode(
                    operation=OperationType.QUERY,
                    type=NamedTypeNode(name=NameNode(value=query_type)),
                )
            ],
            directives=[],
        ),
        ObjectTypeDefinitionNode(
            name=NameNode(value=query_type),
            fields=[],
            interfaces=[],
            directives=[],
        ),
    ])
    return blank_ast
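# Usage sketch (assuming graphql-core 3.x; "RootSchemaQuery" is an arbitrary
# query type name): printing the blank schema AST shows the schema definition
# followed by the empty query type.
from graphql import print_ast

print(print_ast(_get_basic_schema_ast("RootSchemaQuery")))
# schema {
#   query: RootSchemaQuery
# }
#
# type RootSchemaQuery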
def _make_query_plan_recursive(
    sub_query_node: SubQueryNode,
    sub_query_plan: SubQueryPlan,
    output_join_descriptors: List[OutputJoinDescriptor],
) -> None:
    """Recursively copy the structure of sub_query_node onto sub_query_plan.

    For each child connection contained in sub_query_node, create a new SubQueryPlan for
    the corresponding child SubQueryNode, add appropriate @filter directive to the child AST,
    and attach the new SubQueryPlan to the list of children of the input sub-query plan.

    Args:
        sub_query_node: SubQueryNode, whose child_query_connections are copied over onto
                        sub_query_plan. It is not modified by this function.
        sub_query_plan: SubQueryPlan, whose list of child query plans and query AST are
                        modified.
        output_join_descriptors: describing which outputs should be joined and how.

    """
    # Iterate through child connections of query node
    for child_query_connection in sub_query_node.child_query_connections:
        child_sub_query_node = child_query_connection.sink_query_node
        parent_out_name = child_query_connection.source_field_out_name
        child_out_name = child_query_connection.sink_field_out_name

        child_query_type = get_only_query_definition(
            child_sub_query_node.query_ast, GraphQLValidationError)
        # The @filter's local variable is named the same as the out_name of the
        # parent's @output directive.
        child_query_type_with_filter = _add_filter_at_field_with_output(
            child_query_type, child_out_name, parent_out_name)
        if child_query_type is child_query_type_with_filter:
            raise AssertionError(
                'An @output directive with out_name "{}" is unexpectedly not found in the '
                'AST "{}".'.format(child_out_name, child_query_type))
        new_child_query_ast = DocumentNode(
            definitions=[child_query_type_with_filter])

        # Create new SubQueryPlan for child
        child_sub_query_plan = SubQueryPlan(
            query_ast=new_child_query_ast,
            schema_id=child_sub_query_node.schema_id,
            parent_query_plan=sub_query_plan,
            child_query_plans=[],
        )

        # Add new SubQueryPlan to parent's child list
        sub_query_plan.child_query_plans.append(child_sub_query_plan)

        # Add information about this edge
        new_output_join_descriptor = OutputJoinDescriptor(
            output_names=(parent_out_name, child_out_name))
        output_join_descriptors.append(new_output_join_descriptor)

        # Recursively repeat on child SubQueryPlans
        _make_query_plan_recursive(child_sub_query_node, child_sub_query_plan,
                                   output_join_descriptors)
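# Sketch of the directive added by _add_filter_at_field_with_output (an
# assumption based on the cross-schema stitching convention, in which the
# filter's runtime parameter reuses the parent's out_name):
#
#   child_field @filter(op_name: "in_collection", value: ["$parent_out_name"])
#
# Built as a graphql-core AST node, that directive would look roughly like:
from graphql.language.ast import (
    ArgumentNode, DirectiveNode, ListValueNode, NameNode, StringValueNode)

example_filter_directive = DirectiveNode(
    name=NameNode(value="filter"),
    arguments=[
        ArgumentNode(name=NameNode(value="op_name"),
                     value=StringValueNode(value="in_collection")),
        ArgumentNode(name=NameNode(value="value"),
                     value=ListValueNode(
                         values=[StringValueNode(value="$parent_out_name")])),
    ],
)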
def _get_query_document(root_vertex_field_name, root_selections):
    """Return a Document representing a query with the specified name and selections."""
    return DocumentNode(definitions=[
        OperationDefinitionNode(
            operation=OperationType.QUERY,
            selection_set=SelectionSetNode(selections=[
                FieldNode(
                    name=NameNode(value=root_vertex_field_name),
                    selection_set=SelectionSetNode(selections=root_selections),
                    directives=[],
                )
            ]),
        )
    ])
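# Usage sketch (assuming graphql-core 3.x; "Animal" and "name" are hypothetical
# field names): building and printing a one-field query.
from graphql import print_ast
from graphql.language.ast import FieldNode, NameNode

example_document = _get_query_document(
    "Animal", [FieldNode(name=NameNode(value="name"), directives=[])])
print(print_ast(example_document))
# {
#   Animal {
#     name
#   }
# }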
def _get_minimal_query_ast_from_macro_ast(macro_ast):
    """Get a query that should successfully compile to IR if the macro is valid."""
    ast_without_macro_directives = remove_directives_from_ast(
        macro_ast, DIRECTIVES_REQUIRED_IN_MACRO_EDGE_DEFINITION)

    # We will add this output directive to make the ast a valid query
    output_directive = DirectiveNode(
        name=NameNode(value="output"),
        arguments=[
            ArgumentNode(name=NameNode(value="out_name"),
                         value=StringValueNode(value="dummy_output_name"))
        ],
    )

    # Shallow copy everything on the path to the first level selection list
    query_ast = copy(ast_without_macro_directives)
    root_level_selection = copy(
        get_only_selection_from_ast(query_ast, GraphQLInvalidMacroError))
    first_level_selections = list(
        root_level_selection.selection_set.selections)

    # Add an output to a new or existing __typename field
    existing_typename_field = None
    for idx, selection in enumerate(first_level_selections):
        if isinstance(selection, FieldNode):
            if selection.name.value == "__typename":
                # We have a copy of the list, but the elements are references to objects
                # in macro_ast that we don't want to mutate. So the following copy is necessary.
                existing_typename_field = copy(selection)
                existing_typename_field.directives = copy(
                    existing_typename_field.directives)
                existing_typename_field.directives.append(output_directive)
                first_level_selections[idx] = existing_typename_field
    if existing_typename_field is None:
        first_level_selections.insert(
            0,
            FieldNode(name=NameNode(value="__typename"),
                      directives=[output_directive]))

    # Propagate the changes back to the result_ast
    root_level_selection.selection_set = SelectionSetNode(
        selections=first_level_selections)
    query_ast.selection_set = SelectionSetNode(
        selections=[root_level_selection])
    return DocumentNode(definitions=[query_ast])
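# Illustration (hypothetical macro definition): given a macro edge AST such as
#
#   { Animal @macro_edge_definition(name: "out_Animal_Example") {
#         out_Animal_ParentOf @macro_edge_target { uuid } } }
#
# this function strips the macro-only directives and produces a query the
# compiler can type-check, roughly:
#
#   { Animal {
#         __typename @output(out_name: "dummy_output_name")
#         out_Animal_ParentOf { uuid } } }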
def _process_generic_type_definition(
    generic_type: Union[EnumTypeDefinitionNode, InterfaceTypeDefinitionNode,
                        ObjectTypeDefinitionNode, UnionTypeDefinitionNode],
    schema_id: str,
    existing_scalars: Set[str],
    type_name_to_schema_id: Dict[str, str],
    merged_schema_ast: DocumentNode,
) -> Tuple[Dict[str, str], DocumentNode]:
    """Compare new type against existing scalars and types, update records and schema.

    Args:
        generic_type: AST node representing the definition of a type.
        schema_id: identifier of the schema that this type came from.
        existing_scalars: set of names of all existing scalars.
        type_name_to_schema_id: mapping names of types to the identifier of the schema that they
                                came from.
        merged_schema_ast: AST representing a schema.

    Returns:
        tuple (new_type_name_to_schema_id, new_merged_schema_ast) with the following information:
            new_type_name_to_schema_id: type_name_to_schema_id updated with the new generic_type.
            new_merged_schema_ast: merged_schema_ast with new generic_type added to its definitions.
    """
    type_name = generic_type.name.value
    if type_name in existing_scalars:
        raise SchemaMergeNameConflictError(
            f'New type "{type_name}" in schema "{schema_id}" clashes with existing scalar. '
            f'Consider renaming type "{type_name}" in schema "{schema_id}" '
            f"before merging, to avoid conflicts.")
    if type_name in type_name_to_schema_id:
        raise SchemaMergeNameConflictError(
            f'New type "{type_name}" in schema "{schema_id}" clashes with existing type '
            f'"{type_name}" in schema "{type_name_to_schema_id[type_name]}". Consider renaming '
            f'type "{type_name}" in either schema before merging, to avoid conflicts.'
        )

    new_definitions = list(merged_schema_ast.definitions)
    new_definitions.append(generic_type)
    new_merged_schema_ast = DocumentNode(definitions=new_definitions)
    new_type_name_to_schema_id = dict(type_name_to_schema_id)
    new_type_name_to_schema_id[type_name] = schema_id
    return new_type_name_to_schema_id, new_merged_schema_ast
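# Usage sketch (assuming graphql-core 3.x; "Toy" and "schema_a" are made-up
# names): merging one new object type into a blank schema AST.
from graphql import parse

toy_type = parse("type Toy { id: ID }").definitions[0]
merged_ast = _get_basic_schema_ast("RootSchemaQuery")
name_to_schema_id, merged_ast = _process_generic_type_definition(
    toy_type, "schema_a", existing_scalars=set(),
    type_name_to_schema_id={}, merged_schema_ast=merged_ast)
# name_to_schema_id == {"Toy": "schema_a"}, and merged_ast now defines "Toy".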
def _process_directive_definition(
    directive: DirectiveDefinitionNode,
    existing_directives: Dict[str, DirectiveDefinitionNode],
    merged_schema_ast: DocumentNode,
) -> Tuple[Dict[str, DirectiveDefinitionNode], DocumentNode]:
    """Compare new directive against existing directives, update records and schema.

    Args:
        directive: AST node representing the definition of a directive.
        existing_directives: mapping the name of each existing directive to the AST node
                             defining it.
        merged_schema_ast: AST representing a schema.

    Returns:
        tuple (new_existing_directives, new_merged_schema_ast) with the following information:
            new_existing_directives: existing_directives updated with the directive.
            new_merged_schema_ast: merged_schema_ast with new directive added to its definitions.
    """
    directive_name = directive.name.value
    if directive_name in existing_directives:
        if print_ast(directive) == print_ast(
                existing_directives[directive_name]):
            return existing_directives, merged_schema_ast
        else:
            raise SchemaMergeNameConflictError(
                'Directive "{}" with definition "{}" has already been defined with '
                'definition "{}".'.format(
                    directive_name,
                    print_ast(directive),
                    print_ast(existing_directives[directive_name]),
                ))
    # new directive
    new_definitions = list(merged_schema_ast.definitions)
    new_definitions.append(directive)
    new_merged_schema_ast = DocumentNode(definitions=new_definitions)
    new_existing_directives = dict(existing_directives)
    new_existing_directives[directive_name] = directive
    return new_existing_directives, new_merged_schema_ast
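# Usage sketch (assuming graphql-core 3.x; the @stitch signature below is the
# one this module uses for cross-schema edges):
from graphql import parse

stitch_directive = parse(
    "directive @stitch(source_field: String!, sink_field: String!) "
    "on FIELD_DEFINITION").definitions[0]
directives, merged_ast = _process_directive_definition(
    stitch_directive, existing_directives={},
    merged_schema_ast=_get_basic_schema_ast("RootSchemaQuery"))
# Re-processing an identical definition is a no-op rather than a conflict:
directives, merged_ast = _process_directive_definition(
    stitch_directive, directives, merged_ast)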
def _process_scalar_definition(
    scalar: ScalarTypeDefinitionNode,
    existing_scalars: Set[str],
    type_name_to_schema_id: Dict[str, str],
    merged_schema_ast: DocumentNode,
) -> Tuple[Set[str], DocumentNode]:
    """Compare new scalar against existing scalars and types, update records and schema.

    Args:
        scalar: AST node representing the definition of a scalar.
        existing_scalars: set of names of all existing scalars.
        type_name_to_schema_id: mapping names of types to the identifier of the schema from which
                                they came.
        merged_schema_ast: AST representing a schema.

    Returns:
        tuple (new_existing_scalars, new_merged_schema_ast) with the following information:
            new_existing_scalars: existing_scalars updated with the name of the scalar added.
            new_merged_schema_ast: merged_schema_ast with new scalar added to its definitions.
    """
    scalar_name = scalar.name.value
    if scalar_name in existing_scalars:
        return existing_scalars, merged_schema_ast
    if scalar_name in type_name_to_schema_id:
        raise SchemaMergeNameConflictError(
            f'New scalar "{scalar_name}" clashes with existing type "{scalar_name}" in '
            f'schema "{type_name_to_schema_id[scalar_name]}". Consider '
            f'renaming type "{scalar_name}" in schema "{type_name_to_schema_id[scalar_name]}" '
            f"before merging, to avoid conflicts.")

    # new, valid scalar
    new_definitions = list(merged_schema_ast.definitions)
    new_definitions.append(scalar)
    new_merged_schema_ast = DocumentNode(definitions=new_definitions)
    new_existing_scalars = set(existing_scalars)
    new_existing_scalars.add(scalar_name)
    return new_existing_scalars, new_merged_schema_ast
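# Usage sketch (assuming graphql-core 3.x): duplicate scalars are deduplicated
# rather than raising, since identical scalar definitions cannot conflict.
from graphql import parse

date_scalar = parse("scalar Date").definitions[0]
scalars, merged_ast = _process_scalar_definition(
    date_scalar, {"Date"}, {}, _get_basic_schema_ast("RootSchemaQuery"))
# scalars is returned unchanged and merged_ast gains no new definition.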
def get_schema_with_macros(macro_registry):
    """Get a new GraphQLSchema with fields where macro edges can be used.

    Preconditions:
    1. No macro in the registry has the same name as a field on the vertex where it applies.
    2. Members of a union type do not have outgoing macros with the same name.

    An easy way to satisfy the preconditions is to create the macro_registry using
    create_macro_registry, and only update it with register_macro_edge, which does all
    the necessary validation.

    Postconditions:
    1. Every GraphQLQuery that uses macros from this registry appropriately should
       successfully type-check against the schema generated from this function.
    2. A GraphQLQuery that uses macros not present in the registry, or uses valid
       macros but on types they are not defined at should fail schema validation with
       the schema generated from this function.
    3. This function is total -- A valid macro registry should not fail to create a
       GraphQL schema with macros.

    Args:
        macro_registry: MacroRegistry object containing a schema and macro descriptors
                        we want to add to the schema.

    Returns:
        GraphQLSchema with additional fields where macro edges can be used.
    """
    # The easiest way to manipulate the schema is through its AST. The easiest
    # way to get an AST is to print it and parse it.
    schema_ast = parse(print_schema(macro_registry.schema_without_macros))

    fields_by_definition_name = {}
    for definition in schema_ast.definitions:
        if isinstance(definition, (ObjectTypeDefinitionNode, InterfaceTypeDefinitionNode)):
            # Cast to list (from FrozenList) to allow for updates.
            fields_by_definition_name[definition.name.value] = list(definition.fields)

    for class_name, macros_for_class in six.iteritems(macro_registry.macro_edges_at_class):
        for macro_edge_name, macro_edge_descriptor in six.iteritems(macros_for_class):
            list_type_at_target = ListTypeNode(
                type=NamedTypeNode(name=NameNode(value=macro_edge_descriptor.target_class_name))
            )
            arguments = []
            directives = [DirectiveNode(name=NameNode(value=MacroEdgeDirective.name))]
            fields_by_definition_name[class_name].append(
                FieldDefinitionNode(
                    name=NameNode(value=macro_edge_name),
                    arguments=arguments,
                    type=list_type_at_target,
                    directives=directives,
                )
            )

    new_definitions = []
    for definition in schema_ast.definitions:
        # Create new (Object)/(Interface)TypeDefinitionNode based on the updated fields.
        if isinstance(definition, ObjectTypeDefinitionNode):
            new_definitions.append(
                ObjectTypeDefinitionNode(
                    interfaces=definition.interfaces,
                    description=definition.description,
                    name=definition.name,
                    directives=definition.directives,
                    loc=definition.loc,
                    fields=FrozenList(fields_by_definition_name[definition.name.value]),
                )
            )
        elif isinstance(definition, InterfaceTypeDefinitionNode):
            new_definitions.append(
                InterfaceTypeDefinitionNode(
                    description=definition.description,
                    name=definition.name,
                    directives=definition.directives,
                    loc=definition.loc,
                    fields=FrozenList(fields_by_definition_name[definition.name.value]),
                )
            )
        else:
            new_definitions.append(definition)

    new_schema_ast = DocumentNode(definitions=new_definitions)
    return build_ast_schema(new_schema_ast)
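# Illustration (hypothetical class and macro names): if the registry defines a
# macro edge "out_Animal_RichSiblings" on class "Animal" targeting class
# "Animal", the returned schema's Animal type gains a field along the lines of:
#
#   out_Animal_RichSiblings: [Animal] @macro_edge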
document = DocumentNode(definitions=[
    ObjectTypeDefinitionNode(
        name=NameNode(value="Query"),
        fields=[
            FieldDefinitionNode(
                name=NameNode(value="me"),
                type=NamedTypeNode(name=NameNode(value="User")),
                arguments=[],
                directives=[],
            )
        ],
        directives=[],
        interfaces=[],
    ),
    ObjectTypeDefinitionNode(
        name=NameNode(value="User"),
        fields=[
            FieldDefinitionNode(
                name=NameNode(value="id"),
                type=NamedTypeNode(name=NameNode(value="ID")),
                arguments=[],
                directives=[],
            ),
            FieldDefinitionNode(
                name=NameNode(value="name"),
                type=NamedTypeNode(name=NameNode(value="String")),
                arguments=[],
                directives=[],
            ),
        ],
        directives=[],
        interfaces=[],
    ),
])
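# Printing the document above round-trips it to the equivalent SDL (a sketch,
# assuming graphql-core 3.x):
from graphql import print_ast

print(print_ast(document))
# type Query {
#   me: User
# }
#
# type User {
#   id: ID
#   name: String
# }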
def _split_query_one_level(
    query_node: SubQueryNode,
    merged_schema_descriptor: MergedSchemaDescriptor,
    edge_to_stitch_fields: Dict[Tuple[str, str], Tuple[str, str]],
    name_assigner: IntermediateOutNameAssigner,
) -> None:
    """Split the query node, creating children out of all branches across cross schema edges.

    The input query_node will be modified. Its query_ast will be replaced by a new AST with
    branches leading out of cross schema edges removed, and new property fields and @output
    directives added as necessary. Its child_query_connections will be modified by tacking
    on SubQueryNodes created from these cut-off branches.

    Args:
        query_node: Query to be split into its child components. Its query_ast
                    will be replaced (but the original AST will not be modified) and its
                    child_query_connections will be modified.
        merged_schema_descriptor: The schema that the query AST contained in the input
                                  query_node targets.
        edge_to_stitch_fields: Mapping (type name, vertex field name) to
                               (source field name, sink field name) used in the @stitch directive
                               for each cross schema edge.
        name_assigner: Object used to generate and keep track of the names of newly
                       created @output directives.

    Raises:
        - GraphQLValidationError if the query AST contained in the input query_node is invalid,
          for example, having an @output directive on a cross schema edge
        - SchemaStructureError if the merged_schema_descriptor provided appears to be invalid
          or inconsistent
    """
    type_info = TypeInfo(merged_schema_descriptor.schema)

    operation_definition = get_only_query_definition(query_node.query_ast,
                                                     GraphQLValidationError)
    if not isinstance(operation_definition, OperationDefinitionNode):
        raise AssertionError(
            f"Expected operation_definition to be an OperationDefinitionNode, but it was of"
            f"type {type(operation_definition)}. This should be impossible.")

    type_info.enter(operation_definition)
    new_operation_definition = _split_query_ast_one_level_recursive(
        query_node, operation_definition, type_info, edge_to_stitch_fields,
        name_assigner)
    type_info.leave(operation_definition)

    if new_operation_definition is not operation_definition:
        query_node.query_ast = DocumentNode(
            definitions=[new_operation_definition])

    # Check resulting AST is valid
    validation_errors = validate(merged_schema_descriptor.schema,
                                 query_node.query_ast)
    if len(validation_errors) > 0:
        raise AssertionError(
            'The resulting split query "{}" is invalid, with the following error messages: '
            "{}".format(query_node.query_ast, validation_errors))

    # Set schema id, check for consistency
    visitor = TypeInfoVisitor(
        type_info,
        SchemaIdSetterVisitor(type_info, query_node,
                              merged_schema_descriptor.type_name_to_schema_id),
    )
    visit(query_node.query_ast, visitor)

    if query_node.schema_id is None:
        raise AssertionError(
            'Unreachable code reached. The schema id of query piece "{}" has not been '
            "determined.".format(query_node.query_ast))
def get_and_validate_macro_edge_info(schema,
                                     subclass_sets,
                                     ast,
                                     macro_edge_args,
                                     type_equivalence_hints=None):
    """Return a MacroEdgeDescriptor for the specified macro edge, after ensuring its validity.

    Args:
        schema: GraphQL schema object, created using the GraphQL library
        subclass_sets: Dict[str, Set[str]] mapping class names to the set of its subclass names.
                       A class in this context means the name of a GraphQLObjectType,
                       GraphQLUnionType or GraphQLInterface.
        ast: GraphQL library AST OperationDefinition object, describing the GraphQL that is defining
             the macro edge.
        macro_edge_args: dict mapping strings to any type, containing any arguments the macro edge
                         requires in order to function.
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                *****
                                Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated.
                                *****

    Returns:
        MacroEdgeDescriptor containing the base type name where the macro edge is defined, the name
        of the macro edge, and the macro AST and arguments.
    """
    macro_directives = get_directives_for_ast(ast)

    _validate_query_definition(ast)
    _validate_ast_with_builtin_graphql_validation(
        schema, DocumentNode(definitions=[ast]))
    _validate_that_macro_edge_definition_and_target_directives_appear_once(
        macro_directives)
    _validate_non_required_macro_definition_directives(ast)

    # Guaranteed to only have one macro definition directive,
    # otherwise validation should have failed in the previous steps.
    macro_defn_ast, macro_defn_directive = get_only_element_from_collection(
        macro_directives[MacroEdgeDefinitionDirective.name])

    # Ensure that the macro successfully compiles to IR.
    _, input_metadata, _, _ = ast_to_ir(
        schema,
        _get_minimal_query_ast_from_macro_ast(ast),
        type_equivalence_hints=type_equivalence_hints,
    )
    ensure_arguments_are_provided(input_metadata, macro_edge_args)

    _validate_that_macro_edge_definition_is_only_top_level_field_directive(
        get_only_selection_from_ast(ast, GraphQLInvalidMacroError),
        macro_defn_ast)
    class_name = get_ast_field_name(macro_defn_ast)
    macro_edge_name = get_only_element_from_collection(
        macro_defn_directive.arguments).value.value

    _validate_macro_edge_name_for_class_name(schema, subclass_sets, class_name,
                                             macro_edge_name)

    target_class_name = get_type_at_macro_edge_target(schema,
                                                      macro_defn_ast).name
    _validate_reversed_macro_edge(schema, subclass_sets, target_class_name,
                                  macro_edge_name)

    descriptor = create_descriptor_from_ast_and_args(class_name,
                                                     target_class_name,
                                                     macro_edge_name,
                                                     macro_defn_ast,
                                                     macro_edge_args)

    return descriptor
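# Illustration (a macro edge definition of the shape this function validates;
# the type and field names are hypothetical):
#
#   {
#       Animal @macro_edge_definition(name: "out_Animal_RichSiblings") {
#           in_Animal_ParentOf {
#               net_worth @tag(tag_name: "parent_net_worth")
#               out_Animal_ParentOf @macro_edge_target {
#                   net_worth @filter(op_name: ">", value: ["%parent_net_worth"])
#               }
#           }
#       }
#   }
#
# Here class_name resolves to "Animal", macro_edge_name to
# "out_Animal_RichSiblings", and target_class_name to "Animal".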
def _add_cross_schema_edges(
    schema_ast: DocumentNode,
    type_name_to_schema_id: Dict[str, str],
    scalars: Set[str],
    cross_schema_edges: List[CrossSchemaEdgeDescriptor],
    type_equivalence_hints: TypeEquivalenceHintsType,
    query_type: str,
) -> DocumentNode:
    """Add cross-schema edges into the schema AST.

    Each cross-schema edge will be incorporated into the schema by adding vertex fields
    with a @stitch directive to relevant vertex types. The new fields corresponding to the
    added cross-schema edges will have names constructed from the edge name, prefixed with
    "out_" on the edge's outbound side, and "in_" on the edge's inbound side.

    The type of the new field will either be the type of the opposing vertex specified in
    the cross-schema edge, or the equivalent union type of the type of the opposing vertex
    if such a union type is specified by type_equivalence_hints.

    New vertex fields will be added to not only each vertex specified by the cross-schema
    edge, but to all of their subclass vertices as well.

    For examples demonstrating the above behaviors, see tests in test_merge_schemas.py that
    involve subclasses.

    Args:
        schema_ast: representing a schema, satisfying various structural requirements as demanded
                    by `check_ast_schema_is_valid` in utils.py.
        type_name_to_schema_id: mapping type name to the id of the schema that the type is from.
                                Contains all Interface, Object, Union, and Enum types.
        scalars: names of all scalars in the merged_schema so far.
        cross_schema_edges: containing all edges connecting fields in multiple schemas to be added
                            to the merged schema.
        type_equivalence_hints: Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces).
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union
        query_type: name of the query type in the merged schema

    Returns:
        schema_ast with added edges from cross_schema_edges

    Raises:
        - SchemaNameConflictError if any cross-schema edge name causes a name conflict with
          existing fields, or with fields created by previous cross-schema edges
        - InvalidCrossSchemaEdgeError if any cross-schema edge lies within one schema, refers
          to nonexistent schemas, types, or fields, refers to Union types, stitches together
          fields that are not of a scalar type, or stitches together fields that are of
          different scalar types
    """
    # Build map of definitions for ease of modification
    type_name_to_definition = {}  # Dict[str, (Interface/Object)TypeDefinitionNode]
    union_type_names = set()  # Set[str], names of union types, used in error messages

    for definition in schema_ast.definitions:
        if (isinstance(definition, ObjectTypeDefinitionNode) and
                definition.name.value == query_type):  # query type definition
            continue
        if isinstance(definition,
                      (InterfaceTypeDefinitionNode, ObjectTypeDefinitionNode)):
            type_name_to_definition[definition.name.value] = definition
        elif isinstance(definition, UnionTypeDefinitionNode):
            union_type_names.add(definition.name.value)

    # NOTE: All merge_schemas needs is the dict mapping names to names, not the dict mapping
    # GraphQLObjects to GraphQLObjects. However, elsewhere in the repo, type_equivalence_hints
    # is a map of objects to objects, and thus we use that same input for consistency
    equivalent_type_names = {
        object_type.name: union_type.name
        for object_type, union_type in six.iteritems(type_equivalence_hints)
    }
    subclass_sets = compute_subclass_sets(build_ast_schema(schema_ast),
                                          type_equivalence_hints)

    # Iterate through edges list, incorporate each edge on one or both sides
    for cross_schema_edge in cross_schema_edges:
        _check_cross_schema_edge_is_valid(
            type_name_to_definition,
            type_name_to_schema_id,
            scalars,
            union_type_names,
            cross_schema_edge,
        )

        edge_name = cross_schema_edge.edge_name
        outbound_field_reference = cross_schema_edge.outbound_field_reference
        inbound_field_reference = cross_schema_edge.inbound_field_reference

        # Get name of the type referenced by the edges in either direction
        # This is equal to the sink side's equivalent union type if it has one
        outbound_edge_sink_type_name = equivalent_type_names.get(
            inbound_field_reference.type_name,
            inbound_field_reference.type_name)
        inbound_edge_sink_type_name = equivalent_type_names.get(
            outbound_field_reference.type_name,
            outbound_field_reference.type_name)

        # Get set of all the types that need the new edge field
        outbound_edge_source_type_names = subclass_sets[
            outbound_field_reference.type_name]
        for outbound_edge_source_type_name in outbound_edge_source_type_names:
            source_type_node = type_name_to_definition[
                outbound_edge_source_type_name]
            new_source_type_node = _add_edge_field(
                source_type_node,
                outbound_edge_sink_type_name,
                outbound_field_reference.field_name,
                inbound_field_reference.field_name,
                edge_name,
                OUTBOUND_EDGE_DIRECTION,
            )
            type_name_to_definition[
                outbound_edge_source_type_name] = new_source_type_node

        if not cross_schema_edge.out_edge_only:
            inbound_edge_source_type_names = subclass_sets[
                inbound_field_reference.type_name]
            for inbound_edge_source_type_name in inbound_edge_source_type_names:
                source_type_node = type_name_to_definition[
                    inbound_edge_source_type_name]
                new_source_type_node = _add_edge_field(
                    source_type_node,
                    inbound_edge_sink_type_name,
                    inbound_field_reference.field_name,
                    outbound_field_reference.field_name,
                    edge_name,
                    INBOUND_EDGE_DIRECTION,
                )
                type_name_to_definition[
                    inbound_edge_source_type_name] = new_source_type_node

    new_definitions: List[DefinitionNode] = []
    for definition in schema_ast.definitions:
        if (isinstance(definition, ObjectTypeDefinitionNode) and
                definition.name.value == query_type):  # query type definition
            new_definitions.append(definition)
        elif isinstance(definition,
                        (InterfaceTypeDefinitionNode, ObjectTypeDefinitionNode)):
            new_definitions.append(
                type_name_to_definition[definition.name.value])
        else:
            new_definitions.append(definition)

    return DocumentNode(definitions=FrozenList(new_definitions))
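# Illustration (hypothetical edge): a cross-schema edge named "Animal_Creature"
# stitched on Animal.uuid -> Creature.id adds, to Animal and all its subclass
# vertices,
#
#   out_Animal_Creature: [Creature] @stitch(source_field: "uuid", sink_field: "id")
#
# and, unless out_edge_only is set, to Creature and all its subclass vertices,
#
#   in_Animal_Creature: [Animal] @stitch(source_field: "id", sink_field: "uuid")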
def _accumulate_types(
    merged_schema_ast: DocumentNode,
    merged_query_type_name: str,
    type_name_to_schema_id: Dict[str, str],
    scalars: Set[str],
    directives: Dict[str, DirectiveDefinitionNode],
    current_schema_id: str,
    current_ast: DocumentNode,
) -> Tuple[DocumentNode, Dict[str, str], Set[str],
           Dict[str, DirectiveDefinitionNode]]:
    """Add all types and query type fields of current_ast into merged_schema_ast.

    Args:
        merged_schema_ast: representing the schema into which current_ast will be merged.
        merged_query_type_name: name of the query type in the merged_schema_ast.
        type_name_to_schema_id: mapping type name to the id of the schema that the type is from.
        scalars: names of all scalars in the merged_schema so far.
        directives: mapping directive name to definition.
        current_schema_id: identifier of the schema being merged.
        current_ast: representing the schema being merged into merged_schema_ast.

    Returns:
        tuple (new_merged_schema_ast, type_name_to_schema_id, scalars, directives) with the
        following information:
            new_merged_schema_ast: updated version of merged_schema_ast with current_ast
                                   incorporated.
            type_name_to_schema_id: updated version of type_name_to_schema_id input.
            scalars: potentially updated version of scalars input.
            directives: potentially updated version of directives input.

    Raises:
        - ValueError if the schema identifier is not a nonempty string of alphanumeric
          characters and underscores
        - SchemaStructureError if the schema does not have the expected form; in particular, if
          the AST does not represent a valid schema, if any query type field does not have the
          same name as the type that it queries, if the schema contains type extensions or
          input object definitions, or if the schema contains mutations or subscriptions
        - SchemaNameConflictError if there are conflicts between the names of
          types/interfaces/enums/scalars, or conflicts between the definition of directives
          with the same name
    """
    # Check input schema identifier is a string of alphanumeric characters and underscores
    check_schema_identifier_is_valid(current_schema_id)
    # Check input schema satisfies various structural requirements
    check_ast_schema_is_valid(current_ast)

    current_schema = build_ast_schema(current_ast)
    current_query_type = get_query_type_name(current_schema)

    # Merge current_ast into merged_schema_ast.
    # Concatenate new scalars, new directives, and type definitions other than the query
    # type to definitions list.
    # Raise errors for conflicting scalars, directives, or types.
    new_definitions = list(current_ast.definitions)  # List[DefinitionNode]
    new_query_type_fields = None  # Optional[List[FieldDefinitionNode]]

    for new_definition in new_definitions:
        if isinstance(new_definition, SchemaDefinitionNode):
            continue
        elif (isinstance(new_definition, ObjectTypeDefinitionNode)
              and new_definition.name.value
              == current_query_type):  # query type definition
            new_query_type_fields = new_definition.fields  # List[FieldDefinitionNode]
        elif isinstance(new_definition, DirectiveDefinitionNode):
            directives, merged_schema_ast = _process_directive_definition(
                new_definition, directives, merged_schema_ast)
        elif isinstance(new_definition, ScalarTypeDefinitionNode):
            scalars, merged_schema_ast = _process_scalar_definition(
                new_definition, scalars, type_name_to_schema_id,
                merged_schema_ast)
        elif isinstance(new_definition,
                        (EnumTypeDefinitionNode, InterfaceTypeDefinitionNode,
                         ObjectTypeDefinitionNode, UnionTypeDefinitionNode)):
            type_name_to_schema_id, merged_schema_ast = _process_generic_type_definition(
                new_definition,
                current_schema_id,
                scalars,
                type_name_to_schema_id,
                merged_schema_ast,
            )
        else:  # All definition types should've been covered
            raise AssertionError(
                "Unreachable code reached. Missed definition type: "
                '"{}"'.format(type(new_definition).__name__))

    # Concatenate all query type fields.
    # Since current_query_type was taken from the schema built from the input AST,
    # its field definitions should always be found.
    if new_query_type_fields is None:
        raise AssertionError(
            'Unreachable code reached. Query type "{}" field definitions '
            "unexpectedly not found.".format(current_query_type))

    # Note that as field names and type names have been confirmed to match up, and types
    # were merged without name conflicts, query type fields can also be safely merged.
    #
    # Query type is the second entry in the list of definitions of the merged_schema_ast,
    # as guaranteed by _get_basic_schema_ast()
    query_type_index = 1
    new_definitions = list(merged_schema_ast.definitions)
    merged_query_type_definition = new_definitions[query_type_index]
    if not isinstance(merged_query_type_definition, ObjectTypeDefinitionNode):
        raise AssertionError(
            "Unreachable code reached. The second definition in the schema is unexpectedly "
            'not an ObjectTypeDefinitionNode, but is instead "{}".'.format(
                type(merged_query_type_definition)))
    if merged_query_type_definition.name.value != merged_query_type_name:
        raise AssertionError(
            "Unreachable code reached. The second definition in the schema is unexpectedly "
            'not the query type "{}", but is instead "{}".'.format(
                merged_query_type_name,
                merged_query_type_definition.name.value))
    new_fields = list(merged_query_type_definition.fields)
    new_fields.extend(new_query_type_fields)
    new_merged_query_type_definition = ObjectTypeDefinitionNode(
        name=merged_query_type_definition.name,
        interfaces=merged_query_type_definition.interfaces,
        fields=new_fields,
        directives=merged_query_type_definition.directives,
    )
    new_definitions[query_type_index] = new_merged_query_type_definition
    new_merged_schema_ast = DocumentNode(definitions=new_definitions)
    return new_merged_schema_ast, type_name_to_schema_id, scalars, directives
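# Usage sketch (an assumption about how a merge driver would call this; the
# schema ids and SDL below are made up, and each schema must satisfy
# check_ast_schema_is_valid, e.g. each query type field shares the name of the
# type it queries):
from graphql import parse

schemas = {
    "schema_a": "schema { query: RootSchemaQuery } "
                "type Toy { id: ID } type RootSchemaQuery { Toy: Toy }",
}
merged_ast = _get_basic_schema_ast("RootSchemaQuery")
type_name_to_schema_id: Dict[str, str] = {}
scalars: Set[str] = set()
directives: Dict[str, DirectiveDefinitionNode] = {}
for schema_id, sdl in schemas.items():
    merged_ast, type_name_to_schema_id, scalars, directives = _accumulate_types(
        merged_ast, "RootSchemaQuery", type_name_to_schema_id, scalars,
        directives, schema_id, parse(sdl))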