Code Example #1
    def test_edgeql_ir_pathid_namespace_01(self):
        User = self.schema.get('default::User')
        deck_ptr = User.getptr(self.schema, s_name.UnqualName('deck'))
        deck_ptr_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=deck_ptr,
        )
        count_prop = deck_ptr.getptr(self.schema, s_name.UnqualName('count'))
        count_prop_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=count_prop,
        )

        ns = frozenset(('foo', ))
        pid_1 = pathid.PathId.from_type(self.schema, User, namespace=ns)
        pid_2 = pid_1.extend(ptrref=deck_ptr_ref)
        ptr_pid = pid_2.ptr_path()
        prop_pid = ptr_pid.extend(ptrref=count_prop_ref)

        self.assertEqual(pid_1.namespace, ns)
        self.assertEqual(pid_2.namespace, ns)
        self.assertEqual(ptr_pid.namespace, ns)
        self.assertEqual(prop_pid.namespace, ns)

        pid_1_no_ns = pathid.PathId.from_type(self.schema, User)
        self.assertNotEqual(pid_1, pid_1_no_ns)
Code Example #2
    def test_edgeql_ir_pathid_startswith(self):
        User = self.schema.get('default::User')
        deck_ptr = User.getptr(self.schema, s_name.UnqualName('deck'))
        deck_ptr_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=deck_ptr,
        )
        count_prop = deck_ptr.getptr(self.schema, s_name.UnqualName('count'))
        count_prop_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=count_prop,
        )

        pid_1 = pathid.PathId.from_type(self.schema, User)
        pid_2 = pid_1.extend(ptrref=deck_ptr_ref)
        ptr_pid = pid_2.ptr_path()
        prop_pid = ptr_pid.extend(ptrref=count_prop_ref)

        self.assertTrue(pid_2.startswith(pid_1))
        self.assertFalse(pid_1.startswith(pid_2))

        self.assertTrue(ptr_pid.startswith(pid_1))
        self.assertTrue(prop_pid.startswith(pid_1))

        self.assertFalse(ptr_pid.startswith(pid_2))
        self.assertFalse(prop_pid.startswith(pid_2))

        self.assertTrue(prop_pid.startswith(ptr_pid))
Code Example #3
    def test_schema_on_target_delete_02(self):
        schema = self.load_schema("""
            type Object {
                link foo -> Object {
                    on target delete allow
                }
            };

            type Object2 extending Object {
                overloaded link foo -> Object {
                    annotation title := "Foo"
                }
            };

            type Object3 extending Object {
                overloaded link foo -> Object {
                    on target delete restrict
                }
            };
        """)

        obj2 = schema.get('test::Object2')
        self.assertEqual(
            obj2.getptr(schema,
                        s_name.UnqualName('foo')).get_on_target_delete(schema),
            s_links.LinkTargetDeleteAction.Allow)

        obj3 = schema.get('test::Object3')
        self.assertEqual(
            obj3.getptr(schema,
                        s_name.UnqualName('foo')).get_on_target_delete(schema),
            s_links.LinkTargetDeleteAction.Restrict)
Code Example #4
    def test_edgeql_ir_pathid_basic(self):
        User = self.schema.get('default::User')
        deck_ptr = User.getptr(self.schema, s_name.UnqualName('deck'))
        count_prop = deck_ptr.getptr(self.schema, s_name.UnqualName('count'))

        pid_1 = pathid.PathId.from_type(self.schema, User)
        self.assertEqual(str(pid_1), '(default::User)')

        self.assertTrue(pid_1.is_objtype_path())
        self.assertFalse(pid_1.is_scalar_path())

        self.assertIsNone(pid_1.rptr())
        self.assertIsNone(pid_1.rptr_dir())
        self.assertIsNone(pid_1.rptr_name())
        self.assertIsNone(pid_1.src_path())

        deck_ptr_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=deck_ptr,
        )
        pid_2 = pid_1.extend(ptrref=deck_ptr_ref)
        self.assertEqual(str(pid_2), '(default::User).>deck[IS default::Card]')

        self.assertEqual(pid_2.rptr().name, deck_ptr.get_name(self.schema))
        self.assertEqual(pid_2.rptr_dir(),
                         s_pointers.PointerDirection.Outbound)
        self.assertEqual(pid_2.rptr_name().name, 'deck')
        self.assertEqual(pid_2.src_path(), pid_1)

        ptr_pid = pid_2.ptr_path()
        self.assertEqual(str(ptr_pid),
                         '(default::User).>deck[IS default::Card]@')

        self.assertTrue(ptr_pid.is_ptr_path())
        self.assertFalse(ptr_pid.is_objtype_path())
        self.assertFalse(ptr_pid.is_scalar_path())

        self.assertEqual(ptr_pid.tgt_path(), pid_2)

        count_prop_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=count_prop,
        )
        prop_pid = ptr_pid.extend(ptrref=count_prop_ref)
        self.assertEqual(
            str(prop_pid),
            '(default::User).>deck[IS default::Card]@count[IS std::int64]')

        self.assertFalse(prop_pid.is_ptr_path())
        self.assertFalse(prop_pid.is_objtype_path())
        self.assertTrue(prop_pid.is_scalar_path())
        self.assertTrue(prop_pid.is_linkprop_path())
        self.assertEqual(prop_pid.src_path(), ptr_pid)
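
Each of these tests repeats the same setup: resolve a pointer by its unqualified name, turn it into a ptrref, and extend a PathId with it. A minimal helper sketch of that pattern, built only from the calls shown above (the helper name `_extend_by_ptr` is hypothetical and not part of the test suite):

    def _extend_by_ptr(self, pid, source, ptr_name):
        # Hypothetical helper: resolve `ptr_name` on `source` and extend
        # `pid` with the resulting ptrref, mirroring the setup repeated
        # in the tests above.
        ptr = source.getptr(self.schema, s_name.UnqualName(ptr_name))
        ptrref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=ptr,
        )
        return pid.extend(ptrref=ptrref)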
Code Example #5
    def test_edgeql_ir_pathid_namespace_02(self):
        # Test cases where the prefix is in a different namespace

        Card = self.schema.get('test::Card')
        User = self.schema.get('test::User')
        owners_ptr = Card.getptr(self.schema, s_name.UnqualName('owners'))
        owners_ptr_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=owners_ptr,
        )
        deck_ptr = User.getptr(self.schema, s_name.UnqualName('deck'))
        deck_ptr_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=deck_ptr,
        )
        count_prop = deck_ptr.getptr(self.schema, s_name.UnqualName('count'))
        count_prop_ref = irtyputils.ptrref_from_ptrcls(
            schema=self.schema,
            ptrcls=count_prop,
        )

        ns_1 = frozenset(('foo', ))
        ns_2 = frozenset(('bar', ))

        pid_1 = pathid.PathId.from_type(self.schema, Card)
        pid_2 = pid_1.extend(ptrref=owners_ptr_ref,
                             ns=ns_1,
                             schema=self.schema)
        pid_2_no_ns = pid_1.extend(ptrref=owners_ptr_ref, schema=self.schema)

        self.assertNotEqual(pid_2, pid_2_no_ns)
        self.assertEqual(pid_2.src_path(), pid_1)

        pid_3 = pid_2.extend(ptrref=deck_ptr_ref, ns=ns_2, schema=self.schema)
        ptr_pid = pid_3.ptr_path()
        prop_pid = ptr_pid.extend(ptrref=count_prop_ref, schema=self.schema)

        self.assertEqual(prop_pid.src_path().namespace, ns_1 | ns_2)
        self.assertEqual(prop_pid.src_path().src_path().namespace, ns_1)
        self.assertFalse(prop_pid.src_path().src_path().src_path().namespace)

        prefixes = [str(p) for p in pid_3.iter_prefixes()]

        self.assertEqual(prefixes, [
            '(test::Card)',
            'foo@@(test::Card).>owners[IS test::User]',
            'bar@foo@@(test::Card).>owners[IS test::User]'
            '.>deck[IS test::Card]',
        ])
Code Example #6
File: stmt.py  Project: signupsi/edgedb
def process_with_block(
        edgeql_tree: qlast.Statement, *,
        ctx: context.ContextLevel,
        parent_ctx: context.ContextLevel) -> List[irast.Set]:
    results = []
    for with_entry in edgeql_tree.aliases:
        if isinstance(with_entry, qlast.ModuleAliasDecl):
            ctx.modaliases[with_entry.alias] = with_entry.module

        elif isinstance(with_entry, qlast.AliasedExpr):
            with ctx.new() as scopectx:
                scopectx.expr_exposed = False
                results.append(
                    stmtctx.declare_view(
                        with_entry.expr,
                        s_name.UnqualName(with_entry.alias),
                        must_be_used=True,
                        ctx=scopectx,
                    ),
                )

        else:
            raise RuntimeError(
                f'unexpected expression in WITH block: {with_entry}')

    return results
Code Example #7
File: stmt.py  Project: signupsi/edgedb
def compile_insert_unless_conflict_select(
    stmt: irast.InsertStmt,
    insert_subject: qlast.Path,
    subject_typ: s_objtypes.ObjectType,
    *,
    obj_constrs: Sequence[s_constr.Constraint],
    constrs: Dict[str, Tuple[s_pointers.Pointer, List[s_constr.Constraint]]],
    parser_context: pctx.ParserContext,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Synthesize a select of conflicting objects for UNLESS CONFLICT

    `constrs` contains the constraints to consider.
    """
    # Find which pointers we need to grab
    needed_ptrs = set(constrs)
    for constr in obj_constrs:
        subjexpr = constr.get_subjectexpr(ctx.env.schema)
        assert subjexpr
        needed_ptrs |= qlutils.find_subject_ptrs(subjexpr.qlast)

    wl = list(needed_ptrs)
    ptr_anchors = {}
    while wl:
        p = wl.pop()
        ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p))
        if expr := ptr.get_expr(ctx.env.schema):
            assert isinstance(expr.qlast, qlast.Expr)
            ptr_anchors[p] = expr.qlast
            for ref in qlutils.find_subject_ptrs(expr.qlast):
                if ref not in needed_ptrs:
                    wl.append(ref)
                    needed_ptrs.add(ref)
Code Example #8
def extract_filters(
    result_set: irast.Set,
    filter_set: irast.Set,
    scope_tree: irast.ScopeTreeNode,
    ctx: inference_context.InfCtx,
) -> Sequence[Tuple[Sequence[s_pointers.Pointer], irast.Set]]:

    env = ctx.env
    schema = env.schema
    scope_tree = inf_utils.get_set_scope(filter_set, scope_tree, ctx=ctx)

    expr = filter_set.expr
    if isinstance(expr, irast.OperatorCall):
        if str(expr.func_shortname) == 'std::=':
            left, right = (a.expr for a in expr.args)

            op_card = _common_cardinality([left, right],
                                          scope_tree=scope_tree,
                                          ctx=ctx)
            result_stype = env.set_types[result_set]

            if op_card.is_multi():
                pass

            elif ((left_matches := _is_ptr_or_self_ref(left, result_set, env))
                  or _is_ptr_or_self_ref(right, result_set, env)):
                # If the match was on the right, flip the args
                if not left_matches:
                    left, right = right, left

                if infer_cardinality(
                        right,
                        scope_tree=scope_tree,
                        ctx=ctx,
                ).is_single():
                    ptrs = []
                    left_stype = env.set_types[left]
                    if left_stype == result_stype:
                        assert isinstance(left_stype, s_objtypes.ObjectType)
                        _ptr = left_stype.getptr(schema, sn.UnqualName('id'))
                        ptrs.append(_ptr)
                    else:
                        while left.path_id != result_set.path_id:
                            assert left.rptr is not None
                            _ptr = env.schema.get(left.rptr.ptrref.name,
                                                  type=s_pointers.Pointer)
                            ptrs.append(_ptr)
                            left = left.rptr.source
                        ptrs.reverse()

                    return [(ptrs, right)]

        elif str(expr.func_shortname) == 'std::AND':
            left, right = (a.expr for a in expr.args)

            left_filters = extract_filters(result_set, left, scope_tree, ctx)
            right_filters = extract_filters(result_set, right, scope_tree, ctx)

            return [*left_filters, *right_filters]
Code Example #9
File: stmtctx.py  Project: stjordanis/edgedb
def init_context(
    *,
    schema: s_schema.Schema,
    options: coptions.CompilerOptions,
) -> context.ContextLevel:

    if not schema.get_global(s_mod.Module, '__derived__', None):
        schema, _ = s_mod.Module.create_in_schema(
            schema,
            name=s_name.UnqualName('__derived__'),
        )

    env = context.Environment(
        schema=schema,
        options=options,
        alias_result_view_name=options.result_view_name,
    )
    ctx = context.ContextLevel(None, context.ContextSwitchMode.NEW, env=env)
    _ = context.CompilerContext(initial=ctx)

    if options.singletons:
        # The caller wants us to treat these type and pointer
        # references as singletons for the purposes of the overall
        # expression cardinality inference, so we set up the scope
        # tree in the necessary fashion.
        for singleton in options.singletons:
            path_id = compile_anchor('__', singleton, ctx=ctx).path_id
            ctx.env.path_scope.attach_path(path_id, context=None)
            ctx.env.singletons.append(path_id)

    ctx.modaliases.update(options.modaliases)

    if options.anchors:
        with ctx.newscope(fenced=True) as subctx:
            populate_anchors(options.anchors, ctx=subctx)

    if options.path_prefix_anchor is not None:
        path_prefix = options.anchors[options.path_prefix_anchor]
        ctx.partial_path_prefix = compile_anchor(options.path_prefix_anchor,
                                                 path_prefix,
                                                 ctx=ctx)
        ctx.partial_path_prefix.anchor = options.path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = options.path_prefix_anchor

    ctx.derived_target_module = options.derived_target_module
    ctx.toplevel_result_view_name = options.result_view_name
    ctx.implicit_id_in_shapes = options.implicit_id_in_shapes
    ctx.implicit_tid_in_shapes = options.implicit_tid_in_shapes
    ctx.implicit_tname_in_shapes = options.implicit_tname_in_shapes
    ctx.implicit_limit = options.implicit_limit
    ctx.expr_exposed = context.Exposure.EXPOSED

    return ctx
Code Example #10
    def test_schema_on_target_delete_01(self):
        schema = self.load_schema("""
            type Object {
                link foo -> Object {
                    on target delete allow
                };

                link bar -> Object;
            };
        """)

        obj = schema.get('test::Object')

        self.assertEqual(
            obj.getptr(schema,
                       s_name.UnqualName('foo')).get_on_target_delete(schema),
            s_links.LinkTargetDeleteAction.Allow)

        self.assertEqual(
            obj.getptr(schema,
                       s_name.UnqualName('bar')).get_on_target_delete(schema),
            s_links.LinkTargetDeleteAction.Restrict)
Code Example #11
    def get_params(
        self,
        schema: s_schema.Schema,
    ) -> s_func.ParameterLikeList:
        from_type_param = s_func.ParameterDesc(
            num=0,
            name=sn.UnqualName('val'),
            type=self._cast.get_from_type(schema).as_shell(schema),
            typemod=ft.TypeModifier.SingletonType,
            kind=ft.ParameterKind.PositionalParam,
            default=None,
        )

        to_type_param = s_func.ParameterDesc(
            num=0,
            name=sn.UnqualName('_'),
            type=self._cast.get_to_type(schema).as_shell(schema),
            typemod=ft.TypeModifier.SingletonType,
            kind=ft.ParameterKind.PositionalParam,
            default=None,
        )

        return CastParamListWrapper((from_type_param, to_type_param))
Code Example #12
File: conflicts.py  Project: stjordanis/edgedb
def _get_needed_ptrs(
    subject_typ: s_objtypes.ObjectType,
    obj_constrs: Sequence[s_constr.Constraint],
    initial_ptrs: Iterable[str],
    ctx: context.ContextLevel,
) -> Tuple[Set[str], Dict[str, qlast.Expr]]:
    needed_ptrs = set(initial_ptrs)
    for constr in obj_constrs:
        subjexpr = constr.get_subjectexpr(ctx.env.schema)
        assert subjexpr
        needed_ptrs |= qlutils.find_subject_ptrs(subjexpr.qlast)

    wl = list(needed_ptrs)
    ptr_anchors = {}
    while wl:
        p = wl.pop()
        ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p))
        if expr := ptr.get_expr(ctx.env.schema):
            assert isinstance(expr.qlast, qlast.Expr)
            ptr_anchors[p] = expr.qlast
            for ref in qlutils.find_subject_ptrs(expr.qlast):
                if ref not in needed_ptrs:
                    wl.append(ref)
                    needed_ptrs.add(ref)
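
Code Example #7 above inlines the same pointer-gathering loop; this helper factors it out. A hedged sketch of a call site (the call site itself is hypothetical; the argument names follow the signature above):

# Hypothetical call site: collect every pointer referenced by the object
# constraints, seeded with the pointers named in `constrs`.
needed_ptrs, ptr_anchors = _get_needed_ptrs(
    subject_typ,
    obj_constrs,
    initial_ptrs=constrs,
    ctx=ctx,
)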
Code Example #13
def extract_filters(
    result_set: irast.Set,
    filter_set: irast.Set,
    scope_tree: irast.ScopeTreeNode,
    ctx: inference_context.InfCtx,
    *,
    # When strict=False, ignore any clauses in the filter_set we don't
    # care about. If True, return None if any exist.
    strict: bool = False,
) -> Optional[Sequence[Tuple[s_pointers.Pointer, irast.Set]]]:

    env = ctx.env
    schema = env.schema
    scope_tree = _get_set_scope(filter_set, scope_tree)

    ptr: s_pointers.Pointer

    expr = filter_set.expr
    if isinstance(expr, irast.OperatorCall):
        if str(expr.func_shortname) == 'std::=':
            left, right = (a.expr for a in expr.args)

            op_card = _common_cardinality([left, right],
                                          scope_tree=scope_tree,
                                          ctx=ctx)
            result_stype = env.set_types[result_set]

            if op_card.is_multi():
                pass

            elif _is_ptr_or_self_ref(left, result_set, env):
                if infer_cardinality(
                        right,
                        scope_tree=scope_tree,
                        ctx=ctx,
                ).is_single():
                    left_stype = env.set_types[left]
                    if left_stype == result_stype:
                        assert isinstance(left_stype, s_objtypes.ObjectType)
                        _ptr = left_stype.getptr(schema, sn.UnqualName('id'))
                    else:
                        assert left.rptr is not None
                        _ptr = env.schema.get(left.rptr.ptrref.name,
                                              type=s_pointers.Pointer)

                    assert _ptr is not None
                    ptr = _ptr

                    return [(ptr, right)]

            elif _is_ptr_or_self_ref(right, result_set, env):
                if infer_cardinality(
                        left,
                        scope_tree=scope_tree,
                        ctx=ctx,
                ).is_single():
                    right_stype = env.set_types[right]
                    if right_stype == result_stype:
                        assert isinstance(right_stype, s_objtypes.ObjectType)
                        _ptr = right_stype.getptr(schema, sn.UnqualName('id'))
                    else:
                        assert right.rptr is not None
                        _ptr = env.schema.get(right.rptr.ptrref.name,
                                              type=s_pointers.Pointer)

                    assert _ptr is not None
                    ptr = _ptr

                    return [(ptr, right)]

        elif str(expr.func_shortname) == 'std::AND':
            left, right = (a.expr for a in expr.args)

            left_filters = extract_filters(result_set, left, scope_tree, ctx)
            right_filters = extract_filters(result_set, right, scope_tree, ctx)

            ptr_filters: List[Tuple[s_pointers.Pointer, irast.Set]] = []
            if left_filters is not None:
                ptr_filters.extend(left_filters)
            elif strict:
                return None
            if right_filters is not None:
                ptr_filters.extend(right_filters)
            elif strict:
                return None

            return ptr_filters

    return None
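
The `strict` flag controls what happens when the filter contains clauses this function does not recognize: with strict=False they are simply skipped, while with strict=True their presence makes the extraction return None. A hedged usage sketch (the surrounding variables are assumed to exist in the caller):

# Hypothetical call: only trust the extracted (pointer, value) pairs when
# every clause in the filter was recognized.
filters = extract_filters(result_set, filter_set, scope_tree, ctx, strict=True)
if filters is None:
    # The filter contains something other than single-cardinality `=`
    # comparisons combined with AND; fall back to the general path.
    pass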
Code Example #14
def resolve_ptr(
    near_endpoint: s_obj.Object,
    pointer_name: str,
    *,
    upcoming_intersections: Sequence[s_types.Type] = (),
    far_endpoints: Iterable[s_obj.Object] = (),
    direction: s_pointers.PointerDirection = (
        s_pointers.PointerDirection.Outbound),
    source_context: Optional[parsing.ParserContext] = None,
    track_ref: Optional[Union[qlast.Base, Literal[False]]],
    ctx: context.ContextLevel,
) -> s_pointers.Pointer:

    if not isinstance(near_endpoint, s_sources.Source):
        # Reference to a property on non-object
        msg = 'invalid property reference on a primitive type expression'
        raise errors.InvalidReferenceError(msg, context=source_context)

    ptr: Optional[s_pointers.Pointer] = None

    if direction is s_pointers.PointerDirection.Outbound:
        ptr = near_endpoint.maybe_get_ptr(
            ctx.env.schema,
            s_name.UnqualName(pointer_name),
        )

        if ptr is not None:
            ref = ptr.get_nearest_non_derived_parent(ctx.env.schema)
            if track_ref is not False:
                ctx.env.add_schema_ref(ref, track_ref)

    else:
        ptrs = near_endpoint.getrptrs(ctx.env.schema,
                                      pointer_name,
                                      sources=far_endpoints)
        if ptrs:
            if track_ref is not False:
                # If this reverse pointer access is followed by
                # intersections, we filter out any pointers that
                # couldn't be picked up by the intersections. This avoids
                # creating spurious dependencies when reverse
                # links are used in schemas.
                dep_ptrs = {
                    ptr
                    for ptr in ptrs
                    if (src := ptr.get_source(ctx.env.schema)) and all(
                        src.issubclass(ctx.env.schema, typ) or any(
                            dsrc.issubclass(ctx.env.schema, typ)
                            for dsrc in src.descendants(ctx.env.schema))
                        for typ in upcoming_intersections)
                }

                for p in dep_ptrs:
                    ctx.env.add_schema_ref(
                        p.get_nearest_non_derived_parent(ctx.env.schema),
                        track_ref)

            opaque = not far_endpoints
            ctx.env.schema, ptr = s_pointers.get_or_create_union_pointer(
                ctx.env.schema,
                ptrname=s_name.UnqualName(pointer_name),
                source=near_endpoint,
                direction=direction,
                components=ptrs,
                opaque=opaque,
                modname=ctx.derived_target_module,
            )

    if ptr is not None:
        return ptr

    if isinstance(near_endpoint, s_links.Link):
        vname = near_endpoint.get_verbosename(ctx.env.schema, with_parent=True)
        msg = f'{vname} has no property {pointer_name!r}'

    elif direction == s_pointers.PointerDirection.Outbound:
        msg = (f'{near_endpoint.get_verbosename(ctx.env.schema)} '
               f'has no link or property {pointer_name!r}')

    else:
        nep_name = near_endpoint.get_displayname(ctx.env.schema)
        path = f'{nep_name}.{direction}{pointer_name}'
        msg = f'{path!r} does not resolve to any known path'

    err = errors.InvalidReferenceError(msg, context=source_context)

    if direction is s_pointers.PointerDirection.Outbound:
        near_endpoint_pointers = near_endpoint.get_pointers(ctx.env.schema)
        s_utils.enrich_schema_lookup_error(
            err,
            s_name.UnqualName(pointer_name),
            modaliases=ctx.modaliases,
            item_type=s_pointers.Pointer,
            collection=near_endpoint_pointers.objects(ctx.env.schema),
            schema=ctx.env.schema,
        )

    raise err
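
A hedged sketch of invoking this resolver for an ordinary outbound pointer (the object type and compiler context are assumed to be available as in the surrounding compiler code; `track_ref=False` skips dependency tracking, per the signature above):

# Hypothetical call: resolve the `deck` link on a User object type without
# recording a schema dependency for it.
deck_ptr = resolve_ptr(
    user_type,
    'deck',
    track_ref=False,
    ctx=ctx,
)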
Code Example #15
File: viewgen.py  Project: sbdchd/edgedb
def _process_view(
    *,
    stype: s_objtypes.ObjectType,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.WeakNamespace] = None,
    elements: List[qlast.ShapeElement],
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[sn.QualName] = None,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    parser_context: pctx.ParserContext,
    ctx: context.ContextLevel,
) -> s_objtypes.ObjectType:

    if (view_name is None and ctx.env.options.schema_view_mode
            and view_rptr is not None):
        # Make sure persistent schema expression aliases have properly formed
        # names as opposed to the usual mangled form of the ephemeral
        # aliases.  This is needed for introspection readability, as well
        # as helps in maintaining proper type names for schema
        # representations that require alphanumeric names, such as
        # GraphQL.
        #
        # We use the name of the source together with the name
        # of the inbound link to form the name, so in e.g.
        #    CREATE ALIAS V := (SELECT Foo { bar: { baz: { ... } } })
        # The name of the innermost alias would be "__V__bar__baz".
        source_name = view_rptr.source.get_name(ctx.env.schema).name
        if not source_name.startswith('__'):
            source_name = f'__{source_name}'
        if view_rptr.ptrcls_name is not None:
            ptr_name = view_rptr.ptrcls_name.name
        elif view_rptr.ptrcls is not None:
            ptr_name = view_rptr.ptrcls.get_shortname(ctx.env.schema).name
        else:
            raise errors.InternalServerError(
                '_process_view in schema mode received view_rptr with '
                'neither ptrcls_name, nor ptrcls'
            )

        name = f'{source_name}__{ptr_name}'
        view_name = sn.QualName(
            module=ctx.derived_target_module or '__derived__',
            name=name,
        )

    view_scls = schemactx.derive_view(
        stype,
        is_insert=is_insert,
        is_update=is_update,
        is_delete=is_delete,
        derived_name=view_name,
        ctx=ctx,
    )
    assert isinstance(view_scls, s_objtypes.ObjectType), view_scls
    is_mutation = is_insert or is_update
    is_defining_shape = ctx.expr_exposed or is_mutation

    if view_rptr is not None and view_rptr.ptrcls is None:
        derive_ptrcls(
            view_rptr, target_scls=view_scls,
            transparent=True, ctx=ctx)

    pointers = []

    for shape_el in elements:
        with ctx.newscope(fenced=True) as scopectx:
            pointer = _normalize_view_ptr_expr(
                shape_el, view_scls, path_id=path_id,
                path_id_namespace=path_id_namespace,
                is_insert=is_insert, is_update=is_update,
                view_rptr=view_rptr,
                ctx=scopectx)

            if pointer in pointers:
                schema = ctx.env.schema
                vnp = pointer.get_verbosename(schema, with_parent=True)

                raise errors.QueryError(
                    f'duplicate definition of {vnp}',
                    context=shape_el.context)

            pointers.append(pointer)

    if is_insert:
        explicit_ptrs = {
            ptrcls.get_local_name(ctx.env.schema)
            for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if (pn in explicit_ptrs or
                    ptrcls.is_pure_computable(ctx.env.schema)):
                continue

            default_expr = ptrcls.get_default(ctx.env.schema)
            if not default_expr:
                if (
                    ptrcls.get_required(ctx.env.schema)
                    and pn != sn.UnqualName('__type__')
                ):
                    if ptrcls.is_property(ctx.env.schema):
                        # If the target is a sequence, there's no need
                        # for an explicit value.
                        ptrcls_target = ptrcls.get_target(ctx.env.schema)
                        assert ptrcls_target is not None
                        if ptrcls_target.issubclass(
                                ctx.env.schema,
                                ctx.env.schema.get(
                                    'std::sequence',
                                    type=s_objects.SubclassableObject)):
                            continue
                    vn = ptrcls.get_verbosename(
                        ctx.env.schema, with_parent=True)
                    raise errors.MissingRequiredError(
                        f'missing value for required {vn}')
                else:
                    continue

            ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
            default_ql = qlast.ShapeElement(
                expr=qlast.Path(
                    steps=[
                        qlast.Ptr(
                            ptr=qlast.ObjectRef(
                                name=ptrcls_sn.name,
                                module=ptrcls_sn.module,
                            ),
                        ),
                    ],
                ),
                compexpr=qlast.DetachedExpr(
                    expr=default_expr.qlast,
                ),
            )

            with ctx.newscope(fenced=True) as scopectx:
                pointers.append(
                    _normalize_view_ptr_expr(
                        default_ql,
                        view_scls,
                        path_id=path_id,
                        path_id_namespace=path_id_namespace,
                        is_insert=is_insert,
                        is_update=is_update,
                        from_default=True,
                        view_rptr=view_rptr,
                        ctx=scopectx,
                    ),
                )

    elif (
        stype.get_name(ctx.env.schema).module == 'schema'
        and ctx.env.options.apply_query_rewrites
    ):
        explicit_ptrs = {
            ptrcls.get_local_name(ctx.env.schema)
            for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if (
                pn in explicit_ptrs
                or ptrcls.is_pure_computable(ctx.env.schema)
            ):
                continue

            schema_deflt = ptrcls.get_schema_reflection_default(ctx.env.schema)
            if schema_deflt is None:
                continue

            with ctx.newscope(fenced=True) as scopectx:
                ptr_ref = s_utils.name_to_ast_ref(pn)
                implicit_ql = qlast.ShapeElement(
                    expr=qlast.Path(steps=[qlast.Ptr(ptr=ptr_ref)]),
                    compexpr=qlast.BinOp(
                        left=qlast.Path(
                            partial=True,
                            steps=[
                                qlast.Ptr(
                                    ptr=ptr_ref,
                                    direction=(
                                        s_pointers.PointerDirection.Outbound
                                    ),
                                )
                            ],
                        ),
                        right=qlparser.parse_fragment(schema_deflt),
                        op='??',
                    ),
                )

                # Note: we only need to record the schema default
                # as a computable, but not include it in the type
                # shape, so we ignore the return value.
                _normalize_view_ptr_expr(
                    implicit_ql,
                    view_scls,
                    path_id=path_id,
                    path_id_namespace=path_id_namespace,
                    is_insert=is_insert,
                    is_update=is_update,
                    view_rptr=view_rptr,
                    ctx=scopectx,
                )

    for ptrcls in pointers:
        source: Union[s_types.Type, s_pointers.PointerLike]

        if ptrcls.is_link_property(ctx.env.schema):
            assert view_rptr is not None and view_rptr.ptrcls is not None
            source = view_rptr.ptrcls
        else:
            source = view_scls

        if is_defining_shape:
            cinfo = ctx.source_map.get(ptrcls)
            if cinfo is not None:
                shape_op = cinfo.shape_op
            else:
                shape_op = qlast.ShapeOp.ASSIGN

            ctx.env.view_shapes[source].append((ptrcls, shape_op))

    if (view_rptr is not None and view_rptr.ptrcls is not None and
            view_scls != stype):
        ctx.env.schema = view_scls.set_field_value(
            ctx.env.schema, 'rptr', view_rptr.ptrcls)

    return view_scls
Code Example #16
File: config.py  Project: dmgolembiowski/edgedb
def _validate_op(
        expr: qlast.ConfigOp, *,
        ctx: context.ContextLevel) -> SettingInfo:

    if expr.name.module and expr.name.module != 'cfg':
        raise errors.QueryError(
            'invalid configuration parameter name: module must be either '
            '\'cfg\' or empty', context=expr.name.context,
        )

    name = expr.name.name
    cfg_host_type = ctx.env.get_track_schema_type(
        sn.QualName('cfg', 'AbstractConfig'))
    assert isinstance(cfg_host_type, s_objtypes.ObjectType)
    cfg_type = None

    if isinstance(expr, (qlast.ConfigSet, qlast.ConfigReset)):
        # expr.name is the actual name of the property.
        ptr = cfg_host_type.maybe_get_ptr(ctx.env.schema, sn.UnqualName(name))
        if ptr is not None:
            cfg_type = ptr.get_target(ctx.env.schema)

    if cfg_type is None:
        if isinstance(expr, qlast.ConfigSet):
            raise errors.ConfigurationError(
                f'unrecognized configuration parameter {name!r}',
                context=expr.context
            )

        # expr.name is the name of the configuration type
        cfg_type = ctx.env.get_track_schema_type(
            sn.QualName('cfg', name), default=None)
        if cfg_type is None:
            raise errors.ConfigurationError(
                f'unrecognized configuration object {name!r}',
                context=expr.context
            )

        assert isinstance(cfg_type, s_objtypes.ObjectType)
        ptr_candidate: Optional[s_pointers.Pointer] = None

        mro = [cfg_type] + list(
            cfg_type.get_ancestors(ctx.env.schema).objects(ctx.env.schema))
        for ct in mro:
            ptrs = ctx.env.schema.get_referrers(
                ct, scls_type=s_links.Link, field_name='target')

            if ptrs:
                pointer_link = next(iter(ptrs))
                assert isinstance(pointer_link, s_links.Link)
                ptr_candidate = pointer_link
                break

        if (
            ptr_candidate is None
            or (ptr_source := ptr_candidate.get_source(ctx.env.schema)) is None
            or not ptr_source.issubclass(ctx.env.schema, cfg_host_type)
        ):
            raise errors.ConfigurationError(
                f'{name!r} cannot be configured directly'
            )

        ptr = ptr_candidate

        name = ptr.get_shortname(ctx.env.schema).name
Code Example #17
File: stmt.py  Project: signupsi/edgedb
def compile_ForQuery(
        qlstmt: qlast.ForQuery, *, ctx: context.ContextLevel) -> irast.Set:
    with ctx.subquery() as sctx:
        stmt = irast.SelectStmt(context=qlstmt.context)
        init_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

        # As an optimization, if the iterator is a singleton set, use
        # the element directly.
        iterator = qlstmt.iterator
        if isinstance(iterator, qlast.Set) and len(iterator.elements) == 1:
            iterator = iterator.elements[0]

        # Compile the iterator
        iterator_ctx = None
        if (ctx.expr_exposed and ctx.iterator_ctx is not None
                and ctx.iterator_ctx is not sctx):
            iterator_ctx = ctx.iterator_ctx

        ictx = iterator_ctx or sctx

        contains_dml = qlutils.contains_dml(qlstmt.result)
        # If the body contains DML, then we need to prohibit
        # correlation between the iterator and the enclosing
        # query, since the correlation imposes compilation issues
        # we aren't willing to tackle.
        if contains_dml:
            ictx.path_scope.factoring_allowlist.update(
                ctx.iterator_path_ids)
        iterator_view = stmtctx.declare_view(
            iterator,
            s_name.UnqualName(qlstmt.iterator_alias),
            factoring_fence=contains_dml,
            path_id_namespace=ictx.path_id_namespace,
            ctx=ictx,
        )

        iterator_stmt = setgen.new_set_from_set(
            iterator_view, preserve_scope_ns=True, ctx=sctx)
        stmt.iterator_stmt = iterator_stmt

        iterator_type = setgen.get_set_type(iterator_stmt, ctx=ctx)
        anytype = iterator_type.find_any(ctx.env.schema)
        if anytype is not None:
            raise errors.QueryError(
                'FOR statement has iterator of indeterminate type',
                context=ctx.env.type_origins.get(anytype),
            )

        if iterator_ctx is not None and iterator_ctx.stmt is not None:
            iterator_ctx.stmt.hoisted_iterators.append(iterator_stmt)

        view_scope_info = sctx.path_scope_map[iterator_view]

        pathctx.register_set_in_scope(
            iterator_stmt,
            path_scope=ictx.path_scope,
            ctx=sctx,
        )

        # Iterator symbol is, by construction, outside of the scope
        # of the UNION argument, but is perfectly legal to be referenced
        # inside a factoring fence that is an immediate child of this
        # scope.
        ictx.path_scope.factoring_allowlist.add(
            stmt.iterator_stmt.path_id)
        sctx.iterator_path_ids |= {stmt.iterator_stmt.path_id}
        node = ictx.path_scope.find_descendant(iterator_stmt.path_id)
        if node is not None:
            # See above about why we need a factoring fence.
            # We need to do this again when we move the branch so
            # as to preserve the fencing.
            # Do this by sticking the iterator subtree onto a branch
            # with a factoring fence.
            if contains_dml:
                node = node.attach_branch()
                node.factoring_fence = True
                node = node.attach_branch()

            node.attach_subtree(view_scope_info.path_scope,
                                context=iterator.context)

        # Compile the body
        with sctx.newscope(fenced=True) as bctx:
            stmt.result = setgen.scoped_set(
                compile_result_clause(
                    qlstmt.result,
                    view_scls=ctx.view_scls,
                    view_rptr=ctx.view_rptr,
                    result_alias=qlstmt.result_alias,
                    view_name=ctx.toplevel_result_view_name,
                    forward_rptr=True,
                    ctx=bctx,
                ),
                ctx=bctx,
            )

        # Inject an implicit limit if appropriate
        if ((ctx.expr_exposed or sctx.stmt is ctx.toplevel_stmt)
                and ctx.implicit_limit):
            stmt.limit = setgen.ensure_set(
                dispatch.compile(
                    qlast.IntegerConstant(value=str(ctx.implicit_limit)),
                    ctx=sctx,
                ),
                ctx=sctx,
            )

        result = fini_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

    return result
Code Example #18
File: stmt.py  Project: signupsi/edgedb
    for elem, _ in stmt.subject.shape:
        assert elem.rptr is not None
        name = elem.rptr.ptrref.shortname.name
        if name in needed_ptrs and name not in ptr_anchors:
            assert elem.expr
            # FIXME: The wrong thing will definitely happen if there are
            # volatile entries here
            source_alias = ctx.aliases.get(name)
            ctx.anchors[source_alias] = setgen.ensure_set(elem.expr, ctx=ctx)
            ptr_anchors[name] = (
                qlast.Path(steps=[qlast.ObjectRef(name=source_alias)]))

    # Fill in empty sets for pointers that are needed but not present
    present_ptrs = set(ptr_anchors)
    for p in (needed_ptrs - present_ptrs):
        ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p))
        typ = ptr.get_target(ctx.env.schema)
        assert typ
        ptr_anchors[p] = qlast.TypeCast(
            expr=qlast.Set(elements=[]),
            type=typegen.type_to_ql_typeref(typ, ctx=ctx))

    if not ptr_anchors:
        raise errors.QueryError(
            'INSERT UNLESS CONFLICT property requires matching shape',
            context=parser_context,
        )

    conds: List[qlast.Expr] = []
    for ptrname, (ptr, ptr_cnstrs) in constrs.items():
        if ptrname not in present_ptrs:
Code Example #19
File: structure.py  Project: signupsi/edgedb
def generate_structure(schema: s_schema.Schema) -> SchemaReflectionParts:
    """Generate schema reflection structure from Python schema classes.

    Returns:
        A quadruple (as a SchemaReflectionParts instance) containing:
            - Delta, which, when applied to stdlib, yields an enhanced
              version of the `schema` module that contains all types
              and properties, not just those that are publicly exposed
              for introspection.
            - A mapping, containing type layout description for all
              schema classes.
            - A sequence of EdgeQL queries necessary to introspect
              a database schema.
            - A sequence of EdgeQL queries necessary to introspect
              global objects, such as roles and databases.
    """

    delta = sd.DeltaRoot()
    classlayout: Dict[Type[s_obj.Object], SchemaTypeLayout] = {}

    ordered_link = schema.get('schema::ordered', type=s_links.Link)

    py_classes = []

    schema = _run_ddl(
        '''
            CREATE FUNCTION sys::_get_pg_type_for_scalar_type(
                typeid: std::uuid
            ) -> std::int64 {
                USING SQL $$
                    SELECT
                        coalesce(
                            (
                                SELECT
                                    tn::regtype::oid
                                FROM
                                    edgedb._get_base_scalar_type_map()
                                        AS m(tid uuid, tn text)
                                WHERE
                                    m.tid = "typeid"
                            ),
                            (
                                SELECT
                                    typ.oid
                                FROM
                                    pg_catalog.pg_type typ
                                WHERE
                                    typ.typname = "typeid"::text || '_domain'
                            ),

                            edgedb.raise(
                                NULL::bigint,
                                'invalid_parameter_value',
                                msg => (
                                    'cannot determine OID of '
                                    || typeid::text
                                )
                            )
                        )::bigint
                $$;
                SET volatility := 'STABLE';
            };

            CREATE FUNCTION sys::_expr_from_json(
                data: json
            ) -> OPTIONAL tuple<text: str, refs: array<uuid>> {
                USING SQL $$
                    SELECT
                        "data"->>'text'                     AS text,
                        coalesce(r.refs, ARRAY[]::uuid[])   AS refs
                    FROM
                        (SELECT
                            array_agg(v::uuid) AS refs
                         FROM
                            jsonb_array_elements_text("data"->'refs') AS v
                        ) AS r
                    WHERE
                        jsonb_typeof("data") != 'null'
                $$;
                SET volatility := 'IMMUTABLE';
            };
        ''',
        schema=schema,
        delta=delta,
    )

    for py_cls in s_obj.ObjectMeta.get_schema_metaclasses():
        if isinstance(py_cls, adapter.Adapter):
            continue

        if py_cls is s_obj.GlobalObject:
            continue

        py_classes.append(py_cls)

    read_sets: Dict[Type[s_obj.Object], List[str]] = {}

    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_objtype = schema.get(
            rschema_name,
            type=s_objtypes.ObjectType,
            default=None,
        )

        bases = []
        for base in py_cls.__bases__:
            if base in py_classes:
                bases.append(get_schema_name_for_pycls(base))

        default_base = get_default_base_for_pycls(py_cls)
        if not bases and rschema_name != default_base:
            bases.append(default_base)

        reflection = py_cls.get_reflection_method()
        is_simple_wrapper = issubclass(py_cls, s_types.CollectionExprAlias)

        if schema_objtype is None:
            as_abstract = (reflection is s_obj.ReflectionMethod.REGULAR
                           and not is_simple_wrapper)

            schema = _run_ddl(
                f'''
                    CREATE {'ABSTRACT' if as_abstract else ''}
                    TYPE {rschema_name}
                    EXTENDING {', '.join(str(b) for b in bases)};
                ''',
                schema=schema,
                delta=delta,
            )

            schema_objtype = schema.get(rschema_name,
                                        type=s_objtypes.ObjectType)
        else:
            ex_bases = schema_objtype.get_bases(schema).names(schema)
            _, added_bases = s_inh.delta_bases(ex_bases, bases)

            if added_bases:
                for subset, position in added_bases:
                    if isinstance(position, tuple):
                        position_clause = (f'{position[0]} {position[1].name}')
                    else:
                        position_clause = position

                    bases_expr = ', '.join(str(t.name) for t in subset)

                    stmt = f'''
                        ALTER TYPE {rschema_name} {{
                            EXTENDING {bases_expr} {position_clause}
                        }}
                    '''

                    schema = _run_ddl(
                        stmt,
                        schema=schema,
                        delta=delta,
                    )

        if reflection is s_obj.ReflectionMethod.NONE:
            continue

        referrers = py_cls.get_referring_classes()

        if reflection is s_obj.ReflectionMethod.AS_LINK:
            if not referrers:
                raise RuntimeError(
                    f'schema class {py_cls.__name__} is declared with AS_LINK '
                    f'reflection method but is not referenced in any RefDict')

        is_concrete = not schema_objtype.get_abstract(schema)

        if (is_concrete and not is_simple_wrapper
                and any(not b.get_abstract(schema) for b in
                        schema_objtype.get_ancestors(schema).objects(schema))):
            raise RuntimeError(
                f'non-abstract {schema_objtype.get_verbosename(schema)} has '
                f'non-abstract ancestors')

        read_shape = read_sets[py_cls] = []

        if is_concrete:
            read_shape.append(
                '_tname := .__type__[IS schema::ObjectType].name')

        classlayout[py_cls] = {}
        ownfields = py_cls.get_ownfields()

        for fn, field in py_cls.get_fields().items():
            if (field.ephemeral or (field.reflection_method
                                    is not s_obj.ReflectionMethod.REGULAR)):
                continue

            storage = _classify_object_field(field)

            ptr = schema_objtype.maybe_get_ptr(schema, sn.UnqualName(fn))

            if fn in ownfields:
                qual = "REQUIRED" if field.required else "OPTIONAL"
                if ptr is None:
                    schema = _run_ddl(
                        f'''
                            ALTER TYPE {rschema_name} {{
                                CREATE {qual}
                                {storage.ptrkind} {fn} -> {storage.ptrtype};
                            }}
                        ''',
                        schema=schema,
                        delta=delta,
                    )
                    ptr = schema_objtype.getptr(schema, sn.UnqualName(fn))

                if storage.shadow_ptrkind is not None:
                    pn = f'{fn}__internal'
                    internal_ptr = schema_objtype.maybe_get_ptr(
                        schema, sn.UnqualName(pn))
                    if internal_ptr is None:
                        ptrkind = storage.shadow_ptrkind
                        ptrtype = storage.shadow_ptrtype
                        schema = _run_ddl(
                            f'''
                                ALTER TYPE {rschema_name} {{
                                    CREATE {qual}
                                    {ptrkind} {pn} -> {ptrtype};
                                }}
                            ''',
                            schema=schema,
                            delta=delta,
                        )

            else:
                assert ptr is not None

            if is_concrete:
                read_ptr = fn

                if field.type_is_generic_self:
                    read_ptr = f'{read_ptr}[IS {rschema_name}]'

                if field.reflection_proxy:
                    proxy_type, proxy_link = field.reflection_proxy
                    read_ptr = (
                        f'{read_ptr}: {{name, value := .{proxy_link}.id}}')

                if ptr.issubclass(schema, ordered_link):
                    read_ptr = f'{read_ptr} ORDER BY @index'

                read_shape.append(read_ptr)

                if storage.shadow_ptrkind is not None:
                    read_shape.append(f'{fn}__internal')

            if field.reflection_proxy:
                proxy_type_name, proxy_link_name = field.reflection_proxy
                proxy_obj = schema.get(proxy_type_name,
                                       type=s_objtypes.ObjectType)
                proxy_link_obj = proxy_obj.getptr(
                    schema, sn.UnqualName(proxy_link_name))
                tgt = proxy_link_obj.get_target(schema)
            else:
                tgt = ptr.get_target(schema)
            assert tgt is not None
            cardinality = ptr.get_cardinality(schema)
            assert cardinality is not None
            classlayout[py_cls][fn] = SchemaFieldDesc(
                fieldname=fn,
                type=tgt,
                cardinality=cardinality,
                properties={},
                storage=storage,
                is_ordered=ptr.issubclass(schema, ordered_link),
                reflection_proxy=field.reflection_proxy,
            )

    # Second pass: deal with RefDicts, which are reflected as links.
    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_cls = schema.get(rschema_name, type=s_objtypes.ObjectType)

        for refdict in py_cls.get_own_refdicts().values():
            ref_ptr = schema_cls.maybe_get_ptr(schema,
                                               sn.UnqualName(refdict.attr))
            ref_cls = refdict.ref_cls
            assert issubclass(ref_cls, s_obj.Object)
            shadow_ref_ptr = None
            reflect_as_link = (ref_cls.get_reflection_method() is
                               s_obj.ReflectionMethod.AS_LINK)

            if reflect_as_link:
                reflection_link = ref_cls.get_reflection_link()
                assert reflection_link is not None
                target_field = ref_cls.get_field(reflection_link)
                target_cls = target_field.type
                shadow_pn = f'{refdict.attr}__internal'

                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            CREATE OPTIONAL MULTI LINK {shadow_pn}
                            EXTENDING schema::reference
                             -> {get_schema_name_for_pycls(ref_cls)} {{
                                 ON TARGET DELETE ALLOW;
                             }};
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )
                shadow_ref_ptr = schema_cls.getptr(schema,
                                                   sn.UnqualName(shadow_pn))
            else:
                target_cls = ref_cls

            if ref_ptr is None:
                ptr_type = get_schema_name_for_pycls(target_cls)
                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            CREATE OPTIONAL MULTI LINK {refdict.attr}
                            EXTENDING schema::reference
                             -> {ptr_type} {{
                                 ON TARGET DELETE ALLOW;
                             }};
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )

                ref_ptr = schema_cls.getptr(schema,
                                            sn.UnqualName(refdict.attr))
            else:
                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            ALTER LINK {refdict.attr}
                            ON TARGET DELETE ALLOW;
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )

            assert isinstance(ref_ptr, s_links.Link)

            if py_cls not in classlayout:
                classlayout[py_cls] = {}

            # First, fields declared to be reflected as link properties.
            props = _get_reflected_link_props(
                ref_ptr=ref_ptr,
                target_cls=ref_cls,
                schema=schema,
            )

            # Ensure extra_props is defined even when this is not a
            # link-reflected class (it is merged with props below).
            extra_props = {}
            if reflect_as_link:
                # Then, because it's a passthrough reflection, all scalar
                # fields of the proxy object.
                fields_as_props = [
                    f for f in ref_cls.get_ownfields().values()
                    if (not f.ephemeral and (
                        f.reflection_method is not s_obj.ReflectionMethod.
                        AS_LINK) and f.name != refdict.backref_attr
                        and f.name != ref_cls.get_reflection_link())
                ]

                extra_props = _classify_scalar_object_fields(fields_as_props)

            for fn, storage in {**props, **extra_props}.items():
                prop_ptr = ref_ptr.maybe_get_ptr(schema, sn.UnqualName(fn))
                if prop_ptr is None:
                    pty = storage.ptrtype
                    schema = _run_ddl(
                        f'''
                            ALTER TYPE {rschema_name} {{
                                ALTER LINK {refdict.attr} {{
                                    CREATE OPTIONAL PROPERTY {fn} -> {pty};
                                }}
                            }}
                        ''',
                        schema=schema,
                        delta=delta,
                    )

            if shadow_ref_ptr is not None:
                assert isinstance(shadow_ref_ptr, s_links.Link)
                shadow_pn = shadow_ref_ptr.get_shortname(schema).name
                for fn, storage in props.items():
                    prop_ptr = shadow_ref_ptr.maybe_get_ptr(
                        schema, sn.UnqualName(fn))
                    if prop_ptr is None:
                        pty = storage.ptrtype
                        schema = _run_ddl(
                            f'''
                                ALTER TYPE {rschema_name} {{
                                    ALTER LINK {shadow_pn} {{
                                        CREATE OPTIONAL PROPERTY {fn} -> {pty};
                                    }}
                                }}
                            ''',
                            schema=schema,
                            delta=delta,
                        )

    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_cls = schema.get(rschema_name, type=s_objtypes.ObjectType)

        is_concrete = not schema_cls.get_abstract(schema)
        read_shape = read_sets[py_cls]

        for refdict in py_cls.get_refdicts():
            if py_cls not in classlayout:
                classlayout[py_cls] = {}

            ref_ptr = schema_cls.getptr(schema,
                                        sn.UnqualName(refdict.attr),
                                        type=s_links.Link)
            tgt = ref_ptr.get_target(schema)
            assert tgt is not None
            cardinality = ref_ptr.get_cardinality(schema)
            assert cardinality is not None
            classlayout[py_cls][refdict.attr] = SchemaFieldDesc(
                fieldname=refdict.attr,
                type=tgt,
                cardinality=cardinality,
                properties={},
                is_ordered=ref_ptr.issubclass(schema, ordered_link),
                reflection_proxy=None,
                is_refdict=True,
            )

            target_cls = refdict.ref_cls

            props = _get_reflected_link_props(
                ref_ptr=ref_ptr,
                target_cls=target_cls,
                schema=schema,
            )

            reflect_as_link = (target_cls.get_reflection_method() is
                               s_obj.ReflectionMethod.AS_LINK)

            prop_layout = {}
            extra_prop_layout = {}

            for fn, storage in props.items():
                prop_ptr = ref_ptr.getptr(schema, sn.UnqualName(fn))
                prop_tgt = prop_ptr.get_target(schema)
                assert prop_tgt is not None
                prop_layout[fn] = (prop_tgt, storage.fieldtype)

            if reflect_as_link:
                # Then, because it's a passthrough reflection, all scalar
                # fields of the proxy object.
                fields_as_props = [
                    f for f in target_cls.get_ownfields().values()
                    if (not f.ephemeral and (
                        f.reflection_method is not s_obj.ReflectionMethod.
                        AS_LINK) and f.name != refdict.backref_attr
                        and f.name != target_cls.get_reflection_link())
                ]

                extra_props = _classify_scalar_object_fields(fields_as_props)

                for fn, storage in extra_props.items():
                    prop_ptr = ref_ptr.getptr(schema, sn.UnqualName(fn))
                    prop_tgt = prop_ptr.get_target(schema)
                    assert prop_tgt is not None
                    extra_prop_layout[fn] = (prop_tgt, storage.fieldtype)
            else:
                extra_prop_layout = {}

            classlayout[py_cls][refdict.attr].properties.update({
                **prop_layout,
                **extra_prop_layout,
            })

            if reflect_as_link:
                shadow_tgt = schema.get(
                    get_schema_name_for_pycls(ref_cls),
                    type=s_objtypes.ObjectType,
                )

                classlayout[py_cls][f'{refdict.attr}__internal'] = (
                    SchemaFieldDesc(
                        fieldname=refdict.attr,
                        type=shadow_tgt,
                        cardinality=qltypes.SchemaCardinality.Many,
                        properties=prop_layout,
                        is_refdict=True,
                    ))

            if is_concrete:
                read_ptr = refdict.attr
                prop_shape_els = []

                if reflect_as_link:
                    read_ptr = f'{read_ptr}__internal'
                    ref_ptr = schema_cls.getptr(
                        schema,
                        sn.UnqualName(f'{refdict.attr}__internal'),
                    )

                for fn in props:
                    prop_shape_els.append(f'@{fn}')

                if prop_shape_els:
                    prop_shape = ',\n'.join(prop_shape_els)
                    read_ptr = f'{read_ptr}: {{id, {prop_shape}}}'

                if ref_ptr.issubclass(schema, ordered_link):
                    read_ptr = f'{read_ptr} ORDER BY @index'

                read_shape.append(read_ptr)

    local_parts = []
    global_parts = []
    for py_cls, shape_els in read_sets.items():
        if (not shape_els
                # The CollectionExprAlias family needs to be excluded
                # because TupleExprAlias and ArrayExprAlias inherit from
                # concrete classes and so are picked up from those.
                or issubclass(py_cls, s_types.CollectionExprAlias)):
            continue

        rschema_name = get_schema_name_for_pycls(py_cls)
        shape = ',\n'.join(shape_els)
        qry = f'''
            SELECT {rschema_name} {{
                {shape}
            }}
        '''
        if not issubclass(py_cls, (s_types.Collection, s_obj.GlobalObject)):
            qry += ' FILTER NOT .builtin'

        if issubclass(py_cls, s_obj.GlobalObject):
            global_parts.append(qry)
        else:
            local_parts.append(qry)

    delta.canonical = True
    return SchemaReflectionParts(
        intro_schema_delta=delta,
        class_layout=classlayout,
        local_intro_parts=local_parts,
        global_intro_parts=global_parts,
    )
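# A small, self-contained sketch (illustrative only, not part of the
# reflection module) of how the per-class introspection query text above is
# put together: the collected shape elements are joined into a SELECT shape,
# and a builtin filter is appended for classes that are neither collections
# nor globals.  The class name and shape elements below are made up.
def build_intro_query(rschema_name, shape_els, filter_builtin=True):
    shape = ',\n'.join(shape_els)
    qry = f'''
        SELECT {rschema_name} {{
            {shape}
        }}
    '''
    if filter_builtin:
        qry += ' FILTER NOT .builtin'
    return qry


print(build_intro_query('schema::ObjectType', ['name', 'abstract']))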
Code example #20
0
File: bootstrap.py Project: dmgolembiowski/edgedb
async def _init_stdlib(
    ctx: BootstrapContext,
    testmode: bool,
    global_ids: Mapping[str, uuid.UUID],
) -> Tuple[StdlibBits, edbcompiler.Compiler]:
    in_dev_mode = devmode.is_in_dev_mode()
    conn = ctx.conn
    cluster = ctx.cluster

    specified_cache_dir = os.environ.get('_EDGEDB_WRITE_DATA_CACHE_TO')
    if not specified_cache_dir:
        cache_dir = None
    else:
        cache_dir = pathlib.Path(specified_cache_dir)

    stdlib_cache = 'backend-stdlib.pickle'
    tpldbdump_cache = 'backend-tpldbdump.sql'

    src_hash = buildmeta.hash_dirs(
        buildmeta.get_cache_src_dirs(),
        extra_files=[__file__],
    )

    stdlib = buildmeta.read_data_cache(src_hash,
                                       stdlib_cache,
                                       source_dir=cache_dir)
    tpldbdump = buildmeta.read_data_cache(src_hash,
                                          tpldbdump_cache,
                                          source_dir=cache_dir,
                                          pickled=False)

    if stdlib is None:
        logger.info('Compiling the standard library...')
        stdlib = await _make_stdlib(ctx, in_dev_mode or testmode, global_ids)

    logger.info('Creating the necessary PostgreSQL extensions...')
    await metaschema.create_pg_extensions(conn)

    if tpldbdump is None:
        logger.info('Populating internal SQL structures...')
        await metaschema.bootstrap(conn)
        logger.info('Executing the standard library...')
        await _execute_ddl(conn, stdlib.sqltext)

        if in_dev_mode or specified_cache_dir:
            tpl_db_name = edbdef.EDGEDB_TEMPLATE_DB
            tpl_pg_db_name = cluster.get_db_name(tpl_db_name)
            tpl_pg_db_name_dyn = (
                f"edgedb.get_database_backend_name({ql(tpl_db_name)})")
            tpldbdump = cluster.dump_database(
                tpl_pg_db_name,
                exclude_schemas=['edgedbinstdata', 'edgedbext'],
                dump_object_owners=False,
            )

            # Excluding the "edgedbext" schema above apparently
            # doesn't apply to extensions created in that schema,
            # so we have to resort to commenting out extension
            # statements in the dump.
            tpldbdump = re.sub(
                rb'^(CREATE|COMMENT ON) EXTENSION.*$',
                rb'-- \g<0>',
                tpldbdump,
                flags=re.MULTILINE,
            )

            global_metadata = await conn.fetchval(
                f'SELECT edgedb.get_database_metadata({ql(tpl_db_name)})', )

            pl_block = dbops.PLTopBlock()

            dbops.SetMetadata(
                dbops.Database(name='__dummy_placeholder_database__'),
                json.loads(global_metadata),
            ).generate(pl_block)

            text = pl_block.to_string()
            text = text.replace(
                '__dummy_placeholder_database__',
                f"' || quote_ident({tpl_pg_db_name_dyn}) || '",
            )

            tpldbdump += b'\n' + text.encode('utf-8')

            buildmeta.write_data_cache(
                tpldbdump,
                src_hash,
                tpldbdump_cache,
                pickled=False,
                target_dir=cache_dir,
            )

            buildmeta.write_data_cache(
                stdlib,
                src_hash,
                stdlib_cache,
                target_dir=cache_dir,
            )
    else:
        logger.info('Initializing the standard library...')
        await metaschema._execute_sql_script(conn, tpldbdump.decode('utf-8'))

    if not in_dev_mode and testmode:
        # Running tests on a production build.
        stdlib, testmode_sql = await _amend_stdlib(
            ctx,
            s_std.get_std_module_text(sn.UnqualName('_testmode')),
            stdlib,
        )
        await conn.execute(testmode_sql)

    # Make sure that schema backend_id properties are in sync with
    # the database.

    compiler = edbcompiler.new_compiler(
        std_schema=stdlib.stdschema,
        reflection_schema=stdlib.reflschema,
        schema_class_layout=stdlib.classlayout,
    )
    _, sql = compile_bootstrap_script(
        compiler,
        stdlib.reflschema,
        '''
        SELECT schema::ScalarType {
            id,
            backend_id,
        } FILTER .builtin AND NOT (.abstract ?? False);
        ''',
        expected_cardinality_one=False,
        single_statement=True,
    )
    schema = stdlib.stdschema
    typemap = await conn.fetchval(sql)
    for entry in json.loads(typemap):
        t = schema.get_by_id(uuidgen.UUID(entry['id']))
        schema = t.set_field_value(schema, 'backend_id', entry['backend_id'])

    stdlib = stdlib._replace(stdschema=schema)

    await _store_static_bin_cache(
        ctx,
        'stdschema',
        pickle.dumps(schema, protocol=pickle.HIGHEST_PROTOCOL),
    )

    await _store_static_bin_cache(
        ctx,
        'reflschema',
        pickle.dumps(stdlib.reflschema, protocol=pickle.HIGHEST_PROTOCOL),
    )

    await _store_static_bin_cache(
        ctx,
        'global_schema',
        pickle.dumps(stdlib.global_schema, protocol=pickle.HIGHEST_PROTOCOL),
    )

    await _store_static_bin_cache(
        ctx,
        'classlayout',
        pickle.dumps(stdlib.classlayout, protocol=pickle.HIGHEST_PROTOCOL),
    )

    await _store_static_text_cache(
        ctx,
        'local_intro_query',
        stdlib.local_intro_query,
    )

    await _store_static_text_cache(
        ctx,
        'global_intro_query',
        stdlib.global_intro_query,
    )

    await metaschema.generate_support_views(conn, stdlib.reflschema)
    await metaschema.generate_support_functions(conn, stdlib.reflschema)

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=stdlib.reflschema,
        schema_class_layout=stdlib.classlayout,
    )

    await metaschema.generate_more_support_functions(conn, compiler,
                                                     stdlib.reflschema,
                                                     testmode)

    if tpldbdump is not None:
        # When we restore a database from a dump, OIDs for non-system
        # Postgres types might get skewed as they are not part of the dump.
        # A good example of that is `std::bigint` which is implemented as
        # a custom domain type. The OIDs are stored under
        # `schema::Object.backend_id` property and are injected into
        # array query arguments.
        #
        # The code below re-syncs backend_id properties of EdgeDB builtin
        # types with the actual OIDs in the DB.

        compiler = edbcompiler.new_compiler(
            std_schema=stdlib.stdschema,
            reflection_schema=stdlib.reflschema,
            schema_class_layout=stdlib.classlayout,
        )
        _, sql = compile_bootstrap_script(
            compiler,
            stdlib.reflschema,
            '''
            UPDATE schema::Type
            FILTER
                .builtin
                AND NOT (.abstract ?? False)
                AND schema::Type IS schema::ScalarType | schema::Tuple
            SET {
                backend_id := sys::_get_pg_type_for_edgedb_type(
                    .id,
                    <uuid>{}
                )
            }
            ''',
            expected_cardinality_one=False,
            single_statement=True,
        )
        await conn.execute(sql)

        _, sql = compile_bootstrap_script(
            compiler,
            stdlib.reflschema,
            '''
            UPDATE schema::Array
            FILTER
                .builtin
                AND NOT (.abstract ?? False)
            SET {
                backend_id := sys::_get_pg_type_for_edgedb_type(
                    .id,
                    .element_type.id,
                )
            }
            ''',
            expected_cardinality_one=False,
            single_statement=True,
        )
        await conn.execute(sql)

    return stdlib, compiler
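# A standalone sketch (not part of bootstrap.py) of the dump post-processing
# used above: extension statements that survive the pg_dump schema exclusion
# are commented out with a MULTILINE regex.  The dump bytes here are made up.
import re

sample_dump = (
    b"CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA edgedbext;\n"
    b'COMMENT ON EXTENSION pgcrypto IS "cryptographic functions";\n'
    b"CREATE TABLE foo ();\n"
)

commented = re.sub(
    rb'^(CREATE|COMMENT ON) EXTENSION.*$',
    rb'-- \g<0>',
    sample_dump,
    flags=re.MULTILINE,
)

assert commented.splitlines()[0].startswith(b'-- CREATE EXTENSION')
assert commented.splitlines()[1].startswith(b'-- COMMENT ON EXTENSION')
assert commented.splitlines()[2] == b'CREATE TABLE foo ();'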
Code example #21
0
def _process_view(
    *,
    stype: s_objtypes.ObjectType,
    path_id: irast.PathId,
    path_id_namespace: Optional[irast.Namespace] = None,
    elements: Optional[Sequence[qlast.ShapeElement]],
    view_rptr: Optional[context.ViewRPtr] = None,
    view_name: Optional[sn.QualName] = None,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    parser_context: Optional[pctx.ParserContext],
    ctx: context.ContextLevel,
) -> s_objtypes.ObjectType:

    if (view_name is None and ctx.env.options.schema_view_mode
            and view_rptr is not None):
        # Make sure persistent schema expression aliases have properly formed
        # names as opposed to the usual mangled form of the ephemeral
        # aliases.  This is needed for introspection readability, as well
        # as helps in maintaining proper type names for schema
        # representations that require alphanumeric names, such as
        # GraphQL.
        #
        # We use the name of the source together with the name
        # of the inbound link to form the name, so in e.g.
        #    CREATE ALIAS V := (SELECT Foo { bar: { baz: { ... } } })
        # The name of the innermost alias would be "__V__bar__baz".
        source_name = view_rptr.source.get_name(ctx.env.schema).name
        if not source_name.startswith('__'):
            source_name = f'__{source_name}'
        if view_rptr.ptrcls_name is not None:
            ptr_name = view_rptr.ptrcls_name.name
        elif view_rptr.ptrcls is not None:
            ptr_name = view_rptr.ptrcls.get_shortname(ctx.env.schema).name
        else:
            raise errors.InternalServerError(
                '_process_view in schema mode received view_rptr with '
                'neither ptrcls_name nor ptrcls'
            )

        name = f'{source_name}__{ptr_name}'
        view_name = sn.QualName(
            module=ctx.derived_target_module or '__derived__',
            name=name,
        )

    view_scls = schemactx.derive_view(
        stype,
        is_insert=is_insert,
        is_update=is_update,
        is_delete=is_delete,
        derived_name=view_name,
        ctx=ctx,
    )
    assert isinstance(view_scls, s_objtypes.ObjectType), view_scls
    is_mutation = is_insert or is_update
    is_defining_shape = ctx.expr_exposed or is_mutation

    if view_rptr is not None and view_rptr.ptrcls is None:
        derive_ptrcls(
            view_rptr, target_scls=view_scls,
            transparent=True, ctx=ctx)

    pointers = []

    elements = elements or ()
    for shape_el in elements:
        with ctx.newscope(fenced=True) as scopectx:
            pointer = _normalize_view_ptr_expr(
                shape_el, view_scls, path_id=path_id,
                path_id_namespace=path_id_namespace,
                is_insert=is_insert, is_update=is_update,
                view_rptr=view_rptr,
                ctx=scopectx)

            if pointer in pointers:
                schema = ctx.env.schema
                vnp = pointer.get_verbosename(schema, with_parent=True)

                raise errors.QueryError(
                    f'duplicate definition of {vnp}',
                    context=shape_el.context)

            pointers.append(pointer)

    # If we are not defining a shape (so we might care about
    # materialization), look through our parent view (if one exists)
    # for materialized properties that are not present in this shape.
    # If any are found, inject them.
    # (See test_edgeql_volatility_rebind_flat_01 for an example.)
    schema = ctx.env.schema
    base = view_scls.get_bases(schema).objects(schema)[0]
    base_ptrs = (view_scls.get_pointers(schema).objects(schema)
                 if not is_defining_shape else ())
    for ptrcls in base_ptrs:
        if ptrcls in pointers or base not in ctx.env.view_shapes:
            continue
        pptr = ptrcls.get_bases(schema).objects(schema)[0]
        if (pptr, qlast.ShapeOp.MATERIALIZE) not in ctx.env.view_shapes[base]:
            continue

        # Make up a dummy shape element
        name = ptrcls.get_shortname(schema).name
        dummy_el = qlast.ShapeElement(expr=qlast.Path(
            steps=[qlast.Ptr(ptr=qlast.ObjectRef(name=name))]))

        with ctx.newscope(fenced=True) as scopectx:
            pointer = _normalize_view_ptr_expr(
                dummy_el, view_scls, path_id=path_id,
                path_id_namespace=path_id_namespace,
                is_insert=is_insert, is_update=is_update,
                view_rptr=view_rptr,
                ctx=scopectx)

        pointers.append(pointer)

    if is_insert:
        explicit_ptrs = {
            ptrcls.get_local_name(ctx.env.schema)
            for ptrcls in pointers
        }
        scls_pointers = stype.get_pointers(ctx.env.schema)
        for pn, ptrcls in scls_pointers.items(ctx.env.schema):
            if (pn in explicit_ptrs or
                    ptrcls.is_pure_computable(ctx.env.schema)):
                continue

            default_expr = ptrcls.get_default(ctx.env.schema)
            if not default_expr:
                if (
                    ptrcls.get_required(ctx.env.schema)
                    and pn != sn.UnqualName('__type__')
                ):
                    if ptrcls.is_property(ctx.env.schema):
                        # If the target is a sequence, there's no need
                        # for an explicit value.
                        ptrcls_target = ptrcls.get_target(ctx.env.schema)
                        assert ptrcls_target is not None
                        if ptrcls_target.issubclass(
                                ctx.env.schema,
                                ctx.env.schema.get(
                                    'std::sequence',
                                    type=s_objects.SubclassableObject)):
                            continue
                    vn = ptrcls.get_verbosename(
                        ctx.env.schema, with_parent=True)
                    raise errors.MissingRequiredError(
                        f'missing value for required {vn}')
                else:
                    continue

            ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
            default_ql = qlast.ShapeElement(
                expr=qlast.Path(
                    steps=[
                        qlast.Ptr(
                            ptr=qlast.ObjectRef(
                                name=ptrcls_sn.name,
                                module=ptrcls_sn.module,
                            ),
                        ),
                    ],
                ),
                compexpr=qlast.DetachedExpr(
                    expr=default_expr.qlast,
                ),
            )

            with ctx.newscope(fenced=True) as scopectx:
                pointers.append(
                    _normalize_view_ptr_expr(
                        default_ql,
                        view_scls,
                        path_id=path_id,
                        path_id_namespace=path_id_namespace,
                        is_insert=is_insert,
                        is_update=is_update,
                        from_default=True,
                        view_rptr=view_rptr,
                        ctx=scopectx,
                    ),
                )

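    # Record the compiled pointers in ctx.env.view_shapes, preserving the
    # shape operation they were compiled with and marking computables that
    # are only needed for materialization.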
    for ptrcls in pointers:
        source: Union[s_types.Type, s_pointers.PointerLike]

        if ptrcls.is_link_property(ctx.env.schema):
            assert view_rptr is not None and view_rptr.ptrcls is not None
            source = view_rptr.ptrcls
        else:
            source = view_scls

        if is_defining_shape:
            cinfo = ctx.source_map.get(ptrcls)
            if cinfo is not None:
                shape_op = cinfo.shape_op
            else:
                shape_op = qlast.ShapeOp.ASSIGN
        elif ptrcls.get_computable(ctx.env.schema):
            shape_op = qlast.ShapeOp.MATERIALIZE
        else:
            continue

        ctx.env.view_shapes[source].append((ptrcls, shape_op))

    if (view_rptr is not None and view_rptr.ptrcls is not None and
            view_scls != stype):
        ctx.env.schema = view_scls.set_field_value(
            ctx.env.schema, 'rptr', view_rptr.ptrcls)

    return view_scls
Code example #22
0
File: conflicts.py Project: stjordanis/edgedb
def _compile_conflict_select(
    stmt: irast.MutatingStmt,
    subject_typ: s_objtypes.ObjectType,
    *,
    for_inheritance: bool,
    fake_dml_set: Optional[irast.Set],
    obj_constrs: Sequence[s_constr.Constraint],
    constrs: Dict[str, Tuple[s_pointers.Pointer, List[s_constr.Constraint]]],
    parser_context: Optional[pctx.ParserContext],
    ctx: context.ContextLevel,
) -> Optional[qlast.Expr]:
    """Synthesize a select of conflicting objects

    ... for a single object type. This gets called once for each ancestor
    type that provides constraints to the type being inserted.

    `constrs` contains the constraints to consider.
    """
    # Find which pointers we need to grab
    needed_ptrs, ptr_anchors = _get_needed_ptrs(
        subject_typ, obj_constrs, constrs.keys(), ctx=ctx
    )

    ctx.anchors = ctx.anchors.copy()

    # If we are given a fake_dml_set to directly represent the result
    # of our DML, use that instead of populating the result.
    if fake_dml_set:
        for p in needed_ptrs | {'id'}:
            ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p))
            val = setgen.extend_path(fake_dml_set, ptr, ctx=ctx)

            ptr_anchors[p] = ctx.create_anchor(val, p)

    # Find the IR corresponding to the fields we care about and
    # produce anchors for them
    ptrs_in_shape = set()
    for elem, _ in stmt.subject.shape:
        assert elem.rptr is not None
        name = elem.rptr.ptrref.shortname.name
        ptrs_in_shape.add(name)
        if name in needed_ptrs and name not in ptr_anchors:
            assert elem.expr
            if inference.infer_volatility(elem.expr, ctx.env).is_volatile():
                if for_inheritance:
                    error = (
                        'INSERT does not support volatile properties with '
                        'exclusive constraints when another statement in '
                        'the same query modifies a related type'
                    )
                else:
                    error = (
                        'INSERT UNLESS CONFLICT ON does not support volatile '
                        'properties'
                    )
                raise errors.UnsupportedFeatureError(
                    error, context=parser_context
                )

            # We want to use the same path_scope_id as the original
            elem_set = setgen.ensure_set(elem.expr, ctx=ctx)
            elem_set.path_scope_id = elem.path_scope_id

            # FIXME: The wrong thing will definitely happen if there are
            # volatile entries here
            ptr_anchors[name] = ctx.create_anchor(elem_set, name)

    if for_inheritance and not ptrs_in_shape:
        return None

    # Fill in empty sets for pointers that are needed but not present
    present_ptrs = set(ptr_anchors)
    for p in (needed_ptrs - present_ptrs):
        ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p))
        typ = ptr.get_target(ctx.env.schema)
        assert typ
        ptr_anchors[p] = qlast.TypeCast(
            expr=qlast.Set(elements=[]),
            type=typegen.type_to_ql_typeref(typ, ctx=ctx))

    if not ptr_anchors:
        raise errors.QueryError(
            'INSERT UNLESS CONFLICT property requires matching shape',
            context=parser_context,
        )

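    # Build one comparison per (pointer, constraint) pair: the value coming
    # from the DML statement on the left, the candidate object's pointer on
    # the right, using IN instead of = for multi pointers.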
    conds: List[qlast.Expr] = []
    for ptrname, (ptr, ptr_cnstrs) in constrs.items():
        if ptrname not in present_ptrs:
            continue
        anchor = qlutils.subject_paths_substitute(
            ptr_anchors[ptrname], ptr_anchors)
        ptr_val = qlast.Path(partial=True, steps=[
            qlast.Ptr(ptr=qlast.ObjectRef(name=ptrname))
        ])
        ptr, ptr_cnstrs = constrs[ptrname]
        ptr_card = ptr.get_cardinality(ctx.env.schema)

        for cnstr in ptr_cnstrs:
            lhs: qlast.Expr = anchor
            rhs: qlast.Expr = ptr_val
            # If there is a subjectexpr, substitute our lhs and rhs in
            # for __subject__ in the subjectexpr and compare *that*
            if (subjectexpr := cnstr.get_subjectexpr(ctx.env.schema)):
                assert isinstance(subjectexpr.qlast, qlast.Expr)
                lhs = qlutils.subject_substitute(subjectexpr.qlast, lhs)
                rhs = qlutils.subject_substitute(subjectexpr.qlast, rhs)

            conds.append(qlast.BinOp(
                op='=' if ptr_card.is_single() else 'IN',
                left=lhs, right=rhs,
            ))
Code example #23
0
File: setgen.py Project: signupsi/edgedb
def resolve_ptr(
    near_endpoint: s_obj.Object,
    pointer_name: str,
    *,
    far_endpoints: Iterable[s_obj.Object] = (),
    direction: s_pointers.PointerDirection = (
        s_pointers.PointerDirection.Outbound
    ),
    source_context: Optional[parsing.ParserContext] = None,
    track_ref: Optional[Union[qlast.Base, Literal[False]]],
    ctx: context.ContextLevel,
) -> s_pointers.Pointer:

    if not isinstance(near_endpoint, s_sources.Source):
        # Reference to a property on non-object
        msg = 'invalid property reference on a primitive type expression'
        raise errors.InvalidReferenceError(msg, context=source_context)

    ptr: Optional[s_pointers.Pointer] = None

    if direction is s_pointers.PointerDirection.Outbound:
        ptr = near_endpoint.maybe_get_ptr(
            ctx.env.schema,
            s_name.UnqualName(pointer_name),
        )

        if ptr is not None:
            ref = ptr.get_nearest_non_derived_parent(ctx.env.schema)
            if track_ref is not False:
                ctx.env.add_schema_ref(ref, track_ref)

    else:
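        # Inbound (backlink) traversal: collect all pointers named
        # `pointer_name` that target near_endpoint and merge them into
        # a union pointer, opaque if no far endpoints were specified.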
        ptrs = near_endpoint.getrptrs(ctx.env.schema, pointer_name,
                                      sources=far_endpoints)
        if ptrs:
            if track_ref is not False:
                for p in ptrs:
                    ctx.env.add_schema_ref(
                        p.get_nearest_non_derived_parent(ctx.env.schema),
                        track_ref)

            opaque = not far_endpoints
            ctx.env.schema, ptr = s_pointers.get_or_create_union_pointer(
                ctx.env.schema,
                ptrname=s_name.UnqualName(pointer_name),
                source=near_endpoint,
                direction=direction,
                components=ptrs,
                opaque=opaque,
                modname=ctx.derived_target_module,
            )

    if ptr is not None:
        return ptr

    if isinstance(near_endpoint, s_links.Link):
        vname = near_endpoint.get_verbosename(ctx.env.schema, with_parent=True)
        msg = f'{vname} has no property {pointer_name!r}'

    elif direction == s_pointers.PointerDirection.Outbound:
        msg = (f'{near_endpoint.get_verbosename(ctx.env.schema)} '
               f'has no link or property {pointer_name!r}')

    else:
        nep_name = near_endpoint.get_displayname(ctx.env.schema)
        path = f'{nep_name}.{direction}{pointer_name}'
        msg = f'{path!r} does not resolve to any known path'

    err = errors.InvalidReferenceError(msg, context=source_context)

    if direction is s_pointers.PointerDirection.Outbound:
        near_endpoint_pointers = near_endpoint.get_pointers(ctx.env.schema)
        s_utils.enrich_schema_lookup_error(
            err,
            s_name.UnqualName(pointer_name),
            modaliases=ctx.modaliases,
            item_type=s_pointers.Pointer,
            collection=near_endpoint_pointers.objects(ctx.env.schema),
            schema=ctx.env.schema,
        )

    raise err
Code example #24
0
def get_ref_storage_info(schema, refs):
    link_biased = {}
    objtype_biased = {}

    ref_ptrs = {}
    refs = list(refs)
    for ref in refs:
        rptr = ref.rptr
        if rptr is None:
            source_typeref = ref.typeref
            if not irtyputils.is_object(source_typeref):
                continue
            schema, t = irtyputils.ir_typeref_to_type(schema, ref.typeref)
            ptr = t.getptr(schema, s_name.UnqualName('id'))
        else:
            ptrref = ref.rptr.ptrref
            schema, ptr = irtyputils.ptrcls_from_ptrref(ptrref, schema=schema)
            source_typeref = ref.rptr.source.typeref

        if ptr.is_link_property(schema):
            srcref = ref.rptr.source.rptr.ptrref
            schema, src = irtyputils.ptrcls_from_ptrref(srcref, schema=schema)
            if src.get_is_derived(schema):
                # This specialized pointer was derived specifically
                # for the purposes of constraint expr compilation.
                src = src.get_bases(schema).first(schema)
        elif ptr.is_tuple_indirection():
            refs.append(ref.rptr.source)
            continue
        elif ptr.is_type_intersection():
            refs.append(ref.rptr.source)
            continue
        else:
            schema, src = irtyputils.ir_typeref_to_type(schema, source_typeref)
        ref_ptrs[ref] = (ptr, src)

    for ref, (ptr, src) in ref_ptrs.items():
        ptr_info = types.get_pointer_storage_info(ptr,
                                                  source=src,
                                                  resolve_type=False,
                                                  schema=schema)

        # See if any of the refs are hosted in pointer tables and others
        # are not...
        if ptr_info.table_type == 'link':
            link_biased[ref] = ptr_info
        else:
            objtype_biased[ref] = ptr_info

        if link_biased and objtype_biased:
            break

    if link_biased and objtype_biased:
        for ref in objtype_biased.copy():
            ptr, src = ref_ptrs[ref]
            ptr_info = types.get_pointer_storage_info(ptr,
                                                      source=src,
                                                      resolve_type=False,
                                                      link_bias=True,
                                                      schema=schema)

            if ptr_info is not None and ptr_info.table_type == 'link':
                link_biased[ref] = ptr_info
                objtype_biased.pop(ref)

    ref_tables = {}

    for ref, ptr_info in itertools.chain(objtype_biased.items(),
                                         link_biased.items()):
        ptr, src = ref_ptrs[ref]

        try:
            ref_tables[ptr_info.table_name].append((ref, ptr, src, ptr_info))
        except KeyError:
            ref_tables[ptr_info.table_name] = [(ref, ptr, src, ptr_info)]

    return ref_tables
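# A minimal, self-contained illustration (made-up data, not the EdgeDB API)
# of the final grouping step above: refs are bucketed by the table that
# hosts their pointer.  dict.setdefault is an equivalent alternative to the
# try/except KeyError pattern used in get_ref_storage_info().
refs = [
    ('ref_a', 'objtype_table'),
    ('ref_b', 'link_table'),
    ('ref_c', 'objtype_table'),
]

ref_tables = {}
for ref, table_name in refs:
    ref_tables.setdefault(table_name, []).append(ref)

assert ref_tables == {
    'objtype_table': ['ref_a', 'ref_c'],
    'link_table': ['ref_b'],
}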
Code example #25
0
def get_schema_object(
    ref: qlast.BaseObjectRef,
    module: Optional[str] = None,
    *,
    item_type: Optional[Type[s_obj.Object]] = None,
    condition: Optional[Callable[[s_obj.Object], bool]] = None,
    label: Optional[str] = None,
    ctx: context.ContextLevel,
    srcctx: Optional[parsing.ParserContext] = None,
) -> s_obj.Object:

    if isinstance(ref, qlast.ObjectRef):
        if srcctx is None:
            srcctx = ref.context
        module = ref.module
        lname = ref.name
    elif isinstance(ref, qlast.AnyType):
        return s_pseudo.PseudoType.get(ctx.env.schema, 'anytype')
    elif isinstance(ref, qlast.AnyTuple):
        return s_pseudo.PseudoType.get(ctx.env.schema, 'anytuple')
    else:
        raise AssertionError(f"Unhandled BaseObjectRef subclass: {ref!r}")

    name: sn.Name
    if module:
        name = sn.QualName(module=module, name=lname)
    else:
        name = sn.UnqualName(name=lname)

    view = _get_type_variant(name, ctx)
    if view is not None:
        return view

    try:
        stype = ctx.env.get_track_schema_object(
            name=name,
            expr=ref,
            modaliases=ctx.modaliases,
            type=item_type,
            condition=condition,
            label=label,
        )

    except errors.QueryError as e:
        s_utils.enrich_schema_lookup_error(
            e,
            name,
            modaliases=ctx.modaliases,
            schema=ctx.env.schema,
            item_type=item_type,
            condition=condition,
            context=srcctx,
        )
        raise

    view = _get_type_variant(stype.get_name(ctx.env.schema), ctx)
    if view is not None:
        return view
    elif stype == ctx.defining_view:
        # stype is the view in process of being defined and as such is
        # not yet a valid schema object
        raise errors.SchemaDefinitionError(
            f'illegal self-reference in definition of {str(name)!r}',
            context=srcctx)
    else:
        return stype
Code example #26
0
File: stmt.py Project: signupsi/edgedb
def compile_result_clause(
        result: qlast.Expr, *,
        view_scls: Optional[s_types.Type]=None,
        view_rptr: Optional[context.ViewRPtr]=None,
        view_name: Optional[s_name.QualName]=None,
        result_alias: Optional[str]=None,
        forward_rptr: bool=False,
        ctx: context.ContextLevel) -> irast.Set:
    with ctx.new() as sctx:
        if sctx.stmt is ctx.toplevel_stmt:
            sctx.expr_exposed = True

        if forward_rptr:
            sctx.view_rptr = view_rptr
            # sctx.view_scls = view_scls

        result_expr: qlast.Expr
        shape: Optional[Sequence[qlast.ShapeElement]]

        if isinstance(result, qlast.Shape):
            result_expr = result.expr
            shape = result.elements
        else:
            result_expr = result
            shape = None

        if result_alias:
            # `SELECT foo := expr` is equivalent to
            # `WITH foo := expr SELECT foo`
            rexpr = astutils.ensure_ql_select(result_expr)
            if (
                sctx.implicit_limit
                and rexpr.limit is None
                and not sctx.inhibit_implicit_limit
            ):
                # Inline alias is special: it's both "exposed",
                # but also subject for further processing, so
                # make sure we don't mangle it with an implicit
                # limit.
                rexpr.limit = qlast.TypeCast(
                    expr=qlast.Set(),
                    type=qlast.TypeName(
                        maintype=qlast.ObjectRef(
                            module='__std__',
                            name='int64',
                        )
                    )
                )

            stmtctx.declare_view(
                rexpr,
                alias=s_name.UnqualName(result_alias),
                ctx=sctx,
            )

            result_expr = qlast.Path(
                steps=[qlast.ObjectRef(name=result_alias)]
            )

        if (view_rptr is not None and
                (view_rptr.is_insert or view_rptr.is_update) and
                view_rptr.ptrcls is not None) and False:
            # If we have an empty set assigned to a pointer in an INSERT
            # or UPDATE, there's no need to explicitly specify the
            # empty set type and it can be assumed to match the pointer
            # target type.
            target_t = view_rptr.ptrcls.get_target(ctx.env.schema)

            if astutils.is_ql_empty_set(result_expr):
                expr = setgen.new_empty_set(
                    stype=target_t,
                    alias=ctx.aliases.get('e'),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                with sctx.new() as exprctx:
                    exprctx.empty_result_type_hint = target_t
                    expr = setgen.ensure_set(
                        dispatch.compile(result_expr, ctx=exprctx),
                        ctx=exprctx)
        else:
            if astutils.is_ql_empty_set(result_expr):
                expr = setgen.new_empty_set(
                    stype=sctx.empty_result_type_hint,
                    alias=ctx.aliases.get('e'),
                    ctx=sctx,
                    srcctx=result_expr.context,
                )
            else:
                expr = setgen.ensure_set(
                    dispatch.compile(result_expr, ctx=sctx), ctx=sctx)

        ctx.partial_path_prefix = expr

        ir_result = compile_query_subject(
            expr, shape=shape, view_rptr=view_rptr, view_name=view_name,
            result_alias=result_alias,
            view_scls=view_scls,
            compile_views=ctx.stmt is ctx.toplevel_stmt,
            ctx=sctx,
            parser_context=result.context)

        ctx.partial_path_prefix = ir_result

    return ir_result
Code example #27
0
def compile_path(expr: qlast.Path, *, ctx: context.ContextLevel) -> irast.Set:
    """Create an ir.Set representing the given EdgeQL path expression."""
    anchors = ctx.anchors

    if expr.partial:
        if ctx.partial_path_prefix is not None:
            path_tip = ctx.partial_path_prefix
        else:
            raise errors.QueryError('could not resolve partial path',
                                    context=expr.context)

    computables = []
    path_sets = []

    for i, step in enumerate(expr.steps):
        if isinstance(step, qlast.SpecialAnchor):
            path_tip = resolve_special_anchor(step, ctx=ctx)

        elif isinstance(step, qlast.ObjectRef):
            if i > 0:  # pragma: no cover
                raise RuntimeError(
                    'unexpected ObjectRef as a non-first path item')

            refnode = None

            if (not step.module
                    and s_name.UnqualName(step.name) not in ctx.aliased_views):
                # Check if the starting path label is a known anchor
                refnode = anchors.get(step.name)

            if refnode is not None:
                path_tip = new_set_from_set(refnode,
                                            preserve_scope_ns=True,
                                            ctx=ctx)
            else:
                stype = schemactx.get_schema_type(
                    step,
                    condition=lambda o:
                    (isinstance(o, s_types.Type) and
                     (o.is_object_type() or o.is_view(ctx.env.schema))),
                    label='object type or alias',
                    item_type=s_types.QualifiedType,
                    srcctx=step.context,
                    ctx=ctx,
                )

                if (stype.get_expr_type(ctx.env.schema) is not None and
                        stype.get_name(ctx.env.schema) not in ctx.view_nodes):
                    # This is a schema-level view, as opposed to
                    # a WITH-block or inline alias view.
                    stype = stmtctx.declare_view_from_schema(stype, ctx=ctx)

                view_set = ctx.view_sets.get(stype)
                if view_set is not None:
                    view_scope_info = ctx.path_scope_map[view_set]
                    path_tip = new_set_from_set(
                        view_set,
                        preserve_scope_ns=(view_scope_info.pinned_path_id_ns
                                           is not None),
                        is_binding=True,
                        ctx=ctx,
                    )
                else:
                    path_tip = class_set(stype, ctx=ctx)

                view_scls = ctx.class_view_overrides.get(stype.id)
                if (view_scls is not None
                        and view_scls != get_set_type(path_tip, ctx=ctx)):
                    path_tip = ensure_set(path_tip,
                                          type_override=view_scls,
                                          ctx=ctx)

        elif isinstance(step, qlast.Ptr):
            # Pointer traversal step
            ptr_expr = step
            if ptr_expr.direction is not None:
                direction = s_pointers.PointerDirection(ptr_expr.direction)
            else:
                direction = s_pointers.PointerDirection.Outbound

            ptr_name = ptr_expr.ptr.name

            source: s_obj.Object
            ptr: s_pointers.PointerLike

            if ptr_expr.type == 'property':
                # Link property reference; the source is the
                # link immediately preceding this step in the path.
                if path_tip.rptr is None:
                    raise errors.EdgeQLSyntaxError(
                        f"unexpected reference to link property {ptr_name!r} "
                        "outside of a path expression",
                        context=ptr_expr.ptr.context,
                    )

                if isinstance(path_tip.rptr.ptrref,
                              irast.TypeIntersectionPointerRef):
                    ind_prefix, ptrs = typegen.collapse_type_intersection_rptr(
                        path_tip,
                        ctx=ctx,
                    )

                    assert ind_prefix.rptr is not None
                    prefix_type = get_set_type(ind_prefix.rptr.source, ctx=ctx)
                    assert isinstance(prefix_type, s_objtypes.ObjectType)

                    if not ptrs:
                        tip_type = get_set_type(path_tip, ctx=ctx)
                        s_vn = prefix_type.get_verbosename(ctx.env.schema)
                        t_vn = tip_type.get_verbosename(ctx.env.schema)
                        pn = ind_prefix.rptr.ptrref.shortname.name
                        if direction is s_pointers.PointerDirection.Inbound:
                            s_vn, t_vn = t_vn, s_vn
                        raise errors.InvalidReferenceError(
                            f"property '{ptr_name}' does not exist because"
                            f" there are no '{pn}' links between"
                            f" {s_vn} and {t_vn}",
                            context=ptr_expr.ptr.context,
                        )

                    prefix_ptr_name = (next(iter(ptrs)).get_local_name(
                        ctx.env.schema))

                    ptr = schemactx.get_union_pointer(
                        ptrname=prefix_ptr_name,
                        source=prefix_type,
                        direction=ind_prefix.rptr.direction,
                        components=ptrs,
                        ctx=ctx,
                    )
                else:
                    ptr = typegen.ptrcls_from_ptrref(path_tip.rptr.ptrref,
                                                     ctx=ctx)

                if isinstance(ptr, s_links.Link):
                    source = ptr
                else:
                    raise errors.QueryError(
                        'improper reference to link property on '
                        'a non-link object',
                        context=step.context,
                    )
            else:
                source = get_set_type(path_tip, ctx=ctx)

            # If this is followed by type intersections, collect
            # them up, since we need them in ptr_step_set.
            upcoming_intersections = []
            for j in range(i + 1, len(expr.steps)):
                nstep = expr.steps[j]
                if (isinstance(nstep, qlast.TypeIntersection)
                        and isinstance(nstep.type, qlast.TypeName)):
                    upcoming_intersections.append(
                        schemactx.get_schema_type(nstep.type.maintype,
                                                  ctx=ctx))
                else:
                    break

            if isinstance(source, s_types.Tuple):
                path_tip = tuple_indirection_set(path_tip,
                                                 source=source,
                                                 ptr_name=ptr_name,
                                                 source_context=step.context,
                                                 ctx=ctx)

            else:
                path_tip = ptr_step_set(
                    path_tip,
                    expr=step,
                    source=source,
                    ptr_name=ptr_name,
                    direction=direction,
                    upcoming_intersections=upcoming_intersections,
                    ignore_computable=True,
                    source_context=step.context,
                    ctx=ctx)

                assert path_tip.rptr is not None
                ptrcls = typegen.ptrcls_from_ptrref(path_tip.rptr.ptrref,
                                                    ctx=ctx)
                if _is_computable_ptr(ptrcls, ctx=ctx):
                    computables.append(path_tip)

        elif isinstance(step, qlast.TypeIntersection):
            arg_type = inference.infer_type(path_tip, ctx.env)
            if not isinstance(arg_type, s_objtypes.ObjectType):
                raise errors.QueryError(
                    f'cannot apply type intersection operator to '
                    f'{arg_type.get_verbosename(ctx.env.schema)}: '
                    f'it is not an object type',
                    context=step.context)

            if not isinstance(step.type, qlast.TypeName):
                raise errors.QueryError(
                    'complex type expressions are not supported here',
                    context=step.context,
                )

            typ = schemactx.get_schema_type(step.type.maintype, ctx=ctx)

            try:
                path_tip = type_intersection_set(path_tip,
                                                 typ,
                                                 optional=False,
                                                 ctx=ctx)
            except errors.SchemaError as e:
                e.set_source_context(step.type.context)
                raise

        else:
            # Arbitrary expression
            if i > 0:  # pragma: no cover
                raise RuntimeError(
                    'unexpected expression as a non-first path item')

            # We need to fence this if the head is a mutating
            # statement, to make sure that the factoring allowlist
            # works right.
            is_subquery = isinstance(step, qlast.Statement)
            with ctx.newscope(fenced=is_subquery) as subctx:
                path_tip = ensure_set(dispatch.compile(step, ctx=subctx),
                                      ctx=subctx)

                # If the head of the path is a direct object
                # reference, wrap it in an expression set to give it a
                # new path id. This prevents the object path from being
                # spuriously visible to computable paths defined in a shape
                # at the root of a path. (See test_edgeql_select_tvariant_04
                # for an example).
                if (path_tip.path_id.is_objtype_path()
                        and not path_tip.path_id.is_view_path()
                        and path_tip.path_id.src_path() is None):
                    path_tip = expression_set(ensure_stmt(path_tip,
                                                          ctx=subctx),
                                              ctx=subctx)

                if path_tip.path_id.is_type_intersection_path():
                    assert path_tip.rptr is not None
                    scope_set = path_tip.rptr.source
                else:
                    scope_set = path_tip

                scope_set = scoped_set(scope_set, ctx=subctx)

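        # If this path (under any of its weak namespace prefixes) is
        # already bound in the view map, reuse the previously compiled
        # set's path_id, expression and rptr.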
        for key_path_id in path_tip.path_id.iter_weak_namespace_prefixes():
            mapped = ctx.view_map.get(key_path_id)
            if mapped is not None:
                path_tip = new_set(path_id=mapped.path_id,
                                   stype=get_set_type(path_tip, ctx=ctx),
                                   expr=mapped.expr,
                                   rptr=mapped.rptr,
                                   ctx=ctx)
                break

        if pathctx.path_is_banned(path_tip.path_id, ctx=ctx):
            dname = stype.get_displayname(ctx.env.schema)
            raise errors.QueryError(
                f'invalid reference to {dname}: '
                f'self-referencing INSERTs are not allowed',
                hint=(f'Use DETACHED if you meant to refer to an '
                      f'uncorrelated {dname} set'),
                context=step.context,
            )

        path_sets.append(path_tip)

    path_tip.context = expr.context
    # Since we are attaching the computable scopes as siblings to
    # the subpaths they're computing, we must make sure that the
    # actual path head is not visible from inside the computable scope.
    #
    # Example:
    # type Tree {
    #   multi link children -> Tree;
    #   parent := .<children[IS Tree];
    # }
    # `SELECT Tree.parent` should generate roughly the following scope tree:
    #
    # (test::Tree).>parent[IS test::Tree]: {
    #    "BRANCH": {
    #       "(test::Tree)"
    #    },
    #    "FENCE": {
    #        "ns@(test::Tree).<children": {
    #            "(test::Tree) 0x7f30c7885d90"
    #        }
    #    },
    # }
    #
    # Note that we use an unfenced BRANCH node to isolate the path head,
    # to make sure it is still properly factorable.
    # The branch insertion is handled automatically by attach_path, and
    # we temporarily flip the branch to be a full fence for the compilation
    # of the computable.
    fences = pathctx.register_set_in_scope(
        path_tip,
        ctx=ctx,
    )

    for fence in fences:
        fence.fenced = True

    for ir_set in computables:
        scope = ctx.path_scope.find_descendant(ir_set.path_id)
        if scope is None:
            scope = ctx.path_scope.find_visible(ir_set.path_id)
        # We skip recompiling if we can't find a scope for it.
        # This whole mechanism seems a little sketchy, unfortunately.
        if scope is None:
            continue

        with ctx.new() as subctx:
            subctx.path_scope = scope
            assert ir_set.rptr is not None
            comp_ir_set = computable_ptr_set(ir_set.rptr, ctx=subctx)
            i = path_sets.index(ir_set)
            if i != len(path_sets) - 1:
                prptr = path_sets[i + 1].rptr
                assert prptr is not None
                prptr.source = comp_ir_set
            else:
                path_tip = comp_ir_set
            path_sets[i] = comp_ir_set

    for fence in fences:
        fence.fenced = False

    return path_tip
Code example #28
0
File: func.py Project: fantix/edgedb
def compile_FunctionCall(
        expr: qlast.FunctionCall, *, ctx: context.ContextLevel) -> irast.Set:

    env = ctx.env

    funcname: sn.Name
    if isinstance(expr.func, str):
        if (
            ctx.env.options.func_params is not None
            and ctx.env.options.func_params.get_by_name(
                env.schema, expr.func
            )
        ):
            raise errors.QueryError(
                f'parameter `{expr.func}` is not callable',
                context=expr.context)

        funcname = sn.UnqualName(expr.func)
    else:
        funcname = sn.QualName(*expr.func)

    funcs = env.schema.get_functions(funcname, module_aliases=ctx.modaliases)

    if funcs is None:
        raise errors.QueryError(
            f'could not resolve function name {funcname}',
            context=expr.context)

    in_polymorphic_func = (
        ctx.env.options.func_params is not None and
        ctx.env.options.func_params.has_polymorphic(env.schema)
    )

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.options.schema_object_context is s_constr.Constraint
    )

    args, kwargs, arg_ctxs = compile_call_args(expr, funcname, ctx=ctx)
    matched = polyres.find_callable(funcs, args=args, kwargs=kwargs, ctx=ctx)
    if not matched:
        alts = [f.get_signature_as_str(env.schema) for f in funcs]
        sig: List[str] = []
        # This is used to generate unique arg names.
        argnum = 0
        for argtype, _ in args:
            # Skip any name colliding with kwargs.
            while f'arg{argnum}' in kwargs:
                argnum += 1
            sig.append(
                f'arg{argnum}: {argtype.get_displayname(env.schema)}'
            )
            argnum += 1
        for kwname, (kwtype, _) in kwargs.items():
            sig.append(
                f'NAMED ONLY {kwname}: {kwtype.get_displayname(env.schema)}'
            )

        signature = f'{funcname}({", ".join(sig)})'

        if not funcs:
            hint = None
        elif len(alts) == 1:
            hint = f'Did you want "{alts[0]}"?'
        else:  # Multiple alternatives
            hint = (
                f'Did you want one of the following functions instead:\n' +
                ('\n'.join(alts))
            )

        raise errors.QueryError(
            f'function "{signature}" does not exist',
            hint=hint,
            context=expr.context)
    elif len(matched) > 1:
        if in_abstract_constraint:
            matched_call = matched[0]
        else:
            alts = [m.func.get_signature_as_str(env.schema) for m in matched]
            raise errors.QueryError(
                f'function {funcname} is not unique',
                hint=f'Please disambiguate between the following '
                     f'alternatives:\n' +
                     ('\n'.join(alts)),
                context=expr.context)
    else:
        matched_call = matched[0]

    func = matched_call.func

    # Record this node in the list of potential DML expressions.
    if isinstance(func, s_func.Function) and func.get_has_dml(env.schema):
        ctx.env.dml_exprs.append(expr)

        # This is some kind of mutation, so we need to check if it is
        # allowed.
        if ctx.env.options.in_ddl_context_name is not None:
            raise errors.SchemaDefinitionError(
                f'invalid mutation in {ctx.env.options.in_ddl_context_name}',
                context=expr.context,
            )
        elif ((dv := ctx.defining_view) is not None and
                dv.get_expr_type(ctx.env.schema) is s_types.ExprType.Select and
                not ctx.env.options.allow_top_level_shape_dml):
            # This is some shape in a regular query. DML is not
            # allowed in the computable, but it may be possible
            # to refactor it.
            raise errors.QueryError(
                'invalid mutation in a shape computable',
                hint=(
                    f'To resolve this try to factor out the mutation '
                    f'expression into the top-level WITH block.'
                ),
                context=expr.context,
            )
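# A standalone sketch (made-up function and type names, not the EdgeDB API)
# of how the error signature string above is assembled: positional arguments
# get generated names arg0, arg1, ... skipping any name already taken by a
# keyword argument, and keyword arguments are rendered as NAMED ONLY.
args = ['std::str', 'std::int64']      # display names of positional args
kwargs = {'arg1': 'std::bool'}         # keyword args by name

sig = []
argnum = 0
for argtype in args:
    # Skip any generated name colliding with kwargs.
    while f'arg{argnum}' in kwargs:
        argnum += 1
    sig.append(f'arg{argnum}: {argtype}')
    argnum += 1
for kwname, kwtype in kwargs.items():
    sig.append(f'NAMED ONLY {kwname}: {kwtype}')

signature = f'count({", ".join(sig)})'
assert signature == (
    'count(arg0: std::str, arg2: std::int64, NAMED ONLY arg1: std::bool)'
)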
Code example #29
0
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    schema = s_schema.ChainedSchema(
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
    )
    schema, _ = s_mod.Module.create_in_schema(
        schema,
        name=sn.UnqualName('__derived__'),
    )

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_SOURCES:
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text(sn.UnqualName('_testmode')))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(ddl_cmd,
                                             modaliases={},
                                             schema=schema,
                                             stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt the delta and build the native delta plan,
        # which will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)

        types.update(plan.new_types)
        plan.generate(current_block)

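    # Record the std schema version through the same delta pipeline, so
    # its SQL ends up in the same block as the stdlib DDL.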
    _, schema_version = s_std.make_schema_version(schema)
    schema, plan = _process_delta(schema_version, schema)
    std_plans.append(schema_version)
    plan.generate(current_block)

    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
    ])

    schema = await _execute_edgeql_ddl(schema, stdglobals)

    _, global_schema_version = s_std.make_global_schema_version(schema)
    schema, plan = _process_delta(global_schema_version, schema)
    std_plans.append(global_schema_version)
    plan.generate(current_block)

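    # Generate the schema reflection structure and apply its introspection
    # schema delta on top of the freshly built std schema.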
    reflection = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(reflection.intro_schema_delta,
                                          schema)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema.get_top_schema(),
        reflection_schema=reflschema.get_top_schema(),
        schema_class_layout=reflection.class_layout,  # type: ignore
    )

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
    )

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
        )

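    # A second compiler context, in internal schema mode, is used for the
    # reflection's own introspection schema delta.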
    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
        internal_schema_mode=True,
    )
    compiler._compile_schema_storage_in_delta(
        ctx=compilerctx,
        delta=reflection.intro_schema_delta,
        block=subblock,
    )

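    # Render everything accumulated in the block into a single SQL script.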
    sqltext = current_block.to_string()

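    # A third context, in schema reflection mode with JSON output, is used
    # to compile the introspection queries below.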
    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Postgres to plan than a
    # straight flat UNION.
    sql_intro_local_parts = []
    sql_intro_global_parts = []
    for intropart in reflection.local_intro_parts:
        sql_intro_local_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    for intropart in reflection.global_intro_parts:
        sql_intro_global_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    local_intro_sql = ' UNION ALL '.join(sql_intro_local_parts)
    local_intro_sql = f'''
        WITH intro(c) AS ({local_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    global_intro_sql = ' UNION ALL '.join(sql_intro_global_parts)
    global_intro_sql = f'''
        WITH intro(c) AS ({global_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    return StdlibBits(
        stdschema=schema.get_top_schema(),
        reflschema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        sqltext=sqltext,
        types=types,
        classlayout=reflection.class_layout,
        local_intro_query=local_intro_sql,
        global_intro_query=global_intro_sql,
    )
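
The last two steps above fold the per-part introspection queries into a single
json_agg query. Below is a standalone sketch of that assembly; the helper name
is invented for illustration, while the SQL shape mirrors the snippet.

from typing import Sequence


def assemble_intro_query(parts: Sequence[str]) -> str:
    # Each part is a complete SELECT producing one JSON value per row;
    # the parts are combined with UNION ALL and then aggregated into a
    # single JSON array, exactly as in the local/global queries above.
    union_sql = ' UNION ALL '.join(parts)
    return f'''
        WITH intro(c) AS ({union_sql})
        SELECT json_agg(intro.c) FROM intro
    '''


if __name__ == '__main__':
    print(assemble_intro_query(
        ["SELECT '{}'::json", "SELECT '[]'::json"],
    ))
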
Code example #30
File: func.py Project: stjordanis/edgedb
def finalize_args(
    bound_call: polyres.BoundCall,
    *,
    actual_typemods: Sequence[ft.TypeModifier] = (),
    guessed_typemods: Dict[Union[int, str], ft.TypeModifier],
    is_polymorphic: bool = False,
    ctx: context.ContextLevel,
) -> Tuple[List[irast.CallArg], List[ft.TypeModifier]]:

    args: List[irast.CallArg] = []
    typemods = []

    for i, barg in enumerate(bound_call.args):
        param = barg.param
        arg = barg.val
        arg_type_path_id: Optional[irast.PathId] = None
        if param is None:
            # There is no corresponding parameter: this argument is the
            # defaults bitmask.
            args.append(irast.CallArg(expr=arg))
            typemods.append(ft.TypeModifier.SingletonType)
            continue

        if actual_typemods:
            param_mod = actual_typemods[i]
        else:
            param_mod = param.get_typemod(ctx.env.schema)

        typemods.append(param_mod)

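        # Non-SetOf parameters may need optionality fixups and, for object
        # type arguments, a type path id so the argument's type can also be
        # passed along on the pgsql side.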
        if param_mod is not ft.TypeModifier.SetOfType:
            param_shortname = param.get_parameter_name(ctx.env.schema)

            if param_shortname in bound_call.null_args:
                pathctx.register_set_in_scope(arg, optional=True, ctx=ctx)

            # If we guessed the argument was optional but it wasn't,
            # try to go back and make it *not* optional.
            elif (param_mod is ft.TypeModifier.SingletonType
                  and barg.arg_id is not None
                  and param_mod is not guessed_typemods[barg.arg_id]):
                for child in ctx.path_scope.children:
                    if (child.path_id == arg.path_id
                            or (arg.path_scope_id is not None
                                and child.unique_id == arg.path_scope_id)):
                        child.optional = False

            # Object type arguments to functions may be overloaded, so
            # we populate a path id field so that we can also pass the
            # type as an argument on the pgsql side. If the param type
            # is "anytype", though, then it can't be overloaded on
            # that argument.
            arg_type = setgen.get_set_type(arg, ctx=ctx)
            if (isinstance(arg_type, s_objtypes.ObjectType) and barg.param
                    and not barg.param.get_type(ctx.env.schema).is_any(
                        ctx.env.schema)):
                arg_type_path_id = pathctx.extend_path_id(
                    arg.path_id,
                    ptrcls=arg_type.getptr(ctx.env.schema,
                                           sn.UnqualName('__type__')),
                    ctx=ctx,
                )
        else:
            is_array_agg = (isinstance(bound_call.func, s_func.Function)
                            and (bound_call.func.get_shortname(ctx.env.schema)
                                 == sn.QualName('std', 'array_agg')))

            if (
                    # Ideally, we should implicitly slice all array values,
                    # but in practice, the vast majority of large arrays
                    # will come from array_agg, and so we only care about
                    # that.
                    is_array_agg and ctx.expr_exposed and
                    ctx.implicit_limit and
                    isinstance(arg.expr, irast.SelectStmt) and
                    arg.expr.limit is None and not ctx.inhibit_implicit_limit):
                arg.expr.limit = dispatch.compile(
                    qlast.IntegerConstant(value=str(ctx.implicit_limit)),
                    ctx=ctx,
                )

        paramtype = barg.param_type
        param_kind = param.get_kind(ctx.env.schema)
        if param_kind is ft.ParameterKind.VariadicParam:
            # For variadic params, paramtype would be array<T>,
            # and we need T to cast the arguments.
            assert isinstance(paramtype, s_types.Array)
            paramtype = list(paramtype.get_subtypes(ctx.env.schema))[0]

        # Check if we need to cast the argument value before passing
        # it to the callable.
        compatible = s_types.is_type_compatible(
            paramtype,
            barg.valtype,
            schema=ctx.env.schema,
        )

        if not compatible:
            # The callable form was chosen via an implicit cast, so
            # cast the arguments explicitly and leave the backend no
            # wiggle room to apply its own (potentially different)
            # casting.
            arg = casts.compile_cast(arg, paramtype, srcctx=None, ctx=ctx)

        args.append(
            irast.CallArg(expr=arg,
                          expr_type_path_id=arg_type_path_id,
                          is_default=barg.is_default))

    return args, typemods
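
To make that final casting step concrete, here is a heavily simplified,
standalone sketch of the "cast only when the bound parameter type differs"
decision. The Arg dataclass and coerce_arg helper are stand-ins invented for
illustration, not EdgeDB types, and the cast is represented as a plain string
prefix rather than a compiled IR node.

from dataclasses import dataclass


@dataclass
class Arg:
    # A stand-in for a compiled argument: its rendered expression and the
    # name of its inferred type.
    expr: str
    type_name: str


def coerce_arg(arg: Arg, param_type: str) -> Arg:
    # If the argument already has the parameter's type, pass it through
    # unchanged (the real code uses a compatibility check, not equality).
    if arg.type_name == param_type:
        return arg
    # Otherwise the overload was chosen via an implicit cast: make the cast
    # explicit so the backend cannot apply a different coercion of its own.
    return Arg(expr=f'<{param_type}>({arg.expr})', type_name=param_type)


if __name__ == '__main__':
    print(coerce_arg(Arg('x.count', 'std::int32'), 'std::int64'))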