Code example #1
File: output.py  Project: xing0713/edgedb
def coll_as_json_object(expr, *, styperef, env):
    if irtyputils.is_tuple(styperef):
        return tuple_as_json_object(expr, styperef=styperef, env=env)
    elif irtyputils.is_array(styperef):
        return array_as_json_object(expr, styperef=styperef, env=env)
    else:
        raise RuntimeError(f'{styperef!r} is not a collection')
Code example #2
def pg_type_from_ir_typeref(
        ir_typeref: irast.TypeRef,
        *,
        serialized: bool = False,
        persistent_tuples: bool = False) -> Tuple[str, ...]:

    if irtyputils.is_array(ir_typeref):
        if (irtyputils.is_generic(ir_typeref)
                or (irtyputils.is_abstract(ir_typeref.subtypes[0])
                    and irtyputils.is_scalar(ir_typeref.subtypes[0]))):
            return ('anyarray', )
        else:
            tp = pg_type_from_ir_typeref(ir_typeref.subtypes[0],
                                         serialized=serialized,
                                         persistent_tuples=persistent_tuples)
            if len(tp) == 1:
                return (tp[0] + '[]', )
            else:
                return (tp[0], tp[1] + '[]')

    elif irtyputils.is_anytuple(ir_typeref):
        return ('record', )

    elif irtyputils.is_tuple(ir_typeref):
        if ir_typeref.material_type:
            material = ir_typeref.material_type
        else:
            material = ir_typeref

        if persistent_tuples or material.in_schema:
            return common.get_tuple_backend_name(material.id, catenate=False)
        else:
            return ('record', )

    elif irtyputils.is_any(ir_typeref):
        return ('anyelement', )

    else:
        if ir_typeref.material_type:
            material = ir_typeref.material_type
        else:
            material = ir_typeref

        if irtyputils.is_object(material):
            if serialized:
                return ('record', )
            else:
                return ('uuid', )
        elif irtyputils.is_abstract(material):
            return ('anynonarray', )
        else:
            pg_type = base_type_name_map.get(material.id)
            if pg_type is None:
                # User-defined scalar type
                pg_type = common.get_scalar_backend_name(
                    material.id, material.name_hint.module, catenate=False)

            return pg_type
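Note: below is a minimal, self-contained sketch of the same decision tree, using toy stand-ins instead of the real irast.TypeRef / irtyputils API. It only mirrors the shape of the mapping above (array element plus '[]', anonymous tuples as "record", objects as uuid); the real function also consults base_type_name_map and the schema backend names.

from dataclasses import dataclass
from typing import Optional, Tuple


@dataclass
class ToyTypeRef:
    kind: str                       # 'scalar' | 'object' | 'array' | 'tuple'
    subtypes: Tuple['ToyTypeRef', ...] = ()
    pg_name: Optional[str] = None   # base Postgres type name for scalars


def toy_pg_type(t: ToyTypeRef) -> Tuple[str, ...]:
    if t.kind == 'array':
        # Arrays reuse the element's type name with a '[]' suffix.
        elem = toy_pg_type(t.subtypes[0])
        return (elem[0] + '[]',)
    elif t.kind == 'tuple':
        # Anonymous tuples become a plain "record"; the real code uses a
        # named composite type for schema-persistent tuples instead.
        return ('record',)
    elif t.kind == 'object':
        # Objects are referenced by their uuid.
        return ('uuid',)
    else:
        return (t.pg_name or 'text',)


# A toy array-of-bigint maps to ('int8[]',).
assert toy_pg_type(
    ToyTypeRef('array', (ToyTypeRef('scalar', pg_name='int8'),))) == ('int8[]',)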
Code example #3
File: output.py  Project: sailfish009/edgedb
def coll_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    if irtyputils.is_tuple(styperef):
        return tuple_as_json_object(expr, styperef=styperef, env=env)
    elif irtyputils.is_array(styperef):
        return array_as_json_object(expr, styperef=styperef, env=env)
    else:
        raise RuntimeError(f'{styperef!r} is not a collection')
Code example #4
File: types.py  Project: schedutron/edgedb
def __infer_type_introspection(ir, env):
    if irtyputils.is_scalar(ir.typeref):
        return env.schema.get('schema::ScalarType')
    elif irtyputils.is_object(ir.typeref):
        return env.schema.get('schema::ObjectType')
    elif irtyputils.is_array(ir.typeref):
        return env.schema.get('schema::Array')
    elif irtyputils.is_tuple(ir.typeref):
        return env.schema.get('schema::Tuple')
    else:
        raise errors.QueryError('unexpected type in INTROSPECT',
                                context=ir.context)
Code example #5
def __infer_type_introspection(
    ir: irast.TypeIntrospection,
    env: context.Environment,
) -> s_types.Type:
    if irtyputils.is_scalar(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::ScalarType'))
    elif irtyputils.is_object(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::ObjectType'))
    elif irtyputils.is_array(ir.typeref):
        return cast(s_objtypes.ObjectType, env.schema.get('schema::Array'))
    elif irtyputils.is_tuple(ir.typeref):
        return cast(s_objtypes.ObjectType, env.schema.get('schema::Tuple'))
    else:
        raise errors.QueryError('unexpected type in INTROSPECT',
                                context=ir.context)
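Note: the two variants above encode the same four-way mapping. Written out as a plain table for reference (illustrative only; the real code goes through the irtyputils predicates and env.schema.get()):

INTROSPECTED_TYPE_NAME = {
    'scalar': 'schema::ScalarType',
    'object': 'schema::ObjectType',
    'array': 'schema::Array',
    'tuple': 'schema::Tuple',
}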
Code example #6
File: schemamech.py  Project: syyunn/edgedb
    def _edgeql_ref_to_pg_constr(cls, subject, tree, schema, link_bias):
        sql_tree = compiler.compile_ir_to_sql_tree(
            tree, singleton_mode=True)

        if isinstance(sql_tree, pg_ast.SelectStmt):
            # XXX: use ast pattern matcher for this
            sql_expr = sql_tree.from_clause[0].relation\
                .query.target_list[0].val
        else:
            sql_expr = sql_tree

        if isinstance(tree, irast.Statement):
            tree = tree.expr

        if isinstance(tree.expr, irast.SelectStmt):
            tree = tree.expr.result

        is_multicol = irtyputils.is_tuple(tree.typeref)

        # Determine whether all of the references are simple refs, not
        # expressions.  This influences the type of Postgres constraint used.
        #
        is_trivial = (
            isinstance(sql_expr, pg_ast.ColumnRef) or (
                isinstance(sql_expr, pg_ast.ImplicitRowExpr) and all(
                    isinstance(el, pg_ast.ColumnRef)
                    for el in sql_expr.args)))

        # Find all field references
        #
        flt = lambda n: isinstance(n, pg_ast.ColumnRef) and len(n.name) == 1
        refs = set(ast.find_children(sql_expr, flt))

        if isinstance(subject, s_scalars.ScalarType):
            # Domain constraint, replace <scalar_name> with VALUE

            subject_pg_name = common.edgedb_name_to_pg_name(str(subject.id))

            for ref in refs:
                if ref.name != [subject_pg_name]:
                    raise ValueError(
                        f'unexpected node reference in '
                        f'ScalarType constraint: {".".join(ref.name)}'
                    )

                # work around the immutability check
                object.__setattr__(ref, 'name', ['VALUE'])

        plain_expr = codegen.SQLSourceGenerator.to_source(sql_expr)

        if is_multicol:
            chunks = []

            for elem in sql_expr.args:
                chunks.append(codegen.SQLSourceGenerator.to_source(elem))
        else:
            chunks = [plain_expr]

        if isinstance(sql_expr, pg_ast.ColumnRef):
            refs.add(sql_expr)

        for ref in refs:
            ref.name.insert(0, 'NEW')
        new_expr = codegen.SQLSourceGenerator.to_source(sql_expr)

        for ref in refs:
            ref.name[0] = 'OLD'
        old_expr = codegen.SQLSourceGenerator.to_source(sql_expr)

        exprdata = dict(
            plain=plain_expr, plain_chunks=chunks, new=new_expr, old=old_expr)

        return dict(
            exprdata=exprdata, is_multicol=is_multicol, is_trivial=is_trivial)
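Note: the NEW/OLD handling above collects the column-reference nodes once and mutates them in place, so the single expression tree can be rendered three times (plain, NEW-qualified, OLD-qualified). A minimal sketch of that trick with toy classes (not the real pg_ast / codegen API):

from typing import List


class Col:
    def __init__(self, name: List[str]) -> None:
        self.name = name


class Eq:
    def __init__(self, left: Col, right: str) -> None:
        self.left, self.right = left, right

    def to_source(self) -> str:
        return f"{'.'.join(self.left.name)} = {self.right!r}"


ref = Col(['email'])
expr = Eq(ref, 'x@example.com')
refs = [ref]

plain = expr.to_source()   # "email = 'x@example.com'"

for r in refs:
    r.name.insert(0, 'NEW')
new = expr.to_source()     # "NEW.email = 'x@example.com'"

for r in refs:
    r.name[0] = 'OLD'
old = expr.to_source()     # "OLD.email = 'x@example.com'"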
Code example #7
File: output.py  Project: sailfish009/edgedb
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env), args=[expr],
            null_safe=True, ser_safe=True)
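Note: for an array of a named two-element tuple, the SelectStmt built above serializes to SQL of roughly the shape below. This is an illustrative approximation only: the concrete json/jsonb function names depend on env, the column types come from pg_type_from_ir_typeref, and the range alias from env.aliases.

# Approximate SQL shape for array<tuple<a: str, b: int64>> (illustration only).
APPROXIMATE_SQL = """
SELECT
    coalesce(
        jsonb_agg(jsonb_build_object('a', q.a, 'b', q.b)),
        '[]'
    )
FROM
    unnest(<array expression>) AS q (a text, b int8)
"""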
Code example #8
def process_update_body(
    ir_stmt: irast.MutatingStmt,
    wrapper: pgast.Query,
    update_cte: pgast.CommonTableExpr,
    typeref: irast.TypeRef,
    *,
    ctx: context.CompilerContextLevel,
) -> None:
    """Generate SQL DML CTEs from an UpdateStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param update_cte:
        CTE representing the SQL UPDATE to the main relation of the Object.
    :param typeref:
        The specific TypeRef of a set being updated.
    """
    update_stmt = update_cte.query
    assert isinstance(update_stmt, pgast.UpdateStmt)

    external_updates = []

    with ctx.newscope() as subctx:
        # It is necessary to process the expressions in
        # the UpdateStmt shape body in the context of the
        # UPDATE statement so that references to the current
        # values of the updated object are resolved correctly.
        subctx.parent_rel = update_stmt
        subctx.expr_exposed = False

        for shape_el, shape_op in ir_stmt.subject.shape:
            ptrref = shape_el.rptr.ptrref
            updvalue = shape_el.expr
            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=True,
                                                        link_bias=False)

            if ptr_info.table_type == 'ObjectType' and updvalue is not None:
                with subctx.newscope() as scopectx:
                    val: pgast.BaseExpr

                    if irtyputils.is_tuple(shape_el.typeref):
                        # When target is a tuple type, make sure
                        # the expression is compiled into a subquery
                        # returning a single column that is explicitly
                        # cast into the appropriate composite type.
                        val = relgen.set_as_subquery(
                            shape_el,
                            as_value=True,
                            explicit_cast=ptr_info.column_type,
                            ctx=scopectx,
                        )
                    else:
                        if (isinstance(updvalue, irast.MutatingStmt)
                                and updvalue in ctx.dml_stmts):
                            with scopectx.substmt() as relctx:
                                dml_cte = ctx.dml_stmts[updvalue]
                                wrap_dml_cte(updvalue, dml_cte, ctx=relctx)
                                pathctx.get_path_identity_output(
                                    relctx.rel,
                                    updvalue.subject.path_id,
                                    env=relctx.env,
                                )
                                val = relctx.rel
                        else:
                            val = dispatch.compile(updvalue, ctx=scopectx)

                        val = pgast.TypeCast(arg=val,
                                             type_name=pgast.TypeName(
                                                 name=ptr_info.column_type))

                    if shape_op is qlast.ShapeOp.SUBTRACT:
                        val = pgast.FuncCall(
                            name=('nullif', ),
                            args=[
                                pgast.ColumnRef(name=(ptr_info.column_name, )),
                                val,
                            ],
                        )

                    updtarget = pgast.UpdateTarget(
                        name=ptr_info.column_name,
                        val=val,
                    )

                    update_stmt.targets.append(updtarget)

            props_only = is_props_only_update(shape_el, ctx=subctx)

            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=False,
                                                        link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_updates.append((shape_el, shape_op, props_only))

    if not update_stmt.targets:
        # No updates directly to the set target table,
        # so convert the UPDATE statement into a SELECT.
        from_clause: List[pgast.BaseRangeVar] = [update_stmt.relation]
        from_clause.extend(update_stmt.from_clause)
        update_cte.query = pgast.SelectStmt(
            ctes=update_stmt.ctes,
            target_list=update_stmt.returning_list,
            from_clause=from_clause,
            where_clause=update_stmt.where_clause,
            path_namespace=update_stmt.path_namespace,
            path_outputs=update_stmt.path_outputs,
            path_scope=update_stmt.path_scope,
            path_rvar_map=update_stmt.path_rvar_map.copy(),
            view_path_id_map=update_stmt.view_path_id_map.copy(),
            ptr_join_map=update_stmt.ptr_join_map.copy(),
        )

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(update_cte)

    # Process necessary updates to the link tables.
    for expr, shape_op, _ in external_updates:
        process_link_update(
            ir_stmt=ir_stmt,
            ir_set=expr,
            props_only=False,
            wrapper=wrapper,
            dml_cte=update_cte,
            iterator_cte=None,
            is_insert=False,
            shape_op=shape_op,
            source_typeref=typeref,
            ctx=ctx,
        )
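Note: the SUBTRACT shape op above compiles to nullif(column, value). In SQL, nullif(a, b) yields NULL when a = b and a otherwise, so the column is cleared only when it currently holds the value being subtracted. A short Python analogue of that semantics (illustration only):

def nullif(a, b):
    # SQL nullif(a, b): NULL when a = b, otherwise a.
    return None if a == b else a


assert nullif('x@example.com', 'x@example.com') is None
assert nullif('y@example.com', 'x@example.com') == 'y@example.com'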
Code example #9
def process_insert_body(ir_stmt: irast.MutatingStmt, wrapper: pgast.SelectStmt,
                        insert_cte: pgast.CommonTableExpr,
                        insert_rvar: pgast.PathRangeVar, *,
                        ctx: context.CompilerContextLevel) -> None:
    """Generate SQL DML CTEs from an InsertStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param insert_cte:
        CTE representing the SQL INSERT to the main relation of the Object.
    """
    cols = [pgast.ColumnRef(name=['__type__'])]
    select = pgast.SelectStmt(target_list=[])
    values = select.target_list

    # The main INSERT query of this statement will always be
    # present to insert at least the `id` and `__type__`
    # properties.
    insert_stmt = insert_cte.query
    assert isinstance(insert_stmt, pgast.InsertStmt)

    insert_stmt.cols = cols
    insert_stmt.select_stmt = select

    if ir_stmt.parent_stmt is not None:
        iterator_set = ir_stmt.parent_stmt.iterator_stmt
    else:
        iterator_set = None

    iterator_cte: Optional[pgast.CommonTableExpr]
    iterator_id: Optional[pgast.BaseExpr]

    if iterator_set is not None:
        with ctx.substmt() as ictx:
            ictx.path_scope = ictx.path_scope.new_child()
            ictx.path_scope[iterator_set.path_id] = ictx.rel
            clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx)
            ictx.rel.path_id = iterator_set.path_id
            pathctx.put_path_bond(ictx.rel, iterator_set.path_id)
            iterator_cte = pgast.CommonTableExpr(
                query=ictx.rel, name=ctx.env.aliases.get('iter'))
            ictx.toplevel_stmt.ctes.append(iterator_cte)
        iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx)
        relctx.include_rvar(select,
                            iterator_rvar,
                            path_id=ictx.rel.path_id,
                            ctx=ctx)
        iterator_id = pathctx.get_path_identity_var(select,
                                                    iterator_set.path_id,
                                                    env=ctx.env)
    else:
        iterator_cte = None
        iterator_id = None

    typeref = ir_stmt.subject.typeref
    if typeref.material_type is not None:
        typeref = typeref.material_type

    values.append(
        pgast.ResTarget(val=pgast.TypeCast(
            arg=pgast.StringConstant(val=str(typeref.id)),
            type_name=pgast.TypeName(name=('uuid', ))), ))

    external_inserts = []

    with ctx.newrel() as subctx:
        subctx.rel = select
        subctx.rel_hierarchy[select] = insert_stmt

        subctx.expr_exposed = False

        if iterator_cte is not None:
            subctx.path_scope = ctx.path_scope.new_child()
            subctx.path_scope[iterator_cte.query.path_id] = select

        # Process the Insert IR and separate links that go
        # into the main table from links that are inserted into
        # a separate link table.
        for shape_el, shape_op in ir_stmt.subject.shape:
            assert shape_op is qlast.ShapeOp.ASSIGN

            rptr = shape_el.rptr
            ptrref = rptr.ptrref
            if ptrref.material_ptr is not None:
                ptrref = ptrref.material_ptr

            if (ptrref.source_ptr is not None
                    and rptr.source.path_id != ir_stmt.subject.path_id):
                continue

            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=True,
                                                        link_bias=False)

            props_only = False

            # First, process all local link inserts.
            if ptr_info.table_type == 'ObjectType':
                props_only = True
                field = pgast.ColumnRef(name=[ptr_info.column_name])
                cols.append(field)

                rel = compile_insert_shape_element(insert_stmt,
                                                   wrapper,
                                                   ir_stmt,
                                                   shape_el,
                                                   iterator_id,
                                                   ctx=ctx)

                insvalue = pathctx.get_path_value_var(rel,
                                                      shape_el.path_id,
                                                      env=ctx.env)

                if irtyputils.is_tuple(shape_el.typeref):
                    # Tuples require an explicit cast.
                    insvalue = pgast.TypeCast(
                        arg=output.output_as_value(insvalue, env=ctx.env),
                        type_name=pgast.TypeName(name=ptr_info.column_type, ),
                    )

                values.append(pgast.ResTarget(val=insvalue))

            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=False,
                                                        link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_inserts.append((shape_el, props_only))

        if iterator_set is not None:
            cols.append(pgast.ColumnRef(name=['__edb_token']))

            values.append(pgast.ResTarget(val=iterator_id))

            pathctx.put_path_identity_var(insert_stmt,
                                          iterator_set.path_id,
                                          cols[-1],
                                          force=True,
                                          env=subctx.env)

            pathctx.put_path_bond(insert_stmt, iterator_set.path_id)
            pathctx.put_path_rvar(
                wrapper,
                path_id=iterator_set.path_id,
                rvar=insert_rvar,
                aspect='identity',
                env=subctx.env,
            )

    if isinstance(ir_stmt, irast.InsertStmt) and ir_stmt.on_conflict:
        assert not insert_stmt.on_conflict

        constraint_name = f'"{ir_stmt.on_conflict.id};schemaconstr"'

        insert_stmt.on_conflict = pgast.OnConflictClause(
            action='nothing',
            infer=pgast.InferClause(conname=constraint_name),
        )

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(insert_cte)

    # Process necessary updates to the link tables.
    for shape_el, props_only in external_inserts:
        process_link_update(
            ir_stmt=ir_stmt,
            ir_set=shape_el,
            props_only=props_only,
            wrapper=wrapper,
            dml_cte=insert_cte,
            source_typeref=typeref,
            iterator_cte=iterator_cte,
            is_insert=True,
            ctx=ctx,
        )
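Note: the OnConflictClause at the end corresponds to a Postgres ON CONFLICT ON CONSTRAINT ... DO NOTHING tail on the generated INSERT. An illustrative approximation of that SQL (the constraint name comes from the on_conflict id; exact quoting and the rest of the INSERT are elided):

APPROXIMATE_ON_CONFLICT_SQL = """
INSERT INTO ...
ON CONFLICT ON CONSTRAINT "<constraint-id>;schemaconstr" DO NOTHING
"""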
Code example #10
File: output.py  Project: xing0713/edgedb
def array_as_json_object(expr, *, styperef, env):
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = env.aliases.get(st.element_name)
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = env.aliases.get(str(i))

            val = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            coldeflist.append(
                pgast.ColumnDef(name=colname,
                                typename=pgast.TypeName(
                                    name=pgtypes.pg_type_from_ir_typeref(st))))

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr],
                              null_safe=True,
                              ser_safe=True)
Code example #11
File: output.py  Project: stjordanis/edgedb
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]

    is_tuple = irtyputils.is_tuple(el_type)
    # Tuples and bytes might need underlying casts to be done
    if is_tuple or irtyputils.is_bytes(el_type):
        coldeflist = []

        out_alias = env.aliases.get('q')

        val: pgast.BaseExpr
        if is_tuple:
            json_args: List[pgast.BaseExpr] = []
            is_named = any(st.element_name for st in el_type.subtypes)
            for i, st in enumerate(el_type.subtypes):
                if is_named:
                    colname = st.element_name
                    assert colname
                    json_args.append(pgast.StringConstant(val=colname))
                else:
                    colname = str(i)

                val = pgast.ColumnRef(name=[colname])
                val = serialize_expr_to_json(val,
                                             styperef=st,
                                             nested=True,
                                             env=env)

                json_args.append(val)

                if not irtyputils.is_persistent_tuple(el_type):
                    # Column definition list is only allowed for functions
                    # returning "record", i.e. an anonymous tuple, which
                    # would not be the case for schema-persistent tuple types.
                    coldeflist.append(
                        pgast.ColumnDef(
                            name=colname,
                            typename=pgast.TypeName(
                                name=pgtypes.pg_type_from_ir_typeref(st))))

            json_func = 'build_object' if is_named else 'build_array'
            agg_arg = _build_json(json_func, json_args, env=env)

            needs_unnest = bool(el_type.subtypes)
        else:
            assert not el_type.subtypes
            val = pgast.ColumnRef(name=[out_alias])
            agg_arg = serialize_expr_to_json(val,
                                             styperef=el_type,
                                             nested=True,
                                             env=env)
            needs_unnest = True

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[agg_arg],
                        ),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(alias=pgast.Alias(aliasname=out_alias),
                                    is_rowsfrom=True,
                                    functions=[
                                        pgast.FuncCall(
                                            name=('unnest', ),
                                            args=[expr],
                                            coldeflist=coldeflist,
                                        )
                                    ])
            ] if needs_unnest else [],
        )
    else:
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr],
                              null_safe=True,
                              ser_safe=True)