Example #1
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:

    if expr.typemod is ql_ft.TypeModifier.SET_OF:
        raise RuntimeError(
            'set returning functions are not supported in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.variadic_param_type)
            )
        )

        args.append(pgast.VariadicArgument(expr=var))

    if expr.func_sql_function:
        # The name might contain a "." if it's one of our
        # metaschema helpers.
        name = tuple(expr.func_sql_function.split('.', 1))
    else:
        name = common.get_function_backend_name(expr.func_shortname,
                                                expr.func_module_id)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)
            )
        )

    return result
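
A quick aside on the func_sql_function branch above: splitting a dotted SQL
name with split('.', 1) yields a schema-qualified two-tuple, while a bare
name stays a one-tuple. A standalone illustration (the dotted name below is
made up):

assert tuple('edgedb.foo_helper'.split('.', 1)) == ('edgedb', 'foo_helper')
assert tuple('lower'.split('.', 1)) == ('lower',)
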
Example #2
def compile_TypeCheckOp(expr: irast.TypeCheckOp, *,
                        ctx: context.CompilerContextLevel) -> pgast.Base:

    with ctx.new() as newctx:
        newctx.expr_exposed = False
        left = dispatch.compile(expr.left, ctx=newctx)
        negated = expr.op == 'IS NOT'

        if expr.result is not None:
            result = pgast.BooleanConstant(
                val='false' if not expr.result or negated else 'true')
        else:
            right = dispatch.compile(expr.right, ctx=newctx)

            result = pgast.FuncCall(name=('edgedb', 'issubclass'),
                                    args=[left, right])

            if negated:
                result = astutils.new_unop('NOT', result)

    return result
Example #3
def tuple_getattr(tuple_val, tuple_typeref, attr):
    ttypes = []
    pgtypes = []
    for i, st in enumerate(tuple_typeref.subtypes):
        pgtype = pg_types.pg_type_from_ir_typeref(st)
        pgtypes.append(pgtype)

        if st.element_name:
            ttypes.append(st.element_name)
        else:
            ttypes.append(str(i))

    index = ttypes.index(attr)

    # A tuple type materialized in the schema is a real composite type,
    # so its elements can be accessed with plain attribute indirection;
    # otherwise the value is unpacked via unnest() with a column
    # definition list and the element is selected by position.
    if tuple_typeref.in_schema:
        set_expr = pgast.Indirection(
            arg=tuple_val,
            indirection=[
                pgast.ColumnRef(name=[attr], ),
            ],
        )
    else:
        set_expr = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=[str(index)], ), ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[tuple_val], )],
                        coldeflist=[
                            pgast.ColumnDef(name=str(i),
                                            typename=pgast.TypeName(name=t))
                            for i, t in enumerate(pgtypes)
                        ])
                ])
            ])

    return set_expr
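
The attribute lookup above treats unnamed tuple elements as positionally
named ('0', '1', ...). A standalone sketch of that resolution step, with a
made-up helper name:

def attr_index(element_names, attr):
    # Fall back to the positional name for elements without an explicit one.
    names = [n if n else str(i) for i, n in enumerate(element_names)]
    return names.index(attr)

assert attr_index(['x', None], 'x') == 0
assert attr_index([None, None], '1') == 1
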
Example #4
def compile_SliceIndirection(expr: irast.Base, *,
                             ctx: context.CompilerContextLevel) -> pgast.Base:
    # Handle Expr[Start:Stop], where Expr may be std::str, array<T> or
    # std::json. For strings we translate this into substr calls.
    # Arrays use the native slice syntax. JSON is handled by a
    # combination of unnesting aggregation and array slicing.
    with ctx.new() as subctx:
        subctx.expr_exposed = False
        subj = dispatch.compile(expr.expr, ctx=subctx)
        if expr.start is None:
            start = pgast.NullConstant()
        else:
            start = dispatch.compile(expr.start, ctx=subctx)
        if expr.stop is None:
            stop = pgast.NullConstant()
        else:
            stop = dispatch.compile(expr.stop, ctx=subctx)

    result = pgast.FuncCall(name=('edgedb', '_slice'),
                            args=[subj, start, stop])

    return result
Example #5
def compile_IndexIndirection(expr: irast.Base, *,
                             ctx: context.CompilerContextLevel) -> pgast.Base:
    # Handle Expr[Index], where Expr may be std::str, array<T> or
    # std::json. For strings we translate this into substr calls.
    # Arrays use the native index access. JSON is handled by using the
    # `->` accessor. Additionally, in all of the above cases a
    # boundary-check is performed on the index and an exception is
    # potentially raised.

    # line, column and filename are captured here to be used with the
    # error message
    srcctx = pgast.StringConstant(val=irutils.get_source_context_as_json(
        expr.index, errors.InvalidValueError))

    with ctx.new() as subctx:
        subctx.expr_exposed = False
        subj = dispatch.compile(expr.expr, ctx=subctx)
        index = dispatch.compile(expr.index, ctx=subctx)

    result = pgast.FuncCall(name=('edgedb', '_index'),
                            args=[subj, index, srcctx])

    return result
Example #6
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:

    if expr.typemod is ql_ft.TypeModifier.SetOfType:
        raise RuntimeError(
            'set returning functions are not supported in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.variadic_param_type)
            )
        )

        args.append(pgast.VariadicArgument(expr=var))

    name = relgen.get_func_call_backend_name(expr, ctx=ctx)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)
            )
        )

    return result
Example #7
def compile_ConfigSet(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    val: pgast.BaseExpr

    with ctx.new() as subctx:
        if op.backend_setting:
            output_format = context.OutputFormat.NATIVE
        else:
            output_format = context.OutputFormat.JSONB

        with context.output_format(ctx, output_format):
            if isinstance(op.expr, irast.EmptySet):
                # Special handling for empty sets, because we want a
                # singleton representation of the value and not an empty rel
                # in this context.
                if op.cardinality is qltypes.SchemaCardinality.One:
                    val = pgast.NullConstant()
                elif subctx.env.output_format is context.OutputFormat.JSONB:
                    val = pgast.TypeCast(
                        arg=pgast.StringConstant(val='[]'),
                        type_name=pgast.TypeName(
                            name=('jsonb',),
                        ),
                    )
                else:
                    val = pgast.TypeCast(
                        arg=pgast.ArrayExpr(),
                        type_name=pgast.TypeName(
                            name=('text[]',),
                        ),
                    )
            else:
                val = dispatch.compile(op.expr, ctx=subctx)
                assert isinstance(val, pgast.SelectStmt), "expected SelectStmt"

                pathctx.get_path_serialized_output(
                    val, op.expr.path_id, env=ctx.env)

                if op.cardinality is qltypes.SchemaCardinality.Many:
                    val = output.aggregate_json_output(
                        val, op.expr, env=ctx.env)

    result: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting:
        assert isinstance(val, pgast.SelectStmt) and len(val.target_list) == 1
        valval = val.target_list[0].val
        if isinstance(valval, pgast.TypeCast):
            valval = valval.arg
        if not isinstance(valval, pgast.BaseConstant):
            raise AssertionError('value is not a constant in ConfigSet')
        result = pgast.AlterSystem(
            name=op.backend_setting,
            value=valval,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[pgast.StringConstant(val=op.backend_setting), val],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.TypeCast(
                    arg=val,
                    type_name=pgast.TypeName(name=('text',)),
                ),
                pgast.BooleanConstant(val='false'),
            ],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SYSTEM:
        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='SET'),
                pgast.StringConstant(val=str(op.scope)),
                pgast.StringConstant(val=op.name),
                val,
            ]
        )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=result,
                ),
            ],
        )
    elif op.scope is qltypes.ConfigScope.SESSION:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(
                                val=op.name,
                            ),
                            val,
                            pgast.StringConstant(
                                val='C',
                            ),
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                        pgast.ColumnRef(name=['type']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[
                                val,
                            ],
                        ),
                    ),
                ],
            ),
        )
    elif op.scope is qltypes.ConfigScope.DATABASE:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_db_config',
                    schemaname='edgedb',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(
                                val=op.name,
                            ),
                            val,
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[
                                val,
                            ],
                        ),
                    ),
                ],
            ),
        )
    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return result
Example #8
def compile_ConfigReset(
    op: irast.ConfigReset,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    stmt: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting:
        stmt = pgast.AlterSystem(
            name=op.backend_setting,
            value=None,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.NullConstant(),
            ],
        )

        stmt = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.NullConstant(),
                pgast.BooleanConstant(val='false'),
            ],
        )

        stmt = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SYSTEM:

        if op.selector is None:
            # Scalar reset
            result_row = pgast.RowExpr(
                args=[
                    pgast.StringConstant(val='RESET'),
                    pgast.StringConstant(val=str(op.scope)),
                    pgast.StringConstant(val=op.name),
                    pgast.NullConstant(),
                ]
            )

            rvar = None
        else:
            with context.output_format(ctx, context.OutputFormat.JSONB):
                selector = dispatch.compile(op.selector, ctx=ctx)

            assert isinstance(selector, pgast.SelectStmt), \
                "expected ast.SelectStmt"
            target = selector.target_list[0]
            if not target.name:
                target = selector.target_list[0] = pgast.ResTarget(
                    name=ctx.env.aliases.get('res'),
                    val=target.val,
                )

            rvar = relctx.rvar_for_rel(selector, ctx=ctx)

            result_row = pgast.RowExpr(
                args=[
                    pgast.StringConstant(val='REM'),
                    pgast.StringConstant(val=str(op.scope)),
                    pgast.StringConstant(val=op.name),
                    astutils.get_column(rvar, target.name),
                ]
            )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        stmt = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=result,
                ),
            ],
        )

        if rvar is not None:
            stmt.from_clause = [rvar]

    elif op.scope is qltypes.ConfigScope.DATABASE:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_db_config',
                    schemaname='edgedb',
                ),
            ),

            where_clause=astutils.new_binop(
                lexpr=pgast.ColumnRef(name=['name']),
                rexpr=pgast.StringConstant(val=op.name),
                op='=',
            ),
        )

    elif op.scope is qltypes.ConfigScope.SESSION:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),

            where_clause=astutils.new_binop(
                lexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['name']),
                    rexpr=pgast.StringConstant(val=op.name),
                    op='=',
                ),
                rexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['type']),
                    rexpr=pgast.StringConstant(val='C'),
                    op='=',
                ),
                op='AND',
            )
        )

    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return stmt
Example #9
def compile_ConfigSet(
        op: irast.ConfigSet, *,
        ctx: context.CompilerContextLevel) -> pgast.Query:

    with ctx.new() as subctx:
        val = dispatch.compile(op.expr, ctx=subctx)
        pathctx.get_path_serialized_output(
            val, op.expr.path_id, env=ctx.env)
        if op.cardinality is qltypes.Cardinality.MANY:
            val = output.aggregate_json_output(val, op.expr, env=ctx.env)

    result_row = pgast.RowExpr(
        args=[
            pgast.StringConstant(val='SET'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            val,
        ]
    )

    result = pgast.FuncCall(
        name=('jsonb_build_array',),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    if not op.system:
        stmt = pgast.InsertStmt(
            relation=pgast.RangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(
                                val=op.name,
                            ),
                            val,
                            pgast.StringConstant(
                                val='C',
                            ),
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                        pgast.ColumnRef(name=['type']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[
                                val,
                            ],
                        ),
                    ),
                ],
            ),
        )
    else:
        stmt = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=result,
                ),
            ],
        )

    return stmt
Example #10
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        for el_type in styperef.subtypes:
            assert el_type.element_name
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[pgast.ColumnRef(name=[el_type.element_name])])
            val = serialize_expr_to_json(val,
                                         styperef=el_type,
                                         nested=True,
                                         env=env)
            keyvals.append(val)

        obj = _build_json(
            'build_object',
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

    else:
        coldeflist = []

        for el_type in styperef.subtypes:
            assert el_type.element_name
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(
                pgast.ColumnDef(
                    name=el_type.element_name,
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type), ),
                ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            val = serialize_expr_to_json(val,
                                         styperef=el_type,
                                         nested=True,
                                         env=env)

            keyvals.append(val)

        obj = _build_json(
            'build_object',
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

        obj = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=obj, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ] if styperef.subtypes else [])

    if expr.nullable:
        obj = pgast.SelectStmt(target_list=[pgast.ResTarget(val=obj)],
                               where_clause=pgast.NullTest(arg=expr,
                                                           negated=True))
    return obj
Example #11
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(
                        name=[el_type.element_name]
                    )
                ]
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

    else:
        coldeflist = []

        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(pgast.ColumnDef(
                name=el_type.element_name,
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            keyvals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=res,
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[expr],
                                )
                            ],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
Example #12
def process_update_body(
    ir_stmt: irast.MutatingStmt,
    wrapper: pgast.Query,
    update_cte: pgast.CommonTableExpr,
    typeref: irast.TypeRef,
    *,
    ctx: context.CompilerContextLevel,
) -> None:
    """Generate SQL DML CTEs from an UpdateStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param update_cte:
        CTE representing the SQL UPDATE to the main relation of the Object.
    :param typeref:
        The specific TypeRef of a set being updated.
    """
    update_stmt = update_cte.query
    assert isinstance(update_stmt, pgast.UpdateStmt)

    external_updates = []

    with ctx.newscope() as subctx:
        # It is necessary to process the expressions in
        # the UpdateStmt shape body in the context of the
        # UPDATE statement so that references to the current
        # values of the updated object are resolved correctly.
        subctx.parent_rel = update_stmt
        subctx.expr_exposed = False

        for shape_el, shape_op in ir_stmt.subject.shape:
            ptrref = shape_el.rptr.ptrref
            updvalue = shape_el.expr
            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=True,
                                                        link_bias=False)

            if ptr_info.table_type == 'ObjectType' and updvalue is not None:
                with subctx.newscope() as scopectx:
                    val: pgast.BaseExpr

                    if irtyputils.is_tuple(shape_el.typeref):
                        # When target is a tuple type, make sure
                        # the expression is compiled into a subquery
                        # returning a single column that is explicitly
                        # cast into the appropriate composite type.
                        val = relgen.set_as_subquery(
                            shape_el,
                            as_value=True,
                            explicit_cast=ptr_info.column_type,
                            ctx=scopectx,
                        )
                    else:
                        if (isinstance(updvalue, irast.MutatingStmt)
                                and updvalue in ctx.dml_stmts):
                            with scopectx.substmt() as relctx:
                                dml_cte = ctx.dml_stmts[updvalue]
                                wrap_dml_cte(updvalue, dml_cte, ctx=relctx)
                                pathctx.get_path_identity_output(
                                    relctx.rel,
                                    updvalue.subject.path_id,
                                    env=relctx.env,
                                )
                                val = relctx.rel
                        else:
                            val = dispatch.compile(updvalue, ctx=scopectx)

                        val = pgast.TypeCast(arg=val,
                                             type_name=pgast.TypeName(
                                                 name=ptr_info.column_type))

                    if shape_op is qlast.ShapeOp.SUBTRACT:
                        val = pgast.FuncCall(
                            name=('nullif', ),
                            args=[
                                pgast.ColumnRef(name=(ptr_info.column_name, )),
                                val,
                            ],
                        )

                    updtarget = pgast.UpdateTarget(
                        name=ptr_info.column_name,
                        val=val,
                    )

                    update_stmt.targets.append(updtarget)

            props_only = is_props_only_update(shape_el, ctx=subctx)

            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=False,
                                                        link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_updates.append((shape_el, shape_op, props_only))

    if not update_stmt.targets:
        # No updates directly to the set target table,
        # so convert the UPDATE statement into a SELECT.
        from_clause: List[pgast.BaseRangeVar] = [update_stmt.relation]
        from_clause.extend(update_stmt.from_clause)
        update_cte.query = pgast.SelectStmt(
            ctes=update_stmt.ctes,
            target_list=update_stmt.returning_list,
            from_clause=from_clause,
            where_clause=update_stmt.where_clause,
            path_namespace=update_stmt.path_namespace,
            path_outputs=update_stmt.path_outputs,
            path_scope=update_stmt.path_scope,
            path_rvar_map=update_stmt.path_rvar_map.copy(),
            view_path_id_map=update_stmt.view_path_id_map.copy(),
            ptr_join_map=update_stmt.ptr_join_map.copy(),
        )

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(update_cte)

    # Process necessary updates to the link tables.
    for expr, shape_op, _ in external_updates:
        process_link_update(
            ir_stmt=ir_stmt,
            ir_set=expr,
            props_only=False,
            wrapper=wrapper,
            dml_cte=update_cte,
            iterator_cte=None,
            is_insert=False,
            shape_op=shape_op,
            source_typeref=typeref,
            ctx=ctx,
        )
Example #13
def compile_ConfigReset(op: irast.ConfigReset, *,
                        ctx: context.CompilerContextLevel) -> pgast.Query:

    if op.selector is None:
        # Scalar reset
        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='RESET'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            pgast.NullConstant(),
        ])

        rvar = None
    else:
        selector = dispatch.compile(op.selector, ctx=ctx)
        assert isinstance(selector, pgast.SelectStmt), \
            "expected ast.SelectStmt"
        target = selector.target_list[0]
        if not target.name:
            target = selector.target_list[0] = pgast.ResTarget(
                name=ctx.env.aliases.get('res'),
                val=target.val,
            )

        rvar = relctx.rvar_for_rel(selector, ctx=ctx)

        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='REM'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            astutils.get_column(rvar, target.name),
        ])

    result = pgast.FuncCall(
        name=('jsonb_build_array', ),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    stmt: pgast.Query

    if not op.system:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(relation=pgast.Relation(
                name='_edgecon_state', ), ),
            where_clause=astutils.new_binop(
                lexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['name']),
                    rexpr=pgast.StringConstant(val=op.name),
                    op='=',
                ),
                rexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['type']),
                    rexpr=pgast.StringConstant(val='C'),
                    op='=',
                ),
                op='AND',
            ))
    else:
        stmt = pgast.SelectStmt(target_list=[
            pgast.ResTarget(val=result, ),
        ], )

        if rvar is not None:
            stmt.from_clause = [rvar]

    return stmt
Example #14
def _compile_grouping_value(
        stmt: irast.GroupStmt, used_args: AbstractSet[str], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    '''Produce the value for the grouping binding saying what is grouped on'''
    assert stmt.grouping_binding
    grouprel = ctx.rel

    # If there is only one thing grouped on, just output the hardcoded name.
    if len(used_args) == 1:
        return pgast.ArrayExpr(elements=[
            pgast.StringConstant(
                val=desugar_group.key_name(list(used_args)[0]))
        ])

    using = {k: stmt.using[k] for k in used_args}

    args = [
        pathctx.get_path_var(grouprel,
                             alias_set.path_id,
                             aspect='value',
                             env=ctx.env) for alias_set, _ in using.values()
    ]

    # Call grouping on each element we group on to produce a bitmask
    grouping_alias = ctx.env.aliases.get('g')
    grouping_call = pgast.FuncCall(name=('grouping', ), args=args)
    subq = pgast.SelectStmt(target_list=[
        pgast.ResTarget(name=grouping_alias, val=grouping_call),
    ])
    q = pgast.SelectStmt(from_clause=[
        pgast.RangeSubselect(
            subquery=subq, alias=pgast.Alias(aliasname=ctx.env.aliases.get()))
    ])

    grouping_ref = pgast.ColumnRef(name=(grouping_alias, ))

    # Generate a call to ARRAY[...] with a case for each grouping
    # element, then array_remove out the NULLs.
    els: List[pgast.BaseExpr] = []
    for i, name in enumerate(using):
        name = desugar_group.key_name(name)
        mask = 1 << (len(using) - i - 1)
        # (CASE (e & <mask>) WHEN 0 THEN '<name>' ELSE NULL END)

        els.append(
            pgast.CaseExpr(
                arg=pgast.Expr(kind=pgast.ExprKind.OP,
                               name='&',
                               lexpr=grouping_ref,
                               rexpr=pgast.LiteralExpr(expr=str(mask))),
                args=[
                    pgast.CaseWhen(expr=pgast.LiteralExpr(expr='0'),
                                   result=pgast.StringConstant(val=name))
                ],
                defresult=pgast.NullConstant()))

    val = pgast.FuncCall(
        name=('array_remove', ),
        args=[pgast.ArrayExpr(elements=els),
              pgast.NullConstant()])

    q.target_list.append(pgast.ResTarget(val=val))

    return q
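
The CASE/array_remove construction above decodes the bitmask returned by
SQL's grouping(): counting from the leftmost argument, bit i is 0 when that
argument is part of the current grouping set. A minimal pure-Python sketch
of the same decoding (the helper name is made up for illustration):

def decode_grouping_bitmask(bitmask, names):
    # Mirrors the generated CASE arms: a zero bit means "grouped on".
    grouped = []
    for i, name in enumerate(names):
        mask = 1 << (len(names) - i - 1)
        if bitmask & mask == 0:
            grouped.append(name)
    return grouped

# With keys ('a', 'b'), a grouping() value of 0b01 means only 'a' is grouped on.
assert decode_grouping_bitmask(0b01, ['a', 'b']) == ['a']
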
Example #15
def _compile_config_value(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:
    val: pgast.BaseExpr

    if op.backend_setting:
        assert op.backend_expr is not None
        expr = op.backend_expr
    else:
        expr = op.expr

    with ctx.new() as subctx:
        if op.backend_setting or op.scope == qltypes.ConfigScope.GLOBAL:
            output_format = context.OutputFormat.NATIVE
        else:
            output_format = context.OutputFormat.JSONB

        with context.output_format(ctx, output_format):
            if isinstance(expr, irast.EmptySet):
                # Special handling for empty sets, because we want a
                # singleton representation of the value and not an empty rel
                # in this context.
                if op.cardinality is qltypes.SchemaCardinality.One:
                    val = pgast.NullConstant()
                elif subctx.env.output_format is context.OutputFormat.JSONB:
                    val = pgast.TypeCast(
                        arg=pgast.StringConstant(val='[]'),
                        type_name=pgast.TypeName(name=('jsonb', ), ),
                    )
                else:
                    val = pgast.TypeCast(
                        arg=pgast.ArrayExpr(elements=[]),
                        type_name=pgast.TypeName(name=('text[]', ), ),
                    )
            else:
                val = dispatch.compile(expr, ctx=subctx)
                assert isinstance(val, pgast.SelectStmt), "expected SelectStmt"

                pathctx.get_path_serialized_output(val,
                                                   expr.path_id,
                                                   env=ctx.env)

                if op.cardinality is qltypes.SchemaCardinality.Many:
                    val = output.aggregate_json_output(val, expr, env=ctx.env)

    # For globals, we need to output the binary encoding so that we
    # can just hand it back to the server. We abuse `record_send` to
    # act as a generic `_send` function
    if op.scope is qltypes.ConfigScope.GLOBAL:
        val = pgast.FuncCall(
            name=('substring', ),
            args=[
                pgast.FuncCall(
                    name=('record_send', ),
                    args=[pgast.RowExpr(args=[val])],
                ),
                # The first twelve bytes are header, the rest is the
                # encoding of the actual element
                pgast.NumericConstant(val="13"),
            ],
        )
        cast_name = s_casts.get_cast_fullname_from_names(
            'std', 'std::bytes', 'std::json')
        val = pgast.FuncCall(
            name=common.get_cast_backend_name(cast_name, aspect='function'),
            args=[val],
        )

    if op.backend_setting and op.scope is qltypes.ConfigScope.INSTANCE:
        assert isinstance(val, pgast.SelectStmt) and len(val.target_list) == 1
        val = val.target_list[0].val
        if isinstance(val, pgast.TypeCast):
            val = val.arg
        if not isinstance(val, pgast.BaseConstant):
            raise AssertionError('value is not a constant in ConfigSet')

    return val
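
The substring(... FROM 13) above relies on the layout PostgreSQL uses for
binary composites (what record_send emits): a 4-byte field count, then a
4-byte type OID and a 4-byte length word for the single field, before the
element payload. A sketch of that arithmetic, stated here as an assumption
about the wire format rather than something the compiler checks:

FIELD_COUNT_BYTES = 4  # int32: number of columns in the record (1 here)
TYPE_OID_BYTES = 4     # int32: the element's type OID
LENGTH_BYTES = 4       # int32: length of the element payload
assert FIELD_COUNT_BYTES + TYPE_OID_BYTES + LENGTH_BYTES == 12
# substring(... FROM 13) therefore skips exactly this 12-byte header.
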
Example #16
def compile_ConfigSet(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    val = _compile_config_value(op, ctx=ctx)
    result: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.INSTANCE and op.backend_setting:
        if not ctx.env.backend_runtime_params.has_configfile_access:
            raise errors.UnsupportedBackendFeatureError(
                "configuring backend parameters via CONFIGURE INSTANCE"
                " is not supported by the current backend")
        result = pgast.AlterSystem(
            name=op.backend_setting,
            value=val,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        if not isinstance(val, pgast.StringConstant):
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=('text', )),
            )

        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[pgast.StringConstant(val=op.backend_setting), val],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        if not isinstance(val, pgast.StringConstant):
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=('text', )),
            )

        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                val,
                pgast.BooleanConstant(val='false'),
            ],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.INSTANCE:
        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='SET'),
            pgast.StringConstant(val=str(op.scope)),
            pgast.StringConstant(val=op.name),
            val,
        ])

        result = pgast.FuncCall(
            name=('jsonb_build_array', ),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(target_list=[
            pgast.ResTarget(val=result, ),
        ], )
    elif op.scope in (qltypes.ConfigScope.SESSION, qltypes.ConfigScope.GLOBAL):
        flag = 'G' if op.scope is qltypes.ConfigScope.GLOBAL else 'C'
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(relation=pgast.Relation(
                name='_edgecon_state', ), ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name, ),
                    val,
                    pgast.StringConstant(val=flag, ),
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                    pgast.ColumnRef(name=['type']),
                ], ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[
                            val,
                        ], ),
                    ),
                ],
            ),
        )

        if op.scope is qltypes.ConfigScope.GLOBAL:
            result_row = pgast.RowExpr(args=[
                pgast.StringConstant(val='SET'),
                pgast.StringConstant(val=str(op.scope)),
                pgast.StringConstant(val=op.name),
                val,
            ])

            build_array = pgast.FuncCall(
                name=('jsonb_build_array', ),
                args=result_row.args,
                null_safe=True,
                ser_safe=True,
            )

            result = pgast.SelectStmt(
                ctes=[pgast.CommonTableExpr(
                    name='ins',
                    query=result,
                )],
                target_list=[pgast.ResTarget(val=build_array)],
            )

    elif op.scope is qltypes.ConfigScope.DATABASE:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(relation=pgast.Relation(
                name='_db_config',
                schemaname='edgedb',
            ), ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name, ),
                    val,
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                ], ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[
                            val,
                        ], ),
                    ),
                ],
            ),
        )
    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return result
Example #17
def array_as_json_object(expr, *, styperef, env):
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = env.aliases.get(st.element_name)
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = env.aliases.get(str(i))

            val = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            coldeflist.append(
                pgast.ColumnDef(name=colname,
                                typename=pgast.TypeName(
                                    name=pgtypes.pg_type_from_ir_typeref(st))))

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[
                                pgast.FuncCall(
                                    name=json_func,
                                    args=json_args,
                                )
                            ],
                        ),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(aliasname=env.aliases.get('q')),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ],
                )
            ],
        )
    else:
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr],
                              null_safe=True,
                              ser_safe=True)
Example #18
def unnamed_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    vals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        for el_idx, el_type in enumerate(styperef.subtypes):
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)], ),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return _build_json(
            'build_array',
            args=vals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

    else:
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):

            coldeflist.append(
                pgast.ColumnDef(
                    name=str(el_idx),
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type), ),
                ))

            val = pgast.ColumnRef(name=[str(el_idx)])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            vals.append(val)

        res = _build_json(
            'build_array',
            args=vals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ])
Example #19
def compile_ConfigSet(op: irast.ConfigSet, *,
                      ctx: context.CompilerContextLevel) -> pgast.Query:

    val: pgast.BaseExpr

    with ctx.new() as subctx:
        if isinstance(op.expr, irast.EmptySet):
            # Special handling for empty sets, because we want a
            # singleton representation of the value and not an empty rel
            # in this context.
            if op.cardinality is qltypes.SchemaCardinality.ONE:
                val = pgast.NullConstant()
            else:
                val = pgast.TypeCast(
                    arg=pgast.StringConstant(val='[]'),
                    type_name=pgast.TypeName(name=('jsonb', ), ),
                )
        else:
            val = dispatch.compile(op.expr, ctx=subctx)
            assert isinstance(val, pgast.SelectStmt), "expected ast.SelectStmt"

            pathctx.get_path_serialized_output(val,
                                               op.expr.path_id,
                                               env=ctx.env)
            if op.cardinality is qltypes.SchemaCardinality.MANY:
                val = output.aggregate_json_output(val, op.expr, env=ctx.env)

    result_row = pgast.RowExpr(args=[
        pgast.StringConstant(val='SET'),
        pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
        pgast.StringConstant(val=op.name),
        val,
    ])

    result = pgast.FuncCall(
        name=('jsonb_build_array', ),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    stmt: pgast.Query

    if not op.system:
        stmt = pgast.InsertStmt(
            relation=pgast.RelRangeVar(relation=pgast.Relation(
                name='_edgecon_state', ), ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name, ),
                    val,
                    pgast.StringConstant(val='C', ),
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                    pgast.ColumnRef(name=['type']),
                ], ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[
                            val,
                        ], ),
                    ),
                ],
            ),
        )
    else:
        stmt = pgast.SelectStmt(target_list=[
            pgast.ResTarget(val=result, ),
        ], )

    return stmt
Example #20
def _build_json(
    name: str,
    args: Sequence[pgast.BaseExpr],
    *,
    null_safe: bool = False,
    ser_safe: bool = False,
    nullable: Optional[bool] = None,
    env: context.Environment,
) -> pgast.BaseExpr:
    # PostgreSQL has a limit on the maximum number of arguments
    # passed to a function call, so we must chop input into chunks
    # if the argument count is greater than the limit.

    if len(args) > s_defs.MAX_FUNC_ARG_COUNT:
        json_func = _get_json_func(
            name,
            output_format=context.OutputFormat.JSONB,
            env=env,
        )

        chunk_iters = [iter(args)] * s_defs.MAX_FUNC_ARG_COUNT
        chunks = list(itertools.zip_longest(*chunk_iters, fillvalue=None))
        if len(args) != len(chunks) * s_defs.MAX_FUNC_ARG_COUNT:
            chunks[-1] = tuple(filter(None, chunks[-1]))

        result: pgast.BaseExpr = pgast.FuncCall(
            name=json_func,
            args=chunks[0],
            null_safe=null_safe,
            ser_safe=ser_safe,
            nullable=nullable,
        )

        for chunk in chunks[1:]:
            fc = pgast.FuncCall(
                name=json_func,
                args=chunk,
                null_safe=null_safe,
                ser_safe=ser_safe,
                nullable=nullable,
            )

            result = astutils.new_binop(
                lexpr=result,
                rexpr=fc,
                op='||',
            )

        if env.output_format in _JSON_FORMATS:
            result = pgast.TypeCast(arg=result,
                                    type_name=pgast.TypeName(name=('json', )))

        return result

    else:
        json_func = _get_json_func(name, env=env)

        return pgast.FuncCall(
            name=json_func,
            args=args,
            null_safe=null_safe,
            ser_safe=ser_safe,
            nullable=nullable,
        )
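
The chunking above uses the standard zip_longest idiom: zipping N references
to the same iterator yields fixed-size chunks, with the last chunk padded by
the fillvalue; _build_json then strips the padding with filter(None, ...),
which is safe there because its arguments are AST nodes and always truthy.
A self-contained sketch of the idiom with a made-up helper name:

from itertools import zip_longest

def chunked(items, size):
    chunk_iters = [iter(items)] * size
    chunks = [tuple(c) for c in zip_longest(*chunk_iters, fillvalue=None)]
    if chunks and len(items) % size:
        # Strip the padding from the final, shorter chunk.
        chunks[-1] = tuple(x for x in chunks[-1] if x is not None)
    return chunks

assert chunked([1, 2, 3, 4, 5], 2) == [(1, 2), (3, 4), (5,)]
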
Example #21
def compile_operator(expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr],
                     *, ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
         or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        assert rexpr
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_operator[1:])

    elif expr.sql_function:
        sql_func = expr.sql_function[0]
        func_name = tuple(sql_func.split('.', 1))
        if len(expr.sql_function) > 1:
            # Explicit operand types given in FROM SQL FUNCTION
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_function[1:])

        args = []
        if lexpr is not None:
            args.append(lexpr)
        if rexpr is not None:
            args.append(rexpr)

        result = pgast.FuncCall(name=func_name, args=args)

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
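
For reference, the branches at the top of compile_operator() map the compiled
argument list onto operand slots, which in turn determine how the generic
pgast.Expr is rendered: Infix fills both slots (lexpr OP rexpr), Prefix fills
only rexpr (OP rexpr), and Postfix fills only lexpr (lexpr OP), while
std::EXISTS bypasses the generic path entirely and becomes a negated
pgast.NullTest (i.e. "... IS NOT NULL").  A small illustrative sketch of that
mapping (operand_slots() below is a hypothetical helper, not compiler code):

from typing import Optional, Sequence, Tuple

def operand_slots(
    kind: str,
    args: Sequence[object],
) -> Tuple[Optional[object], Optional[object]]:
    # Mirrors the Infix/Prefix/Postfix branches above.
    if kind == 'Infix':      # rendered as:  lexpr OP rexpr
        return args[0], args[1]
    if kind == 'Prefix':     # rendered as:  OP rexpr
        return None, args[0]
    if kind == 'Postfix':    # rendered as:  lexpr OP
        return args[0], None
    raise RuntimeError(f'unexpected operator kind: {kind!r}')
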
Example #22
0
def compile_GroupStmt(stmt: irast.GroupStmt, *,
                      ctx: context.CompilerContextLevel) -> pgast.Query:

    parent_ctx = ctx
    with parent_ctx.substmt() as ctx:
        clauses.init_stmt(stmt, ctx=ctx, parent_ctx=parent_ctx)

        group_path_id = stmt.group_path_id

        # Process the GROUP .. BY part into a subquery.
        with ctx.subrel() as gctx:
            gctx.expr_exposed = False
            gquery = gctx.rel
            pathctx.put_path_bond(gquery, group_path_id)
            relctx.update_scope(stmt.subject, gquery, ctx=gctx)
            stmt.subject.path_scope = None
            clauses.compile_output(stmt.subject, ctx=gctx)
            subj_rvar = pathctx.get_path_rvar(gquery,
                                              stmt.subject.path_id,
                                              aspect='value',
                                              env=gctx.env)
            relctx.ensure_bond_for_expr(stmt.subject,
                                        subj_rvar.query,
                                        ctx=gctx)

            group_paths = set()

            part_clause: List[pgast.BaseExpr] = []

            for ir_gexpr in stmt.groupby:
                with gctx.new() as subctx:
                    partexpr = dispatch.compile(ir_gexpr, ctx=subctx)

                part_clause.append(partexpr)
                group_paths.add(ir_gexpr.path_id)

            # Since we will be computing arbitrary expressions
            # based on the grouped sets, it is more efficient
            # to compute the "group bond" as a small unique
            # value than it is to use GROUP BY and aggregate
            # actual id values into an array.
            #
            # To achieve this we use the first_value() window
            # function while using the GROUP BY clause as
            # a partition clause.  We use the id of the first
            # object in each partition if GROUP BY input is
            # an ObjectType, otherwise we generate the id using
            # row_number().
            if stmt.subject.path_id.is_objtype_path():
                first_val = pathctx.get_path_identity_var(gquery,
                                                          stmt.subject.path_id,
                                                          env=ctx.env)
            else:
                with ctx.subrel() as subctx:
                    wrapper = subctx.rel

                    gquery_rvar = relctx.rvar_for_rel(gquery, ctx=subctx)
                    wrapper.from_clause = [gquery_rvar]
                    relctx.pull_path_namespace(target=wrapper,
                                               source=gquery_rvar,
                                               ctx=subctx)

                    new_part_clause: List[pgast.BaseExpr] = []

                    for i, expr in enumerate(part_clause):
                        path_id = stmt.groupby[i].path_id
                        pathctx.put_path_value_var(gquery,
                                                   path_id,
                                                   expr,
                                                   force=True,
                                                   env=ctx.env)
                        output_ref = pathctx.get_path_value_output(gquery,
                                                                   path_id,
                                                                   env=ctx.env)
                        assert isinstance(output_ref, pgast.ColumnRef)
                        new_part_clause.append(
                            astutils.get_column(gquery_rvar, output_ref))

                    part_clause = new_part_clause

                    first_val = pathctx.get_rvar_path_identity_var(
                        gquery_rvar, stmt.subject.path_id, env=ctx.env)

                    gquery = wrapper
                    pathctx.put_path_bond(gquery, group_path_id)

            group_id = pgast.FuncCall(
                name=('first_value', ),
                args=[first_val],
                over=pgast.WindowDef(partition_clause=part_clause))

            pathctx.put_path_identity_var(gquery,
                                          group_path_id,
                                          group_id,
                                          env=ctx.env)

            pathctx.put_path_value_var(gquery,
                                       group_path_id,
                                       group_id,
                                       env=ctx.env)

        group_cte = pgast.CommonTableExpr(query=gquery,
                                          name=ctx.env.aliases.get('g'))

        group_cte_rvar = relctx.rvar_for_rel(group_cte, ctx=ctx)

        # Generate another subquery containing the distinct values of
        # the path expressions in BY.
        with ctx.subrel() as gvctx:
            gvquery = gvctx.rel
            relctx.include_rvar(gvquery,
                                group_cte_rvar,
                                path_id=group_path_id,
                                ctx=gvctx)

            pathctx.put_path_bond(gvquery, group_path_id)

            for group_set in stmt.groupby:
                dispatch.visit(group_set, ctx=gvctx)
                path_id = group_set.path_id
                if path_id.is_objtype_path():
                    pathctx.put_path_bond(gvquery, path_id)

            gvquery.distinct_clause = [
                pathctx.get_path_identity_var(gvquery,
                                              group_path_id,
                                              env=ctx.env)
            ]

            # Remove all paths other than the group paths and the
            # group bond from the group-values rel's rvar map.
            for path_id, aspect in list(gvquery.path_rvar_map):
                if path_id not in group_paths and path_id != group_path_id:
                    gvquery.path_rvar_map.pop((path_id, aspect))

            # Remove the group paths from the grouping rel's path maps.
            for path_id, aspect in list(gquery.path_rvar_map):
                if path_id in group_paths:
                    gquery.path_rvar_map.pop((path_id, aspect))
                    gquery.path_namespace.pop((path_id, aspect), None)
                    gquery.path_outputs.pop((path_id, aspect), None)

        groupval_cte = pgast.CommonTableExpr(query=gvquery,
                                             name=ctx.env.aliases.get('gv'))

        groupval_cte_rvar = relctx.rvar_for_rel(groupval_cte, ctx=ctx)

        o_stmt = stmt.result.expr
        assert isinstance(o_stmt, irast.SelectStmt)

        # Process the result expression.
        with ctx.subrel() as selctx:
            selquery = selctx.rel
            outer_id = stmt.result.path_id
            inner_id = o_stmt.result.path_id

            relctx.include_specific_rvar(selquery,
                                         groupval_cte_rvar,
                                         group_path_id,
                                         aspects=['identity'],
                                         ctx=ctx)

            for path_id in group_paths:
                selctx.path_scope[path_id] = selquery
                pathctx.put_path_rvar(selquery,
                                      path_id,
                                      groupval_cte_rvar,
                                      aspect='value',
                                      env=ctx.env)

            selctx.group_by_rels = selctx.group_by_rels.copy()
            selctx.group_by_rels[group_path_id, stmt.subject.path_id] = \
                group_cte

            selquery.view_path_id_map = {outer_id: inner_id}

            selquery.ctes.append(group_cte)

            sortoutputs = []

            selquery.ctes.append(groupval_cte)

            clauses.compile_output(o_stmt.result, ctx=selctx)

            # The WHERE clause
            if o_stmt.where is not None:
                selquery.where_clause = astutils.extend_binop(
                    selquery.where_clause,
                    clauses.compile_filter_clause(o_stmt.where,
                                                  o_stmt.where_card,
                                                  ctx=selctx))

            for ir_sortexpr in o_stmt.orderby:
                alias = ctx.env.aliases.get('s')
                sexpr = dispatch.compile(ir_sortexpr.expr, ctx=selctx)
                selquery.target_list.append(
                    pgast.ResTarget(val=sexpr, name=alias))
                sortoutputs.append(alias)

        if not gvquery.target_list:
            # No values were pulled from the group-values rel,
            # we must remove the DISTINCT clause to prevent
            # a syntax error.
            gvquery.distinct_clause[:] = []

        query = ctx.rel
        result_rvar = relctx.rvar_for_rel(selquery, lateral=True, ctx=ctx)
        relctx.include_rvar(query, result_rvar, path_id=outer_id, ctx=ctx)

        for rt in selquery.target_list:
            if rt.name is None:
                rt.name = ctx.env.aliases.get('v')
            if rt.name not in sortoutputs:
                query.target_list.append(
                    pgast.ResTarget(val=astutils.get_column(
                        result_rvar, rt.name),
                                    name=rt.name))

        for i, ir_oexpr in enumerate(o_stmt.orderby):
            sort_ref = astutils.get_column(result_rvar, sortoutputs[i])
            sortexpr = pgast.SortBy(node=sort_ref,
                                    dir=ir_oexpr.direction,
                                    nulls=ir_oexpr.nones_order)
            query.sort_clause.append(sortexpr)

        # The OFFSET clause
        if o_stmt.offset:
            with ctx.new() as ctx1:
                ctx1.expr_exposed = False
                query.limit_offset = dispatch.compile(o_stmt.offset, ctx=ctx1)

        # The LIMIT clause
        if o_stmt.limit:
            with ctx.new() as ctx1:
                ctx1.expr_exposed = False
                query.limit_count = dispatch.compile(o_stmt.limit, ctx=ctx1)

        clauses.fini_stmt(query, ctx, parent_ctx)

    return query
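
The central trick in compile_GroupStmt() is the one spelled out in the long
comment above: instead of aggregating ids with GROUP BY, every row of the
subject relation is tagged with a compact "group bond" computed by
first_value() over a window partitioned by the grouping expressions.  The
sketch below gives the approximate SQL shape of that grouping subquery; it is
illustrative only (the real query is assembled from pgast nodes and differs
in aliases and detail):

# Illustrative only -- approximate shape of the grouping subquery.
GROUP_BOND_SQL_SKETCH = '''
    SELECT
        first_value(<subject identity>) OVER (
            PARTITION BY <compiled GROUP .. BY expressions>
        ) AS <group bond>,
        ...
    FROM
        <compiled subject relation>
'''
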
Example #23
0
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env), args=[expr],
            null_safe=True, ser_safe=True)
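
For an array whose element type is a non-persistent tuple, the function above
unnests the array with a column definition list (which, as the comment notes,
is only legal for functions returning an anonymous "record") and aggregates
one build_object/build_array call per row.  The sketch below shows the
approximate SQL shape for an array of unnamed tuples, using the jsonb_*
spellings; the actual function names come from _get_json_func() and depend on
the output format, and the exact syntax of the generated query may differ:

# Illustrative only -- approximate shape for an array of unnamed,
# non-persistent tuples.
ARRAY_OF_TUPLES_JSON_SQL_SKETCH = '''
    SELECT
        coalesce(
            jsonb_agg(jsonb_build_array(q."0", q."1", ...)),
            '[]'
        )
    FROM
        ROWS FROM (
            unnest(<array expression>) AS ("0" <pg type 0>, "1" <pg type 1>, ...)
        ) AS q
'''
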
Example #24
0
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]

    is_tuple = irtyputils.is_tuple(el_type)
    # Tuples and bytes may need casts applied to the underlying values
    if is_tuple or irtyputils.is_bytes(el_type):
        coldeflist = []

        out_alias = env.aliases.get('q')

        val: pgast.BaseExpr
        if is_tuple:
            json_args: List[pgast.BaseExpr] = []
            is_named = any(st.element_name for st in el_type.subtypes)
            for i, st in enumerate(el_type.subtypes):
                if is_named:
                    colname = st.element_name
                    assert colname
                    json_args.append(pgast.StringConstant(val=colname))
                else:
                    colname = str(i)

                val = pgast.ColumnRef(name=[colname])
                val = serialize_expr_to_json(val,
                                             styperef=st,
                                             nested=True,
                                             env=env)

                json_args.append(val)

                if not irtyputils.is_persistent_tuple(el_type):
                    # Column definition list is only allowed for functions
                    # returning "record", i.e. an anonymous tuple, which
                    # would not be the case for schema-persistent tuple types.
                    coldeflist.append(
                        pgast.ColumnDef(
                            name=colname,
                            typename=pgast.TypeName(
                                name=pgtypes.pg_type_from_ir_typeref(st))))

            json_func = 'build_object' if is_named else 'build_array'
            agg_arg = _build_json(json_func, json_args, env=env)

            needs_unnest = bool(el_type.subtypes)
        else:
            assert not el_type.subtypes
            val = pgast.ColumnRef(name=[out_alias])
            agg_arg = serialize_expr_to_json(val,
                                             styperef=el_type,
                                             nested=True,
                                             env=env)
            needs_unnest = True

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[agg_arg],
                        ),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(alias=pgast.Alias(aliasname=out_alias),
                                    is_rowsfrom=True,
                                    functions=[
                                        pgast.FuncCall(
                                            name=('unnest', ),
                                            args=[expr],
                                            coldeflist=coldeflist,
                                        )
                                    ])
            ] if needs_unnest else [],
        )
    else:
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr],
                              null_safe=True,
                              ser_safe=True)
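
This revision generalizes the previous one: both tuple and bytes element
types are routed through serialize_expr_to_json() per element, and
needs_unnest controls whether the aggregate reads from an unnest() of the
array at all (an element tuple with no subtypes has nothing to unnest).  For
the bytes case the approximate SQL shape is the sketch below, again
illustrative only and using the jsonb_* spellings:

# Illustrative only -- approximate shape for an array of bytes, with
# each element serialized before aggregation.
ARRAY_OF_BYTES_JSON_SQL_SKETCH = '''
    SELECT
        coalesce(jsonb_agg(<serialized element of q>), '[]')
    FROM
        ROWS FROM (unnest(<array expression>)) AS q
'''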