def named_tuple_as_json_object(expr, *, styperef, env):
    """Serialize a named tuple *expr* into a JSON object expression.

    Elements are extracted positionally with the
    ``edgedb.row_getattr_by_num`` helper (1-based index), passing a
    typed NULL sentinel alongside; nested collections are recursively
    serialized to JSON.
    """
    key_value_args = []
    for position, subtype in enumerate(styperef.subtypes, start=1):
        # The element name becomes the JSON object key.
        key_value_args.append(
            pgast.StringConstant(val=subtype.element_name))
        # A NULL cast to the element's SQL type — presumably used by
        # the helper to carry the result type (confirm against the
        # edgedb.row_getattr_by_num definition).
        sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(subtype)))
        element = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(position)),
                sentinel,
            ])
        if irtyputils.is_collection(subtype):
            element = coll_as_json_object(element, styperef=subtype, env=env)
        key_value_args.append(element)
    return pgast.FuncCall(
        name=_get_json_func('build_object', env=env),
        args=key_value_args,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable)
def fini_stmt(
        stmt: pgast.Query,
        ctx: context.CompilerContextLevel,
        parent_ctx: context.CompilerContextLevel) -> None:
    """Finalize a compiled statement.

    For the toplevel statement, record the argument-name map on the
    query and, when positional parameters are in use, reference every
    otherwise-unused query parameter inside a dummy ``__unused_vars``
    CTE so the backend still sees all declared parameters.
    """
    if stmt is ctx.toplevel_stmt:
        stmt.argnames = argmap = ctx.argmap

        if not ctx.env.use_named_params:
            # Adding unused parameters into a CTE
            targets = []
            for param in ctx.env.query_params:
                if param.name in argmap:
                    # Already referenced somewhere in the query body.
                    continue
                if param.name.isdecimal():
                    # Positional names map directly to a 1-based index.
                    idx = int(param.name) + 1
                else:
                    # NOTE(review): len(argmap) + 1 may collide with an
                    # index assigned from a decimal name above — confirm
                    # the two naming schemes are never mixed.
                    idx = len(argmap) + 1
                argmap[param.name] = pgast.Param(
                    index=idx,
                    required=param.required,
                )
                # Cast the bare parameter to its declared type so the
                # reference is well-typed even though it is unused.
                targets.append(
                    pgast.ResTarget(val=pgast.TypeCast(
                        arg=pgast.ParamRef(number=idx),
                        type_name=pgast.TypeName(
                            name=pg_types.pg_type_from_ir_typeref(
                                param.ir_type)))))
            if targets:
                ctx.toplevel_stmt.ctes.append(
                    pgast.CommonTableExpr(
                        name="__unused_vars",
                        query=pgast.SelectStmt(target_list=targets)))
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple *expr* into a JSON array expression.

    If no element is itself a collection, the whole row is handed to
    the ``edgedb.row_to_array`` helper in a single call; otherwise each
    element is extracted individually, recursing into collections.
    """
    has_colls = any(irtyputils.is_collection(st) for st in styperef.subtypes)
    if not has_colls:
        # No nested collections, take the fast path.
        return pgast.FuncCall(
            name=('edgedb',) + _get_json_func('row_to_array', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable)

    vals = []
    for el_idx, el_type in enumerate(styperef.subtypes):
        # A NULL cast to the element's SQL type — presumably used by
        # row_getattr_by_num to carry the result type (confirm against
        # the helper's definition).
        type_sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(el_type)
            )
        )
        # Attribute access is positional and 1-based.
        val = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(el_idx + 1)),
                type_sentinel
            ])
        if irtyputils.is_collection(el_type):
            val = coll_as_json_object(val, styperef=el_type, env=env)
        vals.append(val)

    return pgast.FuncCall(
        name=_get_json_func('build_array', env=env),
        args=vals,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable)
def top_output_as_value(stmt: pgast.SelectStmt,
                        ir_set: irast.Set, *,
                        env: context.Environment) -> pgast.SelectStmt:
    """Finalize output serialization on the top level."""
    fmt = env.output_format

    if fmt is context.OutputFormat.JSON and not env.expected_cardinality_one:
        # For JSON we just want to aggregate the whole thing
        # into a JSON array.
        return aggregate_json_output(stmt, ir_set, env=env)

    is_native = (
        fmt is context.OutputFormat.NATIVE
        or fmt is context.OutputFormat.NATIVE_INTERNAL
    )
    if env.explicit_top_cast is not None and is_native:
        # Wrap the single output column in the explicitly requested cast.
        cast_expr = pgast.TypeCast(
            arg=stmt.target_list[0].val,
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(
                    env.explicit_top_cast,
                    persistent_tuples=True,
                ),
            ),
        )
        stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=cast_expr,
        )
        return stmt

    if fmt is context.OutputFormat.SCRIPT:
        return wrap_script_stmt(stmt, env=env)

    # JSON_ELEMENTS and BINARY don't require any wrapping
    return stmt
def compile_Parameter(
        expr: irast.Parameter, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a query parameter reference into a typed parameter node.

    Non-positional parameters become NamedParamRef when named-parameter
    mode is on; otherwise a positional index is resolved from (or, on
    first use, assigned into) ``ctx.argmap`` and a ParamRef is emitted.
    The result is cast to the parameter's backend type.
    """
    result: pgast.BaseParamRef
    is_decimal: bool = expr.name.isdecimal()

    if not is_decimal and ctx.env.use_named_params:
        result = pgast.NamedParamRef(
            name=expr.name,
            nullable=not expr.required,
        )
    else:
        try:
            index = ctx.argmap[expr.name].index
        except KeyError:
            # First reference to this parameter: assign an index.
            # (The previous version re-checked `expr.name in ctx.argmap`
            # here, which is unreachable after a KeyError — removed.)
            if expr.name.startswith('__edb_arg_'):
                # Implicit argument names encode their index directly.
                index = int(expr.name[10:]) + 1
            elif is_decimal:
                index = int(expr.name) + 1
            else:
                index = next(ctx.next_argument)
            ctx.argmap[expr.name] = pgast.Param(
                index=index,
                required=expr.required,
            )
        result = pgast.ParamRef(number=index, nullable=not expr.required)

    return pgast.TypeCast(
        arg=result,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)))
def compile_StringConstant(expr: irast.StringConstant, *,
                           ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile a string literal into an escaped constant cast to its type."""
    literal = pgast.EscapedStringConstant(val=expr.value)
    target_type = pgast.TypeName(
        name=pg_types.pg_type_from_ir_typeref(expr.typeref))
    return pgast.TypeCast(arg=literal, type_name=target_type)
def top_output_as_value(
        stmt: pgast.Query,
        ir_set: irast.Set, *,
        env: context.Environment) -> pgast.Query:
    """Finalize output serialization on the top level.

    JSON output of non-singleton cardinality is aggregated into a JSON
    array; NATIVE output with an explicit top-level cast gets wrapped
    in that cast; everything else is returned unchanged.
    """
    if (env.output_format is context.OutputFormat.JSON
            and not env.expected_cardinality_one):
        # For JSON we just want to aggregate the whole thing
        # into a JSON array.
        return aggregate_json_output(stmt, ir_set, env=env)
    elif (env.output_format is context.OutputFormat.NATIVE
            and env.explicit_top_cast is not None):
        # Replace the single output column with a cast to the
        # explicitly requested top-level type.
        typecast = pgast.TypeCast(
            arg=stmt.target_list[0].val,
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(
                    env.explicit_top_cast,
                    persistent_tuples=True,
                ),
            ),
        )
        stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=typecast,
        )
        return stmt
    else:
        # Other output formats need no wrapping.
        return stmt
def fini_toplevel(stmt: pgast.Query, ctx: context.CompilerContextLevel) -> None: scan_check_ctes(stmt, ctx.env.check_ctes, ctx=ctx) # Type rewrites go first. if stmt.ctes is None: stmt.ctes = [] stmt.ctes[:0] = list(ctx.type_ctes.values()) stmt.argnames = argmap = ctx.argmap if not ctx.env.use_named_params: # Adding unused parameters into a CTE targets = [] for param in ctx.env.query_params: pgparam = argmap[param.name] if pgparam.used: continue targets.append( pgast.ResTarget(val=pgast.TypeCast( arg=pgast.ParamRef(number=pgparam.index), type_name=pgast.TypeName( name=pg_types.pg_type_from_ir_typeref( param.ir_type))))) if targets: stmt.append_cte( pgast.CommonTableExpr( name="__unused_vars", query=pgast.SelectStmt(target_list=targets)))
def _get_rel_object_id_output(
        rel: pgast.BaseRelation,
        path_id: irast.PathId, *,
        aspect: str,
        ptr_info: typing.Optional[
            pg_types.PointerStorageInfo]=None,
        env: context.Environment) -> pgast.OutputVar:
    """Return (creating it if needed) the output var for an object id."""
    cached = rel.path_outputs.get((path_id, aspect))
    if cached is not None:
        return cached

    if isinstance(rel, pgast.NullRelation):
        # A null relation has no real `id` column: emit a typed NULL
        # under a fresh alias and reference that.
        alias = env.aliases.get('id')
        null_id = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(name=('uuid',)),
        )
        rel.target_list.append(pgast.ResTarget(name=alias, val=null_id))
        result = pgast.ColumnRef(name=[alias], nullable=True)
    else:
        result = pgast.ColumnRef(name=['id'], nullable=False)

    _put_path_output_var(rel, path_id, aspect, result, env=env)
    return result
def compile_FloatConstant(expr: irast.BaseConstant, *,
                          ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a float literal into a numeric constant cast to its type."""
    target_type = pgast.TypeName(
        name=pg_types.pg_type_from_ir_typeref(expr.typeref))
    return pgast.TypeCast(
        arg=pgast.NumericConstant(val=expr.value),
        type_name=target_type,
    )
def compile_Parameter(
        expr: irast.Parameter, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a query parameter reference into a typed parameter node.

    Decimal names map directly to positional 1-based indexes; other
    names use NamedParamRef in named-parameter mode, or are assigned
    the next free slot in ``ctx.argmap`` otherwise.
    """
    result: pgast.BaseParamRef
    if expr.name.isdecimal():
        # Positional parameter: the name encodes the 0-based index.
        index = int(expr.name) + 1
        result = pgast.ParamRef(number=index)
    else:
        if ctx.env.use_named_params:
            result = pgast.NamedParamRef(name=expr.name)
        else:
            # Allocate (or reuse) a positional slot for this name.
            if expr.name in ctx.argmap:
                index = ctx.argmap[expr.name]
            else:
                index = len(ctx.argmap) + 1
                ctx.argmap[expr.name] = index
            result = pgast.ParamRef(number=index)

    return pgast.TypeCast(
        arg=result,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)
        )
    )
def output_as_value(
        expr: pgast.BaseExpr, *,
        env: context.Environment) -> pgast.BaseExpr:
    """Render *expr* as a plain value expression.

    Tuple vars are flattened into (implicit) ROW expressions, with an
    added cast to the composite type for persistent tuples.
    """
    if not isinstance(expr, pgast.TupleVar):
        return expr

    RowCls: Union[Type[pgast.ImplicitRowExpr], Type[pgast.RowExpr]]
    RowCls = (pgast.ImplicitRowExpr
              if len(expr.elements) > 1
              else pgast.RowExpr)

    row_args = [output_as_value(el.val, env=env) for el in expr.elements]
    val: pgast.BaseExpr = RowCls(args=row_args)

    needs_cast = (
        expr.typeref is not None
        and not env.singleton_mode
        and irtyputils.is_persistent_tuple(expr.typeref)
    )
    if needs_cast:
        # Persistent tuples have a named composite type in the schema;
        # cast so the ROW takes on that type.
        val = pgast.TypeCast(
            arg=val,
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(expr.typeref),
            ),
        )
    return val
def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a cast, either as a native SQL cast or a function call."""
    inner = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        return pgast.TypeCast(
            arg=inner,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.to_type)))

    if expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            cast_fn = common.get_cast_backend_name(
                expr.cast_name, expr.cast_module_id, aspect='function')
        else:
            cast_fn = tuple(expr.sql_function.split('.'))
        return pgast.FuncCall(name=cast_fn, args=[inner])

    raise RuntimeError('cast not supported')
def output_as_value(expr: pgast.BaseExpr, *,
                    env: context.Environment) -> pgast.BaseExpr:
    """Render *expr* as a plain value expression.

    Tuple vars become (implicit) ROW expressions; persistent tuples
    additionally get a cast to their schema-level composite type.
    In NATIVE_INTERNAL mode, a bare object reference (a single-element
    tuple whose element is the ``id`` pointer) is unwrapped to the raw
    id value.
    """
    val = expr
    if isinstance(expr, pgast.TupleVar):
        RowCls: Union[Type[pgast.ImplicitRowExpr], Type[pgast.RowExpr]]
        if (env.output_format is context.OutputFormat.NATIVE_INTERNAL
                and len(expr.elements) == 1
                and (path_id := (el0 := expr.elements[0]).path_id) is not None
                and (rptr_name := path_id.rptr_name()) is not None
                and (rptr_name.name == 'id')):
            # This is a special mode whereby bare refs to objects
            # are serialized to UUID values.
            return output_as_value(el0.val, env=env)
        elif len(expr.elements) > 1:
            RowCls = pgast.ImplicitRowExpr
        else:
            RowCls = pgast.RowExpr

        val = RowCls(
            args=[output_as_value(e.val, env=env) for e in expr.elements])

        if (expr.typeref is not None
                and not env.singleton_mode
                and irtyputils.is_persistent_tuple(expr.typeref)):
            # Persistent tuples have a named composite type; cast so
            # the ROW takes on that type.
            pg_type = pgtypes.pg_type_from_ir_typeref(expr.typeref)
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=pg_type),
            )

    # Fix: the original fell off the end without a return statement,
    # yielding None for every non-special-cased input.
    return val
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple *expr* into a JSON array expression.

    Schema-persistent tuples (``in_schema``) have real attribute names
    and can be addressed with indirection; anonymous tuples must be
    unnested with an explicit column definition list first.
    """
    vals = []

    if styperef.in_schema:
        for el_idx, el_type in enumerate(styperef.subtypes):
            # Elements of persistent tuples are addressed by position
            # name ("0", "1", ...).
            val = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)],),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)
    else:
        # Anonymous tuple: declare a typed column per element so that
        # unnest() can decompose the record.
        coldeflist = []
        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(
                pgast.ColumnDef(
                    name=str(el_idx),
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))
            val = pgast.ColumnRef(name=[str(el_idx)])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        # Wrap the tuple in a one-element array and unnest it to get
        # at the individual columns.
        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res,),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest',),
                        args=[pgast.ArrayExpr(elements=[expr],)],
                        coldeflist=coldeflist,
                    )
                ])
            ])
def compile_typeref(expr: irast.TypeRef) -> pgast.BaseExpr:
    """Compile a type reference into a UUID constant naming the type."""
    if expr.collection:
        # Collection type references are not handled here.
        raise NotImplementedError()
    return pgast.TypeCast(
        arg=pgast.StringConstant(val=str(expr.id)),
        type_name=pgast.TypeName(name=('uuid',)),
    )
def tuple_getattr(tuple_val, tuple_typeref, attr):
    """Compile an attribute access on a tuple value.

    Schema-persistent tuples support direct column indirection;
    anonymous tuples are unnested with an explicit column definition
    list and the element is selected by its position.
    """
    # Fix: the local list previously named `pgtypes` shadowed the
    # `pgtypes` module alias used by sibling functions in this file;
    # renamed to avoid confusion.
    el_names = []
    el_pg_types = []
    for i, st in enumerate(tuple_typeref.subtypes):
        el_pg_types.append(pg_types.pg_type_from_ir_typeref(st))
        # Unnamed elements are addressed by their position.
        el_names.append(st.element_name if st.element_name else str(i))

    index = el_names.index(attr)

    if tuple_typeref.in_schema:
        set_expr = pgast.Indirection(
            arg=tuple_val,
            indirection=[
                pgast.ColumnRef(name=[attr]),
            ],
        )
    else:
        # Wrap the tuple in a one-element array and unnest it to get
        # at the individual columns.
        set_expr = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[str(index)]),
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(elements=[tuple_val]),
                            ],
                            coldeflist=[
                                pgast.ColumnDef(
                                    name=str(i),
                                    typename=pgast.TypeName(name=t),
                                )
                                for i, t in enumerate(el_pg_types)
                            ],
                        )
                    ]
                )
            ]
        )

    return set_expr
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a (non set-returning) function call.

    Raises RuntimeError for set-returning functions, which cannot
    appear in simple expressions.
    """
    if expr.typemod is ql_ft.TypeModifier.SET_OF:
        raise RuntimeError(
            'set returning functions are not supported in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        # An empty variadic tail still needs a typed empty array
        # argument so the backend signature matches.
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.variadic_param_type)
            )
        )
        args.append(pgast.VariadicArgument(expr=var))

    if expr.func_sql_function:
        # The name might contain a "." if it's one of our
        # metaschema helpers.
        name = tuple(expr.func_sql_function.split('.', 1))
    else:
        name = common.get_function_backend_name(expr.func_shortname,
                                                expr.func_module_id)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)
            )
        )

    return result
def compile_Array(expr: irast.Base, *,
                  ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile an array literal; empty arrays get an explicit type cast."""
    compiled = [dispatch.compile(item, ctx=ctx) for item in expr.elements]
    array_expr = astutils.safe_array_expr(compiled)

    if not irutils.is_empty_array_expr(expr):
        return array_expr

    # An empty array literal carries no element to infer a type from,
    # so cast it to the statically known array type.
    serialized = output.in_serialization_ctx(ctx=ctx)
    return pgast.TypeCast(
        arg=array_expr,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(
                expr.typeref, serialized=serialized)))
def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a cast, as a native SQL cast or a function call.

    Casts marked Required additionally raise on NULL input via the
    ``edgedb._raise_exception_on_null`` helper.
    """
    pg_expr = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        pg_type = pg_types.pg_type_from_ir_typeref(expr.to_type)
        res: pgast.BaseExpr = pgast.TypeCast(
            arg=pg_expr,
            type_name=pgast.TypeName(
                name=pg_type
            )
        )
    elif expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            func_name = common.get_cast_backend_name(
                expr.cast_name, expr.cast_module_id, aspect='function')
        else:
            func_name = tuple(expr.sql_function.split('.'))
        res = pgast.FuncCall(
            name=func_name,
            args=[pg_expr],
        )
    else:
        raise RuntimeError('cast not supported')

    if expr.cardinality_mod is qlast.CardinalityModifier.Required:
        # A required cast must error out on NULL rather than
        # propagate it.
        res = pgast.FuncCall(
            name=('edgedb', '_raise_exception_on_null'),
            args=[
                res,
                pgast.StringConstant(
                    val='invalid_parameter_value',
                ),
                pgast.StringConstant(
                    val='invalid null value in cast',
                ),
                pgast.StringConstant(val=''),
            ]
        )

    return res
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a (non set-returning) function call.

    Raises RuntimeError for set-returning functions, which cannot
    appear in simple expressions.
    """
    if expr.typemod is ql_ft.TypeModifier.SetOfType:
        raise RuntimeError(
            'set returning functions are not supported in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        # An empty variadic tail still needs a typed empty array
        # argument so the backend signature matches.
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.variadic_param_type)
            )
        )
        args.append(pgast.VariadicArgument(expr=var))

    name = relgen.get_func_call_backend_name(expr, ctx=ctx)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)
            )
        )

    return result
def compile_Parameter(expr: irast.Parameter, *,
                      ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a parameter reference, marking positional params as used."""
    ref: pgast.BaseParamRef
    nullable = not expr.required

    if ctx.env.use_named_params and not expr.name.isdecimal():
        ref = pgast.NamedParamRef(name=expr.name, nullable=nullable)
    else:
        # Positional mode: the index was pre-assigned in the argmap;
        # flag the parameter as referenced.
        param = ctx.argmap[expr.name]
        param.used = True
        ref = pgast.ParamRef(number=param.index, nullable=nullable)

    return pgast.TypeCast(
        arg=ref,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)))
def insert_value_for_shape_element(
        insert_stmt: pgast.InsertStmt,
        wrapper: pgast.Query,
        ir_stmt: irast.MutatingStmt,
        shape_el: irast.Set,
        iterator_id: pgast.OutputVar, *,
        ptr_info: pg_types.PointerStorageInfo,
        ctx: context.CompilerContextLevel) -> pgast.OutputVar:
    """Compile the value expression for one element of an INSERT shape.

    Returns the expression to place in the VALUES list for the storage
    column described by *ptr_info*.
    """
    rel = compile_insert_shape_element(insert_stmt, wrapper, ir_stmt,
                                       shape_el, iterator_id, ctx=ctx)

    insvalue = pathctx.get_path_value_var(rel, shape_el.path_id, env=ctx.env)

    if isinstance(insvalue, pgast.TupleVar):
        if shape_el.path_id.is_objtype_path():
            # Object value: store only the link target, found via the
            # std::target pointer among the tuple elements.
            for element in insvalue.elements:
                name = element.path_id.rptr_name()
                if name == 'std::target':
                    insvalue = pathctx.get_path_value_var(
                        rel, element.path_id, env=ctx.env)
                    break
            else:
                raise RuntimeError('could not find std::target in '
                                   'insert computable')
        else:
            insvalue = output.output_as_value(insvalue, env=ctx.env)

        # Coerce to the storage column type.
        insvalue = pgast.TypeCast(
            arg=insvalue,
            type_name=pgast.TypeName(name=ptr_info.column_type),
        )

    return insvalue
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize an array expression to JSON.

    Arrays of scalars go straight through ``to_json(b)``.  Arrays of
    tuples are unnested, each tuple is rebuilt as a JSON object (named
    tuples) or array (unnamed tuples), and the results are aggregated
    back into a JSON array (``'[]'`` for an empty input).
    """
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                # JSON object key for this element.
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    # COALESCE to '[]' so an empty array does not
                    # aggregate into NULL.
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        # Scalar elements serialize directly.
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr], null_safe=True, ser_safe=True)
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize a named tuple *expr* into a JSON object expression.

    Persistent tuples have real attribute names and can be addressed
    with indirection; anonymous tuples are unnested with an explicit
    column definition list first.
    """
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            # Persistent tuples support direct attribute access.
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(
                        name=[el_type.element_name]
                    )
                ]
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)
    else:
        # Anonymous tuple: declare a typed column per element so that
        # unnest() can decompose the record.
        coldeflist = []

        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(pgast.ColumnDef(
                name=el_type.element_name,
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            keyvals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        # Wrap the tuple in a one-element array and unnest it to get
        # at the individual columns.
        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=res,
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[expr],
                                )
                            ],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
def compile_operator(
        expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an EdgeQL operator call into a SQL operator expression.

    Depending on the operator's definition, this produces a plain SQL
    operator expression, a function call, or a NULL test (for
    std::EXISTS).  Object equality is special-cased to direct identity
    comparison.
    """
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    # Assign operands according to the operator's fixity.
    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
            or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        # Object (in)equality compiles to direct identity comparison.
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        # EXISTS is a NOT NULL test on its operand.
        assert rexpr
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_operator[1:])

    elif expr.sql_function:
        sql_func = expr.sql_function[0]
        func_name = tuple(sql_func.split('.', 1))
        if len(expr.sql_function) > 1:
            # Explicit operand types given in FROM SQL FUNCTION
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_function[1:])

        args = []
        if lexpr is not None:
            args.append(lexpr)
        if rexpr is not None:
            args.append(rexpr)

        result = pgast.FuncCall(name=func_name, args=args)

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def process_link_update(
        *,
        ir_stmt: irast.MutatingStmt,
        ir_set: irast.Set,
        props_only: bool,
        is_insert: bool,
        wrapper: pgast.Query,
        dml_cte: pgast.CommonTableExpr,
        iterator_cte: typing.Optional[pgast.CommonTableExpr],
        ctx: context.CompilerContextLevel) -> pgast.CommonTableExpr:
    """Perform updates to a link relation as part of a DML statement.

    :param ir_stmt:
        IR of the statement.
    :param ir_set:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param is_insert:
        Whether the enclosing DML statement is an INSERT (skips the
        initial DELETE and the ON CONFLICT handling).
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    :returns: the CTE producing the new link table rows.
    """
    toplevel = ctx.toplevel_stmt

    rptr = ir_set.rptr
    ptrref = rptr.ptrref
    assert isinstance(ptrref, irast.PointerRef)
    target_is_scalar = irtyputils.is_scalar(ptrref.dir_target)
    path_id = ir_set.path_id

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    if ptrref.material_ptr is not None:
        mptrref = ptrref.material_ptr
        assert isinstance(mptrref, irast.PointerRef)
    else:
        mptrref = ptrref

    target_rvar = relctx.range_for_ptrref(
        mptrref, include_overlays=False, only_self=True, ctx=ctx)
    assert isinstance(target_rvar, pgast.RelRangeVar)
    assert isinstance(target_rvar.relation, pgast.Relation)
    target_alias = target_rvar.alias.aliasname

    target_tab_name = (target_rvar.relation.schemaname,
                       target_rvar.relation.name)

    dml_cte_rvar = pgast.RelRangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=ctx.env.aliases.get('m')))

    # Fixed column values common to every produced link row.
    col_data = {
        'ptr_item_id': pgast.TypeCast(
            arg=pgast.StringConstant(val=str(mptrref.id)),
            type_name=pgast.TypeName(name=('uuid',))),
        'source': pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env)
    }

    if not is_insert:
        # Drop all previous link records for this source.
        delcte = pgast.CommonTableExpr(
            query=pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=col_data['source'],
                    op='=',
                    rexpr=pgast.ColumnRef(
                        name=[target_alias, 'source'])),
                using_clause=[dml_cte_rvar],
                returning_list=[
                    pgast.ResTarget(val=pgast.ColumnRef(
                        name=[target_alias, pgast.Star()]))
                ]),
            name=ctx.env.aliases.get(hint='d'))

        pathctx.put_path_value_rvar(
            delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

        # Record the effect of this removal in the relation overlay
        # context to ensure that references to the link in the result
        # of this DML statement yield the expected results.
        dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
        relctx.add_ptr_rel_overlay(
            ptrref, 'except', delcte, dml_stmts=dml_stack, ctx=ctx)
        toplevel.ctes.append(delcte)

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte, specified_cols = process_link_values(
        ir_stmt, ir_set, target_tab_name, col_data,
        dml_cte_rvar, [], props_only, target_is_scalar,
        iterator_cte, ctx=ctx)

    toplevel.ctes.append(data_cte)

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(val=pgast.ColumnRef(
                name=[data_cte.name, pgast.Star()]))
        ],
        from_clause=[pgast.RelRangeVar(relation=data_cte)])

    cols = [pgast.ColumnRef(name=[col]) for col in specified_cols]

    if is_insert:
        conflict_clause = None
    else:
        # Inserting rows into the link table may produce cardinality
        # constraint violations, since the INSERT into the link table
        # is executed in the snapshot where the above DELETE from
        # the link table is not visible.  Hence, we need to use
        # the ON CONFLICT clause to resolve this.
        conflict_cols = ['source', 'target', 'ptr_item_id']
        conflict_inference = []
        conflict_exc_row = []

        for col in conflict_cols:
            conflict_inference.append(pgast.ColumnRef(name=[col]))
            conflict_exc_row.append(
                pgast.ColumnRef(name=['excluded', col]))

        conflict_data = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.ColumnRef(
                    name=[data_cte.name, pgast.Star()]))
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
            where_clause=astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
                rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
                op='='))

        conflict_clause = pgast.OnConflictClause(
            action='update',
            infer=pgast.InferClause(index_elems=conflict_inference),
            target_list=[
                pgast.MultiAssignRef(columns=cols, source=conflict_data)
            ])

    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=conflict_clause,
            returning_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()]))
            ]))

    pathctx.put_path_value_rvar(
        updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that references to the link in the result
    # of this DML statement yield the expected results.
    dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
    relctx.add_ptr_rel_overlay(
        ptrref, 'union', updcte, dml_stmts=dml_stack, ctx=ctx)

    toplevel.ctes.append(updcte)

    return data_cte
def process_update_body(
        ir_stmt: irast.MutatingStmt,
        wrapper: pgast.Query,
        update_cte: pgast.CommonTableExpr,
        range_cte: pgast.CommonTableExpr, *,
        ctx: context.CompilerContextLevel):
    """Generate SQL DML CTEs from an UpdateStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param update_cte:
        CTE representing the SQL UPDATE to the main relation of the
        Object.
    :param range_cte:
        CTE representing the range affected by the statement.
    """
    update_stmt = update_cte.query
    assert isinstance(update_stmt, pgast.UpdateStmt)

    external_updates = []

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(range_cte)
    toplevel.ctes.append(update_cte)

    with ctx.newscope() as subctx:
        # It is necessary to process the expressions in
        # the UpdateStmt shape body in the context of the
        # UPDATE statement so that references to the current
        # values of the updated object are resolved correctly.
        subctx.path_scope[ir_stmt.subject.path_id] = update_stmt
        subctx.rel = update_stmt
        subctx.expr_exposed = False

        for shape_el in ir_stmt.subject.shape:
            ptrref = shape_el.rptr.ptrref
            updvalue = shape_el.expr

            # Storage in the object table (if any).
            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=True, link_bias=False)

            if ptr_info.table_type == 'ObjectType' and updvalue is not None:
                with subctx.newscope() as scopectx:
                    # First, process all internal link updates
                    updtarget = pgast.UpdateTarget(
                        name=ptr_info.column_name,
                        val=pgast.TypeCast(
                            arg=dispatch.compile(updvalue, ctx=scopectx),
                            type_name=pgast.TypeName(
                                name=ptr_info.column_type)))
                    update_stmt.targets.append(updtarget)

            props_only = is_props_only_update(shape_el, ctx=subctx)

            # Now check for a link-table representation.
            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False, link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_updates.append((shape_el, props_only))

    if not update_stmt.targets:
        # No updates directly to the set target table,
        # so convert the UPDATE statement into a SELECT.
        from_clause: typing.List[pgast.BaseRangeVar] = [update_stmt.relation]
        from_clause.extend(update_stmt.from_clause)
        update_cte.query = pgast.SelectStmt(
            ctes=update_stmt.ctes,
            target_list=update_stmt.returning_list,
            from_clause=from_clause,
            where_clause=update_stmt.where_clause,
            path_namespace=update_stmt.path_namespace,
            path_outputs=update_stmt.path_outputs,
            path_scope=update_stmt.path_scope,
            path_rvar_map=update_stmt.path_rvar_map.copy(),
            view_path_id_map=update_stmt.view_path_id_map.copy(),
            ptr_join_map=update_stmt.ptr_join_map.copy(),
        )

    # Process necessary updates to the link tables.
    for expr, props_only in external_updates:
        # NOTE(review): props_only is computed per-element above but
        # False is passed here unconditionally — confirm this is
        # intentional.
        process_link_update(
            ir_stmt=ir_stmt, ir_set=expr, props_only=False,
            wrapper=wrapper, dml_cte=update_cte, iterator_cte=None,
            is_insert=False, ctx=ctx)
def process_insert_body(ir_stmt: irast.MutatingStmt,
                        wrapper: pgast.Query,
                        insert_cte: pgast.CommonTableExpr,
                        insert_rvar: pgast.PathRangeVar, *,
                        ctx: context.CompilerContextLevel) -> None:
    """Generate SQL DML CTEs from an InsertStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param insert_cte:
        CTE representing the SQL INSERT to the main relation of the Object.
    :param insert_rvar:
        Range var over *insert_cte*, used to resolve references to the
        inserted object from parent-link property shape elements.
    """
    # Columns of the INSERT and the matching SELECT output expressions.
    # `values` aliases `select.target_list`, so appends to either name
    # mutate the same list -- the two lists must stay index-aligned
    # with `cols`.
    cols = [pgast.ColumnRef(name=['__type__'])]
    select = pgast.SelectStmt(target_list=[])
    values = select.target_list

    # The main INSERT query of this statement will always be
    # present to insert at least the `id` and `__type__`
    # properties.
    insert_stmt = insert_cte.query
    assert isinstance(insert_stmt, pgast.InsertStmt)

    insert_stmt.cols = cols
    insert_stmt.select_stmt = select

    # An iterator set is present when this INSERT is the body of an
    # EdgeQL FOR statement.
    if ir_stmt.parent_stmt is not None:
        iterator_set = ir_stmt.parent_stmt.iterator_stmt
    else:
        iterator_set = None

    if iterator_set is not None:
        # Compile the FOR iterator into its own CTE and join it into
        # the INSERT's source SELECT so each iteration produces a row.
        with ctx.substmt() as ictx:
            ictx.path_scope = ictx.path_scope.new_child()
            ictx.path_scope[iterator_set.path_id] = ictx.rel
            clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx)
            ictx.rel.path_id = iterator_set.path_id
            pathctx.put_path_bond(ictx.rel, iterator_set.path_id)
            iterator_cte = pgast.CommonTableExpr(
                query=ictx.rel,
                name=ctx.env.aliases.get('iter'))
            ictx.toplevel_stmt.ctes.append(iterator_cte)
        iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx)
        relctx.include_rvar(select, iterator_rvar,
                            path_id=ictx.rel.path_id, ctx=ctx)
        iterator_id = pathctx.get_path_identity_var(
            select, iterator_set.path_id, env=ctx.env)
    else:
        iterator_cte = None
        iterator_id = None

    # Resolve the concrete (material) type of the inserted object and
    # emit its type id as the `__type__` column value.
    typeref = ir_stmt.subject.typeref
    if typeref.material_type is not None:
        typeref = typeref.material_type

    values.append(
        pgast.ResTarget(val=pgast.TypeCast(
            arg=pgast.StringConstant(val=str(typeref.id)),
            type_name=pgast.TypeName(name=('uuid', ))),
        ))

    # Shape elements that are stored in a separate link table.
    external_inserts = []
    # Shape elements that are properties of the link *pointing to*
    # this object (the source path differs from the insert subject).
    parent_link_props = []

    with ctx.newrel() as subctx:
        subctx.rel = select
        subctx.rel_hierarchy[select] = insert_stmt
        subctx.expr_exposed = False

        if iterator_cte is not None:
            subctx.path_scope = ctx.path_scope.new_child()
            subctx.path_scope[iterator_cte.query.path_id] = select

        # Process the Insert IR and separate links that go
        # into the main table from links that are inserted into
        # a separate link table.
        for shape_el in ir_stmt.subject.shape:
            rptr = shape_el.rptr
            ptrref = rptr.ptrref
            if ptrref.material_ptr is not None:
                ptrref = ptrref.material_ptr

            if (ptrref.parent_ptr is not None
                    and rptr.source.path_id != ir_stmt.subject.path_id):
                # A property of the enclosing link; handled separately
                # against the wrapper query below.
                parent_link_props.append(shape_el)
                continue

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=True, link_bias=False)

            props_only = False

            # First, process all local link inserts.
            if ptr_info.table_type == 'ObjectType':
                props_only = True
                field = pgast.ColumnRef(name=[ptr_info.column_name])
                cols.append(field)
                insvalue = insert_value_for_shape_element(
                    insert_stmt, wrapper, ir_stmt, shape_el, iterator_id,
                    ptr_info=ptr_info, ctx=subctx)
                values.append(pgast.ResTarget(val=insvalue))

            # Re-resolve with link_bias to see whether this pointer
            # also (or instead) has a link-table representation.
            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False, link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_inserts.append((shape_el, props_only))

        if iterator_cte is not None:
            # Thread the iterator identity through the INSERT via the
            # `__edb_token` column so downstream CTEs can correlate
            # inserted rows with iterations.
            cols.append(pgast.ColumnRef(name=['__edb_token']))
            values.append(pgast.ResTarget(val=iterator_id))

            pathctx.put_path_identity_var(
                insert_stmt, iterator_set.path_id,
                cols[-1], force=True, env=subctx.env)

            pathctx.put_path_bond(insert_stmt, iterator_set.path_id)

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(insert_cte)

    # Process necessary updates to the link tables.
    for shape_el, props_only in external_inserts:
        process_link_update(
            ir_stmt=ir_stmt, ir_set=shape_el, props_only=props_only,
            wrapper=wrapper, dml_cte=insert_cte, iterator_cte=iterator_cte,
            is_insert=True, ctx=ctx)

    if parent_link_props:
        # Compile the parent-link property elements against the wrapper
        # query, exposing them as a named tuple value of the subject.
        prop_elements = []

        with ctx.newscope() as scopectx:
            scopectx.rel = wrapper

            for shape_el in parent_link_props:
                rptr = shape_el.rptr
                scopectx.path_scope[rptr.source.path_id] = wrapper
                pathctx.put_path_rvar_if_not_exists(
                    wrapper, rptr.source.path_id, insert_rvar,
                    aspect='value', env=scopectx.env)
                dispatch.visit(shape_el, ctx=scopectx)
                tuple_el = astutils.tuple_element_for_shape_el(
                    shape_el, None, ctx=scopectx)
                prop_elements.append(tuple_el)

        valtuple = pgast.TupleVar(elements=prop_elements, named=True)
        pathctx.put_path_value_var(
            wrapper, ir_stmt.subject.path_id,
            valtuple, force=True, env=ctx.env)
def process_link_update(
    *,
    ir_stmt: irast.MutatingStmt,
    ir_set: irast.Set,
    props_only: bool,
    is_insert: bool,
    shape_op: qlast.ShapeOp = qlast.ShapeOp.ASSIGN,
    source_typeref: irast.TypeRef,
    wrapper: pgast.Query,
    dml_cte: pgast.CommonTableExpr,
    iterator_cte: Optional[pgast.CommonTableExpr],
    ctx: context.CompilerContextLevel,
) -> pgast.CommonTableExpr:
    """Perform updates to a link relation as part of a DML statement.

    :param ir_stmt:
        IR of the statement.
    :param ir_set:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param is_insert:
        Whether the enclosing DML statement is an INSERT (in which case
        no prior link rows need to be removed and no conflicts arise).
    :param shape_op:
        The shape operator (`:=` assign, `+=` append, `-=` subtract)
        governing whether existing link rows are deleted.
    :param source_typeref:
        TypeRef of the link source object, used to select the correct
        specialized PointerRef descriptor.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    :return:
        The CTE producing the new link-table rows.
    """
    toplevel = ctx.toplevel_stmt

    rptr = ir_set.rptr
    ptrref = rptr.ptrref
    assert isinstance(ptrref, irast.PointerRef)
    target_is_scalar = irtyputils.is_scalar(ir_set.typeref)
    path_id = ir_set.path_id

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    if ptrref.material_ptr is not None:
        mptrref = ptrref.material_ptr
    else:
        mptrref = ptrref

    if mptrref.out_source.id != source_typeref.id:
        # The material pointer belongs to a different source type;
        # find the descendant specialized for *source_typeref*.
        for descendant in mptrref.descendants:
            if descendant.out_source.id == source_typeref.id:
                mptrref = descendant
                break
        else:
            raise errors.InternalServerError(
                'missing PointerRef descriptor for source typeref')

    assert isinstance(mptrref, irast.PointerRef)

    target_rvar = relctx.range_for_ptrref(
        mptrref, for_mutation=True, only_self=True, ctx=ctx)
    assert isinstance(target_rvar, pgast.RelRangeVar)
    assert isinstance(target_rvar.relation, pgast.Relation)
    target_alias = target_rvar.alias.aliasname

    target_tab_name = (target_rvar.relation.schemaname,
                       target_rvar.relation.name)

    dml_cte_rvar = pgast.RelRangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=ctx.env.aliases.get('m')))

    # Fixed column values shared by every produced link row:
    # the pointer item id and the source object identity taken
    # from the enclosing DML CTE.
    col_data = {
        'ptr_item_id':
            pgast.TypeCast(arg=pgast.StringConstant(val=str(mptrref.id)),
                           type_name=pgast.TypeName(name=('uuid', ))),
        'source':
            pathctx.get_rvar_path_identity_var(dml_cte_rvar,
                                               ir_stmt.subject.path_id,
                                               env=ctx.env)
    }

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte, specified_cols = process_link_values(
        ir_stmt=ir_stmt,
        ir_expr=ir_set,
        target_tab=target_tab_name,
        col_data=col_data,
        dml_rvar=dml_cte_rvar,
        sources=[],
        props_only=props_only,
        target_is_scalar=target_is_scalar,
        iterator_cte=iterator_cte,
        ctx=ctx,
    )

    toplevel.ctes.append(data_cte)

    delqry: Optional[pgast.DeleteStmt]

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(val=pgast.ColumnRef(
                name=[data_cte.name, pgast.Star()]), ),
        ],
        from_clause=[
            pgast.RelRangeVar(relation=data_cte),
        ],
    )

    # Existing link rows only need removal for := (replace all) and
    # -= (remove matching); += and plain INSERT never delete.
    if not is_insert and shape_op is not qlast.ShapeOp.APPEND:
        if shape_op is qlast.ShapeOp.SUBTRACT:
            data_rvar = relctx.rvar_for_rel(data_select, ctx=ctx)

            # Drop requested link records: delete only rows whose
            # (source, target) matches the computed data set.
            delqry = pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=astutils.new_binop(
                        lexpr=col_data['source'],
                        op='=',
                        rexpr=pgast.ColumnRef(name=[target_alias,
                                                    'source'], ),
                    ),
                    op='AND',
                    rexpr=astutils.new_binop(
                        lexpr=pgast.ColumnRef(name=[target_alias,
                                                    'target'], ),
                        op='=',
                        rexpr=pgast.ColumnRef(
                            name=[data_rvar.alias.aliasname, 'target'], ),
                    ),
                ),
                using_clause=[
                    dml_cte_rvar,
                    data_rvar,
                ],
                returning_list=[
                    pgast.ResTarget(val=pgast.ColumnRef(
                        name=[target_alias, pgast.Star()], ), )
                ])
        else:
            # Drop all previous link records for this source.
            delqry = pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=col_data['source'],
                    op='=',
                    rexpr=pgast.ColumnRef(name=[target_alias,
                                                'source'], ),
                ),
                using_clause=[dml_cte_rvar],
                returning_list=[
                    pgast.ResTarget(val=pgast.ColumnRef(
                        name=[target_alias, pgast.Star()], ), )
                ])

        delcte = pgast.CommonTableExpr(
            name=ctx.env.aliases.get(hint='d'),
            query=delqry,
        )

        pathctx.put_path_value_rvar(delcte.query, path_id.ptr_path(),
                                    target_rvar, env=ctx.env)

        # Record the effect of this removal in the relation overlay
        # context to ensure that references to the link in the result
        # of this DML statement yield the expected results.
        dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
        relctx.add_ptr_rel_overlay(ptrref, 'except', delcte,
                                   dml_stmts=dml_stack, ctx=ctx)
        toplevel.ctes.append(delcte)
    else:
        delqry = None

    if shape_op is qlast.ShapeOp.SUBTRACT:
        # -= only deletes; nothing to insert.
        return data_cte

    cols = [pgast.ColumnRef(name=[col]) for col in specified_cols]
    conflict_cols = ['source', 'target', 'ptr_item_id']

    if is_insert:
        conflict_clause = None
    elif len(cols) == len(conflict_cols) and delqry is not None:
        # There are no link properties, so we can optimize the
        # link replacement operation by omitting the overlapping
        # link rows from deletion.
        filter_select = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=['source']), ),
                pgast.ResTarget(val=pgast.ColumnRef(name=['target']), ),
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
        )

        delqry.where_clause = astutils.extend_binop(
            delqry.where_clause,
            astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(args=[
                    pgast.ColumnRef(name=['source']),
                    pgast.ColumnRef(name=['target']),
                ], ),
                rexpr=pgast.SubLink(
                    type=pgast.SubLinkType.ALL,
                    expr=filter_select,
                ),
                op='!=',
            ))

        conflict_clause = pgast.OnConflictClause(
            action='nothing',
            infer=pgast.InferClause(index_elems=[
                pgast.ColumnRef(name=[col]) for col in conflict_cols
            ]),
        )
    else:
        # Inserting rows into the link table may produce cardinality
        # constraint violations, since the INSERT into the link table
        # is executed in the snapshot where the above DELETE from
        # the link table is not visible.  Hence, we need to use
        # the ON CONFLICT clause to resolve this.
        conflict_inference = []
        conflict_exc_row = []

        for col in conflict_cols:
            conflict_inference.append(pgast.ColumnRef(name=[col]))
            conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col]))

        conflict_data = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.ColumnRef(
                    name=[data_cte.name, pgast.Star()]))
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
            where_clause=astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
                rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
                op='='))

        conflict_clause = pgast.OnConflictClause(
            action='update',
            infer=pgast.InferClause(index_elems=conflict_inference),
            target_list=[
                pgast.MultiAssignRef(columns=cols, source=conflict_data)
            ])

    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=conflict_clause,
            returning_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()]))
            ]))

    pathctx.put_path_value_rvar(updcte.query, path_id.ptr_path(),
                                target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that references to the link in the result
    # of this DML statement yield the expected results.
    dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
    relctx.add_ptr_rel_overlay(ptrref, 'union', updcte,
                               dml_stmts=dml_stack, ctx=ctx)

    toplevel.ctes.append(updcte)

    return data_cte