def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a (non-set-returning) function call IR into a SQL FuncCall.

    Raises UnsupportedFeatureError for set-returning functions, which
    cannot be expressed as a simple scalar expression.
    """
    if expr.typemod is ql_ft.TypeModifier.SetOfType:
        raise errors.UnsupportedFeatureError(
            'set returning functions are not supported in simple expressions')

    args = _compile_call_args(expr, ctx=ctx)

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        # Pass an explicitly typed empty array for the omitted variadic
        # tail so backend overload resolution stays unambiguous.
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(
                    expr.variadic_param_type)))
        args.append(pgast.VariadicArgument(expr=var))

    name = relgen.get_func_call_backend_name(expr, ctx=ctx)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a (non-set-returning) function call IR into a SQL FuncCall.

    Raises RuntimeError for set-returning functions, which cannot be
    expressed as a simple scalar expression.
    """
    if expr.typemod is ql_ft.TypeModifier.SET_OF:
        raise RuntimeError(
            'set returning functions are not supported in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        # Pass an explicitly typed empty array for the omitted variadic
        # tail so backend overload resolution stays unambiguous.
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(
                    expr.variadic_param_type)))
        args.append(pgast.VariadicArgument(expr=var))

    if expr.func_sql_function:
        # The function is implemented directly by a SQL function.
        name = (expr.func_sql_function, )
    else:
        name = common.get_function_backend_name(expr.func_shortname,
                                                expr.func_module_id)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def get_pg_type(
        typeref: irast.TypeRef, *,
        ctx: context.CompilerContextLevel) -> typing.Tuple[str, ...]:
    """Return the SQL type name (as a possibly-qualified name tuple)
    for *typeref* in the current compilation context.

    In a serialization context, JSON/JSONB output formats override the
    natural type, and object types serialize as ``record``.

    Fix: the return annotation was ``typing.Tuple[str]``, which declares
    a one-element tuple; ``pg_type_from_ir_typeref`` may return
    multi-part qualified names, so it must be ``Tuple[str, ...]``.
    """
    if in_serialization_ctx(ctx):
        if ctx.env.output_format is context.OutputFormat.JSONB:
            return ('jsonb', )
        elif ctx.env.output_format is context.OutputFormat.JSON:
            return ('json', )
        elif irtyputils.is_object(typeref):
            return ('record', )
    # Not serializing, or a plain scalar/collection: use the natural
    # backend type of the typeref.
    return pgtypes.pg_type_from_ir_typeref(typeref)
def fini_toplevel(stmt: pgast.Query, ctx: context.CompilerContextLevel) -> None: scan_check_ctes(stmt, ctx.env.check_ctes, ctx=ctx) # Type rewrites go first. if stmt.ctes is None: stmt.ctes = [] stmt.ctes[:0] = list(ctx.type_ctes.values()) stmt.argnames = argmap = ctx.argmap if not ctx.env.use_named_params: # Adding unused parameters into a CTE targets = [] for param in ctx.env.query_params: pgparam = argmap[param.name] if pgparam.used: continue targets.append( pgast.ResTarget(val=pgast.TypeCast( arg=pgast.ParamRef(number=pgparam.index), type_name=pgast.TypeName( name=pg_types.pg_type_from_ir_typeref( param.ir_type))))) if targets: stmt.append_cte( pgast.CommonTableExpr( name="__unused_vars", query=pgast.SelectStmt(target_list=targets)))
def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a type cast as a native SQL cast or as a function call."""
    compiled_arg = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        target_type = pg_types.pg_type_from_ir_typeref(expr.to_type)
        return pgast.TypeCast(
            arg=compiled_arg,
            type_name=pgast.TypeName(name=target_type))

    if expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            backend_name = common.get_cast_backend_name(
                expr.cast_name, expr.cast_module_id, aspect='function')
        else:
            backend_name = tuple(expr.sql_function.split('.'))

        return pgast.FuncCall(name=backend_name, args=[compiled_arg])

    raise RuntimeError('cast not supported')
def named_tuple_as_json_object(expr, *, styperef, env):
    """Serialize a named tuple expression into a JSON object keyed by
    element name, fetching each element via row_getattr_by_num."""
    keyvals = []
    for el_idx, el_type in enumerate(styperef.subtypes):
        keyvals.append(pgast.StringConstant(val=el_type.element_name))
        # A typed NULL sentinel tells row_getattr_by_num what type the
        # extracted attribute should be returned as.
        type_sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(el_type)
            )
        )
        val = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(el_idx + 1)),
                type_sentinel
            ])
        if irtyputils.is_collection(el_type):
            # Nested collections need their own JSON serialization.
            val = coll_as_json_object(val, styperef=el_type, env=env)
        keyvals.append(val)

    return pgast.FuncCall(
        name=_get_json_func('build_object', env=env),
        args=keyvals, null_safe=True, ser_safe=True,
        nullable=expr.nullable)
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple expression into a JSON array."""
    has_colls = any(
        irtyputils.is_collection(st) for st in styperef.subtypes)
    if not has_colls:
        # No nested collections, take the fast path.
        return pgast.FuncCall(
            name=('edgedb',) + _get_json_func('row_to_array', env=env),
            args=[expr], null_safe=True, ser_safe=True,
            nullable=expr.nullable)

    vals = []
    for el_idx, el_type in enumerate(styperef.subtypes):
        # A typed NULL sentinel tells row_getattr_by_num what type the
        # extracted attribute should be returned as.
        type_sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(el_type)
            )
        )
        val = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(el_idx + 1)),
                type_sentinel
            ])
        if irtyputils.is_collection(el_type):
            # Nested collections need their own JSON serialization.
            val = coll_as_json_object(val, styperef=el_type, env=env)
        vals.append(val)

    return pgast.FuncCall(
        name=_get_json_func('build_array', env=env),
        args=vals, null_safe=True, ser_safe=True,
        nullable=expr.nullable)
def fini_stmt(
        stmt: pgast.Query, ctx: context.CompilerContextLevel,
        parent_ctx: context.CompilerContextLevel) -> None:
    """Finalize a compiled statement.

    Only the top-level statement needs work: record the argument map
    and, for positional parameters, force any unreferenced parameters
    into a dummy CTE so the backend still sees every declared one.
    """
    if stmt is ctx.toplevel_stmt:
        stmt.argnames = argmap = ctx.argmap

        if not ctx.env.use_named_params:
            # Adding unused parameters into a CTE
            targets = []
            for param in ctx.env.query_params:
                if param.name in argmap:
                    # Already referenced somewhere in the query.
                    continue
                if param.name.isdecimal():
                    # Positional EdgeQL params are 0-based; SQL is 1-based.
                    idx = int(param.name) + 1
                else:
                    idx = len(argmap) + 1
                argmap[param.name] = pgast.Param(
                    index=idx,
                    required=param.required,
                )
                targets.append(
                    pgast.ResTarget(val=pgast.TypeCast(
                        arg=pgast.ParamRef(number=idx),
                        type_name=pgast.TypeName(
                            name=pg_types.pg_type_from_ir_typeref(
                                param.ir_type)))))
            if targets:
                # NOTE(review): assumes toplevel_stmt.ctes is already a
                # list here (no None guard as elsewhere) — confirm it is
                # always initialized by this point.
                ctx.toplevel_stmt.ctes.append(
                    pgast.CommonTableExpr(
                        name="__unused_vars",
                        query=pgast.SelectStmt(target_list=targets)))
def compile_FloatConstant(
        expr: irast.BaseConstant, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a float constant as a numeric literal cast to its type."""
    literal = pgast.NumericConstant(val=expr.value)
    target_type = pg_types.pg_type_from_ir_typeref(expr.typeref)
    return pgast.TypeCast(
        arg=literal,
        type_name=pgast.TypeName(name=target_type))
def compile_Parameter(
        expr: irast.Parameter, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a parameter reference into a typed SQL parameter.

    Named parameters are emitted as NamedParamRef when the environment
    supports them; otherwise a positional index is looked up in (or
    allocated into) ``ctx.argmap``.

    Fix: the ``except KeyError`` handler re-checked
    ``expr.name in ctx.argmap`` — dead code, since the KeyError from the
    lookup proves the key is absent.  The dead branch is removed.
    """
    result: pgast.BaseParamRef
    is_decimal: bool = expr.name.isdecimal()

    if not is_decimal and ctx.env.use_named_params:
        result = pgast.NamedParamRef(
            name=expr.name,
            nullable=not expr.required,
        )
    else:
        try:
            index = ctx.argmap[expr.name].index
        except KeyError:
            # First reference to this parameter: allocate an index.
            if expr.name.startswith('__edb_arg_'):
                # Implicit args carry their index in the name.
                index = int(expr.name[10:]) + 1
            elif is_decimal:
                # Positional EdgeQL params are 0-based; SQL is 1-based.
                index = int(expr.name) + 1
            else:
                index = next(ctx.next_argument)
            ctx.argmap[expr.name] = pgast.Param(
                index=index,
                required=expr.required,
            )
        result = pgast.ParamRef(number=index, nullable=not expr.required)

    # Cast to the declared parameter type.
    return pgast.TypeCast(
        arg=result,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)))
def compile_StringConstant(
        expr: irast.StringConstant, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile a string constant as an escaped literal cast to its type."""
    literal = pgast.EscapedStringConstant(val=expr.value)
    target_type = pg_types.pg_type_from_ir_typeref(expr.typeref)
    return pgast.TypeCast(
        arg=literal,
        type_name=pgast.TypeName(name=target_type))
def output_as_value(
        expr: pgast.BaseExpr, *,
        env: context.Environment) -> pgast.BaseExpr:
    """Render *expr* as a plain value, flattening TupleVars into rows."""
    result = expr

    if isinstance(expr, pgast.TupleVar):
        RowCls: Union[Type[pgast.ImplicitRowExpr], Type[pgast.RowExpr]]
        RowCls = (pgast.ImplicitRowExpr
                  if len(expr.elements) > 1 else pgast.RowExpr)
        result = RowCls(
            args=[output_as_value(el.val, env=env) for el in expr.elements])

        needs_cast = (
            expr.typeref is not None
            and not env.singleton_mode
            and irtyputils.is_persistent_tuple(expr.typeref)
        )
        if needs_cast:
            # Persistent tuple types have a concrete composite type in
            # the database, so cast the row expression into it.
            composite = pgtypes.pg_type_from_ir_typeref(expr.typeref)
            result = pgast.TypeCast(
                arg=result,
                type_name=pgast.TypeName(name=composite),
            )

    return result
def top_output_as_value(
        stmt: pgast.SelectStmt,
        ir_set: irast.Set, *,
        env: context.Environment) -> pgast.SelectStmt:
    """Finalize output serialization on the top level."""
    fmt = env.output_format

    if fmt is context.OutputFormat.JSON and not env.expected_cardinality_one:
        # For JSON we just want to aggregate the whole thing
        # into a JSON array.
        return aggregate_json_output(stmt, ir_set, env=env)

    if (fmt is context.OutputFormat.NATIVE
            and env.explicit_top_cast is not None):
        # Wrap the first output column in an explicit cast to the
        # requested top-level type.
        casted = pgast.TypeCast(
            arg=stmt.target_list[0].val,
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(
                    env.explicit_top_cast,
                    persistent_tuples=True,
                ),
            ),
        )
        stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=casted,
        )
        return stmt

    # JSON_ELEMENTS and BINARY don't require any wrapping
    return stmt
def compile_Parameter(
        expr: irast.Parameter, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a parameter reference into a typed SQL parameter."""
    result: pgast.BaseParamRef
    if expr.name.isdecimal():
        # Positional EdgeQL params are 0-based; SQL params are 1-based.
        index = int(expr.name) + 1
        result = pgast.ParamRef(number=index)
    else:
        if ctx.env.use_named_params:
            result = pgast.NamedParamRef(name=expr.name)
        else:
            # Reuse the positional index for a previously seen named
            # parameter, otherwise allocate the next one.
            if expr.name in ctx.argmap:
                index = ctx.argmap[expr.name]
            else:
                index = len(ctx.argmap) + 1
                ctx.argmap[expr.name] = index
            result = pgast.ParamRef(number=index)

    # Cast to the declared parameter type.
    return pgast.TypeCast(
        arg=result,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)
        )
    )
def output_as_value(expr: pgast.BaseExpr, *,
                    env: context.Environment) -> pgast.BaseExpr:
    """Render *expr* as a plain value, flattening TupleVars into rows.

    Fix: the function previously fell off the end without returning on
    the main path, yielding ``None`` despite the ``-> pgast.BaseExpr``
    annotation; the missing ``return val`` is restored.
    """
    val = expr
    if isinstance(expr, pgast.TupleVar):
        RowCls: Union[Type[pgast.ImplicitRowExpr], Type[pgast.RowExpr]]
        if (env.output_format is context.OutputFormat.NATIVE_INTERNAL
                and len(expr.elements) == 1
                and (path_id := (el0 := expr.elements[0]).path_id) is not None
                and (rptr_name := path_id.rptr_name()) is not None
                and (rptr_name.name == 'id')):
            # This is a special mode whereby bare refs to objects
            # are serialized to UUID values.
            return output_as_value(el0.val, env=env)
        elif len(expr.elements) > 1:
            RowCls = pgast.ImplicitRowExpr
        else:
            RowCls = pgast.RowExpr

        val = RowCls(
            args=[output_as_value(e.val, env=env) for e in expr.elements])

        if (expr.typeref is not None
                and not env.singleton_mode
                and irtyputils.is_persistent_tuple(expr.typeref)):
            # Persistent tuple types have a concrete composite type in
            # the database, so cast the row expression into it.
            pg_type = pgtypes.pg_type_from_ir_typeref(expr.typeref)
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=pg_type, ),
            )

    return val
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple into a JSON array.

    Schema-level tuple types can be accessed by column directly; other
    tuples are unpacked via unnest() with an explicit column definition
    list.
    """
    vals = []

    if styperef.in_schema:
        # Tuple type exists in the schema: elements are addressable
        # by positional column name.
        for el_idx, el_type in enumerate(styperef.subtypes):
            val = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)], ),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return pgast.FuncCall(name=_get_json_func('build_array', env=env),
                              args=vals, null_safe=True, ser_safe=True,
                              nullable=expr.nullable)
    else:
        # Anonymous tuple: unpack through unnest() with a column
        # definition list describing each element's SQL type.
        coldeflist = []
        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(
                pgast.ColumnDef(
                    name=str(el_idx),
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))
            val = pgast.ColumnRef(name=[str(el_idx)])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        res = pgast.FuncCall(name=_get_json_func('build_array', env=env),
                             args=vals, null_safe=True, ser_safe=True,
                             nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ])
def tuple_getattr(tuple_val, tuple_typeref, attr):
    """Compile access to attribute *attr* of a tuple value.

    Schema-level tuple types support direct attribute indirection;
    anonymous tuples are unpacked via unnest() with a column definition
    list and the element is selected by positional index.
    """
    ttypes = []
    # NB: this local list shadows the `pgtypes` module alias used
    # elsewhere in this file; here it only holds SQL type names.
    pgtypes = []
    for i, st in enumerate(tuple_typeref.subtypes):
        pgtype = pg_types.pg_type_from_ir_typeref(st)
        pgtypes.append(pgtype)
        if st.element_name:
            ttypes.append(st.element_name)
        else:
            ttypes.append(str(i))

    # Positional index of the requested attribute.
    index = ttypes.index(attr)

    if tuple_typeref.in_schema:
        # Schema-level tuple type: attribute access is direct.
        set_expr = pgast.Indirection(
            arg=tuple_val,
            indirection=[
                pgast.ColumnRef(
                    name=[attr],
                ),
            ],
        )
    else:
        # Anonymous tuple: unpack via unnest() and select the column
        # by positional index.
        set_expr = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(
                        name=[str(index)],
                    ),
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[tuple_val],
                                )
                            ],
                            coldeflist=[
                                pgast.ColumnDef(
                                    name=str(i),
                                    typename=pgast.TypeName(
                                        name=t
                                    )
                                )
                                for i, t in enumerate(pgtypes)
                            ]
                        )
                    ]
                )
            ]
        )

    return set_expr
def compile_Array(
        expr: irast.Base, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile an array literal; empty arrays get an explicit type cast."""
    compiled_elements = [dispatch.compile(el, ctx=ctx)
                         for el in expr.elements]
    array_expr = astutils.safe_array_expr(compiled_elements)

    if not irutils.is_empty_array_expr(expr):
        return array_expr

    # An empty array literal carries no elements to infer a type from,
    # so cast it to the statically known array type.
    serialized = output.in_serialization_ctx(ctx=ctx)
    return pgast.TypeCast(
        arg=array_expr,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(
                expr.typeref, serialized=serialized)))
def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a type cast, optionally enforcing a non-NULL result."""
    pg_expr = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        pg_type = pg_types.pg_type_from_ir_typeref(expr.to_type)
        res: pgast.BaseExpr = pgast.TypeCast(
            arg=pg_expr,
            type_name=pgast.TypeName(
                name=pg_type
            )
        )
    elif expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            func_name = common.get_cast_backend_name(
                expr.cast_name, expr.cast_module_id, aspect='function')
        else:
            func_name = tuple(expr.sql_function.split('.'))

        res = pgast.FuncCall(
            name=func_name,
            args=[pg_expr],
        )
    else:
        raise RuntimeError('cast not supported')

    if expr.cardinality_mod is qlast.CardinalityModifier.Required:
        # A <required> cast must raise instead of yielding NULL.
        res = pgast.FuncCall(
            name=('edgedb', '_raise_exception_on_null'),
            args=[
                res,
                pgast.StringConstant(
                    val='invalid_parameter_value',
                ),
                pgast.StringConstant(
                    val='invalid null value in cast',
                ),
                pgast.StringConstant(val=''),
            ]
        )

    return res
def compile_Parameter(
        expr: irast.Parameter, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile a parameter reference into a typed SQL parameter,
    marking positional parameters as used in the argument map."""
    result: pgast.BaseParamRef
    is_decimal: bool = expr.name.isdecimal()
    nullable = not expr.required

    if not is_decimal and ctx.env.use_named_params:
        result = pgast.NamedParamRef(
            name=expr.name,
            nullable=nullable,
        )
    else:
        # Positional parameter: look it up and record the use.
        pgparam = ctx.argmap[expr.name]
        pgparam.used = True
        result = pgast.ParamRef(number=pgparam.index, nullable=nullable)

    # Cast to the declared parameter type.
    return pgast.TypeCast(
        arg=result,
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)))
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize an array expression into a JSON array.

    Arrays of tuples or bytes need per-element processing (unnest +
    re-aggregation); other arrays serialize in one call.
    """
    el_type = styperef.subtypes[0]
    is_tuple = irtyputils.is_tuple(el_type)
    # Tuples and bytes might need underlying casts to be done
    if is_tuple or irtyputils.is_bytes(el_type):
        coldeflist = []
        out_alias = env.aliases.get('q')

        val: pgast.BaseExpr
        if is_tuple:
            json_args: List[pgast.BaseExpr] = []
            is_named = any(st.element_name for st in el_type.subtypes)
            for i, st in enumerate(el_type.subtypes):
                if is_named:
                    colname = st.element_name
                    assert colname
                    json_args.append(pgast.StringConstant(val=colname))
                else:
                    colname = str(i)

                val = pgast.ColumnRef(name=[colname])
                val = serialize_expr_to_json(
                    val, styperef=st, nested=True, env=env)
                json_args.append(val)

                if not irtyputils.is_persistent_tuple(el_type):
                    # Column definition list is only allowed for functions
                    # returning "record", i.e. an anonymous tuple, which
                    # would not be the case for schema-persistent tuple types.
                    coldeflist.append(
                        pgast.ColumnDef(
                            name=colname,
                            typename=pgast.TypeName(
                                name=pgtypes.pg_type_from_ir_typeref(st))))

            json_func = 'build_object' if is_named else 'build_array'
            agg_arg = _build_json(json_func, json_args, env=env)

            # A zero-element tuple has nothing to unnest.
            needs_unnest = bool(el_type.subtypes)
        else:
            assert not el_type.subtypes
            val = pgast.ColumnRef(name=[out_alias])
            agg_arg = serialize_expr_to_json(
                val, styperef=el_type, nested=True, env=env)
            needs_unnest = True

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    # COALESCE so that an empty array yields '[]',
                    # not NULL.
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[agg_arg],
                        ),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(alias=pgast.Alias(aliasname=out_alias),
                                    is_rowsfrom=True,
                                    functions=[
                                        pgast.FuncCall(
                                            name=('unnest', ),
                                            args=[expr],
                                            coldeflist=coldeflist,
                                        )
                                    ])
            ] if needs_unnest else [],
        )
    else:
        # Plain arrays serialize directly via to_json[b]().
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr], null_safe=True, ser_safe=True)
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize an array expression into a JSON array."""
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        # Arrays of tuples are unpacked via unnest() and re-aggregated
        # element by element.
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)
            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    # COALESCE so that an empty array yields '[]',
                    # not NULL.
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        # Plain arrays serialize directly via to_json[b]().
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr], null_safe=True, ser_safe=True)
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize a named tuple into a JSON object keyed by element name."""
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        # Schema-persistent tuples: elements addressable by name.
        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(
                        name=[el_type.element_name]
                    )
                ]
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)
    else:
        # Anonymous tuple: unpack via unnest() with an explicit column
        # definition list.
        coldeflist = []

        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(pgast.ColumnDef(
                name=el_type.element_name,
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            keyvals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=res,
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[expr],
                                )
                            ],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
def unnamed_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize an unnamed tuple into a JSON array."""
    vals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        # Schema-persistent tuples: elements addressable by position.
        for el_idx, el_type in enumerate(styperef.subtypes):
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)], ),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return _build_json(
            'build_array',
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable, env=env,
        )
    else:
        # Anonymous tuple: unpack via unnest() with an explicit column
        # definition list.
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(
                pgast.ColumnDef(
                    name=str(el_idx),
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))

            val = pgast.ColumnRef(name=[str(el_idx)])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        res = _build_json(
            'build_array',
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable, env=env,
        )

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ])
def compile_operator(
        expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an operator call over pre-compiled operand expressions."""
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    # Distribute operands by operator kind.
    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
            or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        # Object (in)equality is a plain identity comparison.
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        # EXISTS compiles to an IS NOT NULL test on its operand.
        assert rexpr
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_operator[1:])

    elif expr.sql_function:
        # Operator backed by a SQL function rather than a SQL operator.
        sql_func = expr.sql_function[0]
        func_name = tuple(sql_func.split('.', 1))
        if len(expr.sql_function) > 1:
            # Explicit operand types given in FROM SQL FUNCTION
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_function[1:])

        args = []
        if lexpr is not None:
            args.append(lexpr)
        if rexpr is not None:
            args.append(rexpr)

        result = pgast.FuncCall(name=func_name, args=args)

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def compile_operator(
        expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an operator call over pre-compiled operand expressions."""
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    # Distribute operands by operator kind.
    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
            or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        # Object (in)equality is a plain identity comparison.
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        # EXISTS compiles to an IS NOT NULL test on its operand.
        # NOTE(review): presumably EXISTS is always prefix so rexpr is
        # set here — confirm (a sibling version asserts this).
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            if lexpr is not None:
                lexpr = pgast.TypeCast(
                    arg=lexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[1], )))
            if rexpr is not None:
                rexpr_qry = None

                if (isinstance(rexpr, pgast.SubLink)
                        and isinstance(rexpr.expr, pgast.SelectStmt)):
                    rexpr_qry = rexpr.expr
                elif isinstance(rexpr, pgast.SelectStmt):
                    rexpr_qry = rexpr

                if rexpr_qry is not None:
                    # Handle cases like foo <op> ANY (SELECT) and
                    # foo <OP> (SELECT): the cast must be applied to the
                    # subquery's output column, not the subquery itself.
                    rexpr_qry.target_list[0] = pgast.ResTarget(
                        name=rexpr_qry.target_list[0].name,
                        val=pgast.TypeCast(arg=rexpr_qry.target_list[0].val,
                                           type_name=pgast.TypeName(
                                               name=(expr.sql_operator[2], ))))
                else:
                    rexpr = pgast.TypeCast(arg=rexpr,
                                           type_name=pgast.TypeName(
                                               name=(expr.sql_operator[2], )))

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def compile_OperatorCall(
        expr: irast.OperatorCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an operator call IR node into a SQL expression."""
    if (expr.func_shortname == 'std::IF'
            and expr.args[0].cardinality is ql_ft.Cardinality.ONE
            and expr.args[2].cardinality is ql_ft.Cardinality.ONE):
        # Singleton IF/ELSE maps directly onto a SQL CASE expression.
        if_expr, condition, else_expr = (a.expr for a in expr.args)

        return pgast.CaseExpr(
            args=[
                pgast.CaseWhen(expr=dispatch.compile(condition, ctx=ctx),
                               result=dispatch.compile(if_expr, ctx=ctx))
            ],
            defresult=dispatch.compile(else_expr, ctx=ctx))

    if expr.typemod is ql_ft.TypeModifier.SET_OF:
        raise RuntimeError(
            f'set returning operator {expr.func_shortname!r} is not supported '
            f'in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    # Distribute operands by operator kind.
    lexpr = rexpr = None
    if expr.operator_kind is ql_ft.OperatorKind.INFIX:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.PREFIX:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.POSTFIX:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    if (expr.func_shortname == 'std::='
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        # Object equality is a plain identity comparison.
        sql_oper = '='
    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            if lexpr is not None:
                lexpr = pgast.TypeCast(
                    arg=lexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[1], )))
            if rexpr is not None:
                rexpr = pgast.TypeCast(
                    arg=rexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[2], )))
    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname,
                                                    expr.func_module_id)[1]

    result: pgast.BaseExpr = pgast.Expr(
        kind=pgast.ExprKind.OP,
        name=sql_oper,
        lexpr=lexpr,
        rexpr=rexpr,
    )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def _get_rel_path_output(
        rel: pgast.BaseRelation, path_id: irast.PathId, *,
        aspect: str,
        ptr_info: typing.Optional[pg_types.PointerStorageInfo] = None,
        env: context.Environment) -> pgast.OutputVar:
    """Resolve *path_id*/*aspect* to an output variable of a base relation.

    Raises LookupError when the aspect is invalid for the path kind or
    when the path cannot be resolved on a base relation.
    """
    if path_id.is_objtype_path():
        # For object paths, 'identity' is an alias of 'value'; nothing
        # else is representable on a base relation.
        if aspect == 'identity':
            aspect = 'value'

        if aspect != 'value':
            raise LookupError(
                f'invalid request for non-scalar path {path_id} {aspect}')

        if (path_id == rel.path_id
                or (rel.path_id.is_type_indirection_path()
                    and path_id == rel.path_id.src_path())):
            return _get_rel_object_id_output(
                rel, path_id, aspect=aspect, env=env)
    else:
        if aspect == 'identity':
            raise LookupError(
                f'invalid request for scalar path {path_id} {aspect}')
        elif aspect == 'serialized':
            # Scalars serialize as their plain value.
            aspect = 'value'

    # Return a cached output var if this path was resolved before.
    var = rel.path_outputs.get((path_id, aspect))
    if var is not None:
        return var

    ptrref = path_id.rptr()
    rptr_dir = path_id.rptr_dir()

    if (rptr_dir is not None
            and rptr_dir != s_pointers.PointerDirection.Outbound):
        raise LookupError(
            f'{path_id} is an inbound pointer and cannot be resolved '
            f'on a base relation')

    if isinstance(rel, pgast.NullRelation):
        # Empty relation: synthesize a typed NULL output column.
        if ptrref is not None:
            target = ptrref.out_target
        else:
            target = path_id.target

        pg_type = pg_types.pg_type_from_ir_typeref(target)

        if ptr_info is not None:
            name = env.aliases.get(ptr_info.column_name)
        else:
            name = env.aliases.get('v')

        val = pgast.TypeCast(arg=pgast.NullConstant(),
                             type_name=pgast.TypeName(name=pg_type, ))

        rel.target_list.append(pgast.ResTarget(name=name, val=val))
        result = pgast.ColumnRef(name=[name], nullable=True)
    else:
        if ptrref is None:
            raise ValueError(
                f'could not resolve trailing pointer class for {path_id}')

        if ptr_info is None:
            ptr_info = pg_types.get_ptrref_storage_info(ptrref,
                                                        resolve_type=False,
                                                        link_bias=False)

        result = pgast.ColumnRef(name=[ptr_info.column_name],
                                 nullable=not ptrref.required)

    # Cache the resolution for subsequent lookups.
    _put_path_output_var(rel, path_id, aspect, result, env=env)

    return result
def array_as_json_object(expr, *, styperef, env):
    """Serialize an array expression into a JSON array."""
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        # Arrays of tuples are unpacked via unnest() and re-aggregated
        # element by element.
        coldeflist = []
        json_args = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = env.aliases.get(st.element_name)
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = env.aliases.get(str(i))

            val = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)
            json_args.append(val)

            coldeflist.append(
                pgast.ColumnDef(name=colname,
                                typename=pgast.TypeName(
                                    name=pgtypes.pg_type_from_ir_typeref(st))))

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    # COALESCE so that an empty array yields '[]',
                    # not NULL.
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(name=_get_json_func('agg', env=env),
                                       args=[
                                           pgast.FuncCall(
                                               name=json_func,
                                               args=json_args,
                                           )
                                       ]),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest', ),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ])
            ])
    else:
        # Plain arrays serialize directly via to_json[b]().
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr], null_safe=True, ser_safe=True)
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize a named tuple into a JSON object keyed by element name."""
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        # Schema-persistent tuples: elements addressable by name.
        for el_type in styperef.subtypes:
            assert el_type.element_name
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[pgast.ColumnRef(name=[el_type.element_name])])
            val = serialize_expr_to_json(
                val, styperef=el_type, nested=True, env=env)
            keyvals.append(val)

        obj = _build_json(
            'build_object',
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )
    else:
        # Anonymous tuple: unpack via unnest() with an explicit column
        # definition list.
        coldeflist = []

        for el_type in styperef.subtypes:
            assert el_type.element_name
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(
                pgast.ColumnDef(
                    name=el_type.element_name,
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))

            val = pgast.ColumnRef(name=[el_type.element_name])
            val = serialize_expr_to_json(
                val, styperef=el_type, nested=True, env=env)
            keyvals.append(val)

        obj = _build_json(
            'build_object',
            args=keyvals, null_safe=True, ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

        obj = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=obj, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ] if styperef.subtypes else [])

    if expr.nullable:
        # Guard against a NULL tuple: produce no row rather than an
        # object built from NULL fields.
        obj = pgast.SelectStmt(target_list=[pgast.ResTarget(val=obj)],
                               where_clause=pgast.NullTest(arg=expr,
                                                           negated=True))

    return obj