def top_output_as_config_op(
        ir_set: irast.Set,
        stmt: pgast.SelectStmt, *,
        env: context.Environment) -> pgast.Query:

    assert isinstance(ir_set.expr, irast.ConfigCommand)

    if ir_set.expr.scope is qltypes.ConfigScope.SYSTEM:
        alias = env.aliases.get('cfg')
        subrvar = pgast.RangeSubselect(
            subquery=stmt,
            alias=pgast.Alias(
                aliasname=alias,
            )
        )

        stmt_res = stmt.target_list[0]

        if stmt_res.name is None:
            stmt_res = stmt.target_list[0] = pgast.ResTarget(
                name=env.aliases.get('v'),
                val=stmt_res.val,
            )

        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='ADD'),
                pgast.StringConstant(val=str(ir_set.expr.scope)),
                pgast.StringConstant(val=ir_set.expr.name),
                pgast.ColumnRef(name=[stmt_res.name]),
            ]
        )

        array = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=array,
                ),
            ],
            from_clause=[
                subrvar,
            ],
        )

        result.ctes = stmt.ctes
        result.argnames = stmt.argnames
        stmt.ctes = []

        return result
    else:
        raise errors.InternalServerError(
            f'CONFIGURE {ir_set.expr.scope} INSERT is not supported')

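# A hypothetical example (setting name and value invented for
# illustration) of the four-element JSON op row that the statement
# above builds with jsonb_build_array:
# [operation, scope, setting name, value].
example_config_op = ['ADD', 'SYSTEM', 'some_setting', '{"value": 1}']
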
def named_tuple_as_json_object(expr, *, styperef, env):
    keyvals = []

    if styperef.in_schema:
        for el_idx, el_type in enumerate(styperef.subtypes):
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val = pgast.Indirection(
                arg=expr,
                indirection=[pgast.ColumnRef(name=[el_type.element_name])])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable)
    else:
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(
                pgast.ColumnDef(
                    name=el_type.element_name,
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            keyvals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest',),
                        args=[pgast.ArrayExpr(elements=[expr])],
                        coldeflist=coldeflist,
                    )
                ])
            ])

def top_output_as_config_op(
        ir_set: irast.Set,
        stmt: pgast.Query, *,
        env: context.Environment) -> pgast.Query:

    if ir_set.expr.system:
        alias = env.aliases.get('cfg')
        subrvar = pgast.RangeSubselect(
            subquery=stmt,
            alias=pgast.Alias(
                aliasname=alias,
            )
        )

        stmt_res = stmt.target_list[0]

        if stmt_res.name is None:
            stmt_res = stmt.target_list[0] = pgast.ResTarget(
                name=env.aliases.get('v'),
                val=stmt_res.val,
            )

        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='ADD'),
                pgast.StringConstant(
                    val='SYSTEM' if ir_set.expr.system else 'SESSION'),
                pgast.StringConstant(val=ir_set.expr.name),
                pgast.ColumnRef(name=[stmt_res.name]),
            ]
        )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=result,
                ),
            ],
            from_clause=[
                subrvar,
            ],
        )
    else:
        raise errors.InternalServerError(
            'CONFIGURE SESSION INSERT is not supported')

def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:

    pg_expr = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        pg_type = pg_types.pg_type_from_ir_typeref(expr.to_type)
        res: pgast.BaseExpr = pgast.TypeCast(
            arg=pg_expr,
            type_name=pgast.TypeName(
                name=pg_type
            )
        )
    elif expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            func_name = common.get_cast_backend_name(
                expr.cast_name, expr.cast_module_id, aspect='function')
        else:
            func_name = tuple(expr.sql_function.split('.'))

        res = pgast.FuncCall(
            name=func_name,
            args=[pg_expr],
        )
    else:
        raise RuntimeError('cast not supported')

    if expr.cardinality_mod is qlast.CardinalityModifier.Required:
        res = pgast.FuncCall(
            name=('edgedb', '_raise_exception_on_null'),
            args=[
                res,
                pgast.StringConstant(
                    val='invalid_parameter_value',
                ),
                pgast.StringConstant(
                    val='invalid null value in cast',
                ),
                pgast.StringConstant(val=''),
            ]
        )

    return res

def named_tuple_as_json_object(expr, *, styperef, env):
    keyvals = []

    for el_idx, el_type in enumerate(styperef.subtypes):
        keyvals.append(pgast.StringConstant(val=el_type.element_name))

        type_sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(el_type)
            )
        )

        val = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(el_idx + 1)),
                type_sentinel
            ])

        if irtyputils.is_collection(el_type):
            val = coll_as_json_object(val, styperef=el_type, env=env)

        keyvals.append(val)

    return pgast.FuncCall(
        name=_get_json_func('build_object', env=env),
        args=keyvals,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable)

def aggregate_json_output(
        stmt: pgast.Query,
        ir_set: irast.Set, *,
        env: context.Environment) -> pgast.Query:

    subrvar = pgast.RangeSubselect(
        subquery=stmt,
        alias=pgast.Alias(aliasname=env.aliases.get('aggw')))

    stmt_res = stmt.target_list[0]

    if stmt_res.name is None:
        stmt_res = stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=stmt_res.val,
        )

    new_val = pgast.FuncCall(
        name=_get_json_func('agg', env=env),
        args=[pgast.ColumnRef(name=[stmt_res.name])])

    # json_agg/jsonb_agg returns NULL for zero rows, so coalesce
    # the aggregate to an empty JSON array.
    new_val = pgast.CoalesceExpr(
        args=[new_val, pgast.StringConstant(val='[]')])

    result = pgast.SelectStmt(
        target_list=[pgast.ResTarget(val=new_val)],
        from_clause=[subrvar])

    result.ctes = stmt.ctes
    result.argnames = stmt.argnames
    stmt.ctes = []

    return result

def compile_IndexIndirection(
        expr: irast.IndexIndirection, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    # Handle Expr[Index], where Expr may be std::str, array<T> or
    # std::json. For strings we translate this into substr calls.
    # Arrays use the native index access. JSON is handled by using the
    # `->` accessor. Additionally, in all of the above cases a
    # boundary-check is performed on the index and an exception is
    # potentially raised.

    # line, column and filename are captured here to be used with the
    # error message
    srcctx = pgast.StringConstant(
        val=irutils.get_source_context_as_json(
            expr.index, errors.InvalidValueError))

    with ctx.new() as subctx:
        subctx.expr_exposed = False
        subj = dispatch.compile(expr.expr, ctx=subctx)
        index = dispatch.compile(expr.index, ctx=subctx)

    result = pgast.FuncCall(
        name=('edgedb', '_index'),
        args=[subj, index, srcctx]
    )

    return result

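# A standalone Python sketch (not the backend's `edgedb._index`
# function itself) of the boundary-checked indexing contract the
# comment above describes: negative indexes count from the end, and
# out-of-range access raises an error instead of returning NULL.
def checked_index(subject, index):
    if not -len(subject) <= index < len(subject):
        raise ValueError(f'index {index} is out of bounds')
    return subject[index]

# e.g. checked_index('hello', -1) == 'o'; checked_index([1, 2], 5) raises
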
def tuple_var_as_json_object(tvar, *, path_id, env):
    if not tvar.named:
        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=[
                serialize_expr(t.val, path_id=t.path_id, nested=True, env=env)
                for t in tvar.elements
            ],
            null_safe=True,
            ser_safe=True,
            nullable=tvar.nullable)
    else:
        keyvals = []

        for element in tvar.elements:
            rptr = element.path_id.rptr()
            name = rptr.shortname.name
            if rptr.parent_ptr is not None:
                name = '@' + name
            keyvals.append(pgast.StringConstant(val=name))
            val = serialize_expr(
                element.val, path_id=element.path_id, nested=True, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=tvar.nullable)

def tuple_var_as_json_object(
    tvar: pgast.TupleVar,
    *,
    path_id: irast.PathId,
    env: context.Environment,
) -> pgast.BaseExpr:

    if not tvar.named:
        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=[
                serialize_expr(t.val, path_id=t.path_id, nested=True, env=env)
                for t in tvar.elements
            ],
            null_safe=True,
            ser_safe=True,
            nullable=tvar.nullable)
    else:
        keyvals: List[pgast.BaseExpr] = []

        for element in tvar.elements:
            rptr = element.path_id.rptr()
            assert rptr is not None
            name = rptr.shortname.name
            if rptr.source_ptr is not None:
                name = '@' + name
            keyvals.append(pgast.StringConstant(val=name))
            val = serialize_expr(
                element.val, path_id=element.path_id, nested=True, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=tvar.nullable)

def compile_RawStringConstant(
        expr: irast.RawStringConstant, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    return pgast.TypeCast(
        arg=pgast.StringConstant(val=expr.value),
        type_name=pgast.TypeName(
            name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

def compile_typeref(expr: irast.TypeRef) -> pgast.BaseExpr:
    if expr.collection:
        raise NotImplementedError()
    else:
        result = pgast.TypeCast(
            arg=pgast.StringConstant(val=str(expr.id)),
            type_name=pgast.TypeName(name=('uuid',)))

    return result

def array_as_json_object(expr, *, styperef, env):
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = env.aliases.get(st.element_name)
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = env.aliases.get(str(i))

            val = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            coldeflist.append(
                pgast.ColumnDef(
                    name=colname,
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(st))))

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[
                                pgast.FuncCall(
                                    name=json_func,
                                    args=json_args,
                                )
                            ]),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ])
            ])
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True)

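# A pure-Python sketch of the serialization shape the statement above
# produces for an array of tuples (assumed semantics, not compiler
# code): named tuples become JSON objects keyed by element name,
# unnamed tuples become JSON arrays, and an empty input yields '[]'.
import json

def array_of_tuples_to_json(rows, element_names=None):
    if element_names:
        out = [dict(zip(element_names, row)) for row in rows]
    else:
        out = [list(row) for row in rows]
    return json.dumps(out)  # json.dumps([]) == '[]', matching the COALESCE
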
def compile_ConfigSet(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    val = _compile_config_value(op, ctx=ctx)
    result: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.INSTANCE and op.backend_setting:
        if not ctx.env.backend_runtime_params.has_configfile_access:
            raise errors.UnsupportedBackendFeatureError(
                "configuring backend parameters via CONFIGURE INSTANCE"
                " is not supported by the current backend")

        result = pgast.AlterSystem(
            name=op.backend_setting,
            value=val,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        if not isinstance(val, pgast.StringConstant):
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=('text',)),
            )

        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[pgast.StringConstant(val=op.backend_setting), val],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        if not isinstance(val, pgast.StringConstant):
            val = pgast.TypeCast(
                arg=val,
                type_name=pgast.TypeName(name=('text',)),
            )

        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                val,
                pgast.BooleanConstant(val='false'),
            ],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.INSTANCE:
        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='SET'),
            pgast.StringConstant(val=str(op.scope)),
            pgast.StringConstant(val=op.name),
            val,
        ])

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(
            target_list=[pgast.ResTarget(val=result)],
        )

    elif op.scope in (
            qltypes.ConfigScope.SESSION, qltypes.ConfigScope.GLOBAL):
        flag = 'G' if op.scope is qltypes.ConfigScope.GLOBAL else 'C'

        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(name='_edgecon_state'),
            ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name),
                    val,
                    pgast.StringConstant(val=flag),
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                    pgast.ColumnRef(name=['type']),
                ]),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[val]),
                    ),
                ],
            ),
        )

        if op.scope is qltypes.ConfigScope.GLOBAL:
            result_row = pgast.RowExpr(args=[
                pgast.StringConstant(val='SET'),
                pgast.StringConstant(val=str(op.scope)),
                pgast.StringConstant(val=op.name),
                val,
            ])

            build_array = pgast.FuncCall(
                name=('jsonb_build_array',),
                args=result_row.args,
                null_safe=True,
                ser_safe=True,
            )

            result = pgast.SelectStmt(
                ctes=[pgast.CommonTableExpr(
                    name='ins',
                    query=result,
                )],
                target_list=[pgast.ResTarget(val=build_array)],
            )

    elif op.scope is qltypes.ConfigScope.DATABASE:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_db_config',
                    schemaname='edgedb',
                ),
            ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name),
                    val,
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                ]),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[val]),
                    ),
                ],
            ),
        )
    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return result

def _compile_config_value(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:
    val: pgast.BaseExpr

    if op.backend_setting:
        assert op.backend_expr is not None
        expr = op.backend_expr
    else:
        expr = op.expr

    with ctx.new() as subctx:
        if op.backend_setting or op.scope == qltypes.ConfigScope.GLOBAL:
            output_format = context.OutputFormat.NATIVE
        else:
            output_format = context.OutputFormat.JSONB

        with context.output_format(ctx, output_format):
            if isinstance(expr, irast.EmptySet):
                # Special handling for empty sets, because we want a
                # singleton representation of the value and not an empty
                # rel in this context.
                if op.cardinality is qltypes.SchemaCardinality.One:
                    val = pgast.NullConstant()
                elif subctx.env.output_format is context.OutputFormat.JSONB:
                    val = pgast.TypeCast(
                        arg=pgast.StringConstant(val='[]'),
                        type_name=pgast.TypeName(name=('jsonb',)),
                    )
                else:
                    val = pgast.TypeCast(
                        arg=pgast.ArrayExpr(elements=[]),
                        type_name=pgast.TypeName(name=('text[]',)),
                    )
            else:
                val = dispatch.compile(expr, ctx=subctx)
                assert isinstance(val, pgast.SelectStmt), "expected SelectStmt"

                pathctx.get_path_serialized_output(
                    val, expr.path_id, env=ctx.env)

                if op.cardinality is qltypes.SchemaCardinality.Many:
                    val = output.aggregate_json_output(val, expr, env=ctx.env)

    # For globals, we need to output the binary encoding so that we
    # can just hand it back to the server. We abuse `record_send` to
    # act as a generic `_send` function.
    if op.scope is qltypes.ConfigScope.GLOBAL:
        val = pgast.FuncCall(
            name=('substring',),
            args=[
                pgast.FuncCall(
                    name=('record_send',),
                    args=[pgast.RowExpr(args=[val])],
                ),
                # The first twelve bytes are header, the rest is the
                # encoding of the actual element.
                pgast.NumericConstant(val="13"),
            ],
        )
        cast_name = s_casts.get_cast_fullname_from_names(
            'std', 'std::bytes', 'std::json')
        val = pgast.FuncCall(
            name=common.get_cast_backend_name(cast_name, aspect='function'),
            args=[val],
        )

    if op.backend_setting and op.scope is qltypes.ConfigScope.INSTANCE:
        assert isinstance(val, pgast.SelectStmt) and len(val.target_list) == 1
        val = val.target_list[0].val
        if isinstance(val, pgast.TypeCast):
            val = val.arg
        if not isinstance(val, pgast.BaseConstant):
            raise AssertionError('value is not a constant in ConfigSet')

    return val

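# A standalone sketch of the wire layout that the
# substring(record_send(ROW(v)) from 13) trick above relies on.
# Assuming a single-column record, PostgreSQL's binary record format is:
#   bytes 0-3   int32  column count (1 here)
#   bytes 4-7   uint32 element type OID
#   bytes 8-11  int32  element payload length
#   bytes 12+   the element's binary ("_send") encoding
import struct

def strip_record_header(wire: bytes) -> bytes:
    ncols, _oid, length = struct.unpack_from('!iIi', wire)
    assert ncols == 1
    return wire[12:12 + length]
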
def process_insert_body(
        ir_stmt: irast.MutatingStmt,
        wrapper: pgast.Query,
        insert_cte: pgast.CommonTableExpr,
        insert_rvar: pgast.PathRangeVar, *,
        ctx: context.CompilerContextLevel) -> None:
    """Generate SQL DML CTEs from an InsertStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param insert_cte:
        CTE representing the SQL INSERT to the main relation of the Object.
    """
    cols = [pgast.ColumnRef(name=['__type__'])]
    select = pgast.SelectStmt(target_list=[])
    values = select.target_list

    # The main INSERT query of this statement will always be
    # present to insert at least the `id` and `__type__`
    # properties.
    insert_stmt = insert_cte.query
    assert isinstance(insert_stmt, pgast.InsertStmt)

    insert_stmt.cols = cols
    insert_stmt.select_stmt = select

    if ir_stmt.parent_stmt is not None:
        iterator_set = ir_stmt.parent_stmt.iterator_stmt
    else:
        iterator_set = None

    if iterator_set is not None:
        with ctx.substmt() as ictx:
            ictx.path_scope = ictx.path_scope.new_child()
            ictx.path_scope[iterator_set.path_id] = ictx.rel
            clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx)
            ictx.rel.path_id = iterator_set.path_id
            pathctx.put_path_bond(ictx.rel, iterator_set.path_id)
            iterator_cte = pgast.CommonTableExpr(
                query=ictx.rel,
                name=ctx.env.aliases.get('iter'))
            ictx.toplevel_stmt.ctes.append(iterator_cte)
        iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx)
        relctx.include_rvar(select, iterator_rvar,
                            path_id=ictx.rel.path_id, ctx=ctx)
        iterator_id = pathctx.get_path_identity_var(
            select, iterator_set.path_id, env=ctx.env)
    else:
        iterator_cte = None
        iterator_id = None

    typeref = ir_stmt.subject.typeref
    if typeref.material_type is not None:
        typeref = typeref.material_type

    values.append(
        pgast.ResTarget(val=pgast.TypeCast(
            arg=pgast.StringConstant(val=str(typeref.id)),
            type_name=pgast.TypeName(name=('uuid',)))))

    external_inserts = []
    parent_link_props = []

    with ctx.newrel() as subctx:
        subctx.rel = select
        subctx.rel_hierarchy[select] = insert_stmt

        subctx.expr_exposed = False

        if iterator_cte is not None:
            subctx.path_scope = ctx.path_scope.new_child()
            subctx.path_scope[iterator_cte.query.path_id] = select

        # Process the Insert IR and separate links that go
        # into the main table from links that are inserted into
        # a separate link table.
        for shape_el in ir_stmt.subject.shape:
            rptr = shape_el.rptr
            ptrref = rptr.ptrref
            if ptrref.material_ptr is not None:
                ptrref = ptrref.material_ptr

            if (ptrref.parent_ptr is not None
                    and rptr.source.path_id != ir_stmt.subject.path_id):
                parent_link_props.append(shape_el)
                continue

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=True, link_bias=False)

            props_only = False

            # First, process all local link inserts.
            if ptr_info.table_type == 'ObjectType':
                props_only = True
                field = pgast.ColumnRef(name=[ptr_info.column_name])
                cols.append(field)

                insvalue = insert_value_for_shape_element(
                    insert_stmt, wrapper, ir_stmt, shape_el, iterator_id,
                    ptr_info=ptr_info, ctx=subctx)

                values.append(pgast.ResTarget(val=insvalue))

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False, link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_inserts.append((shape_el, props_only))

        if iterator_cte is not None:
            cols.append(pgast.ColumnRef(name=['__edb_token']))

            values.append(pgast.ResTarget(val=iterator_id))
            pathctx.put_path_identity_var(
                insert_stmt, iterator_set.path_id,
                cols[-1], force=True, env=subctx.env)

            pathctx.put_path_bond(insert_stmt, iterator_set.path_id)

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(insert_cte)

    # Process necessary updates to the link tables.
    for shape_el, props_only in external_inserts:
        process_link_update(
            ir_stmt=ir_stmt, ir_set=shape_el, props_only=props_only,
            wrapper=wrapper, dml_cte=insert_cte, iterator_cte=iterator_cte,
            is_insert=True, ctx=ctx)

    if parent_link_props:
        prop_elements = []

        with ctx.newscope() as scopectx:
            scopectx.rel = wrapper

            for shape_el in parent_link_props:
                rptr = shape_el.rptr
                scopectx.path_scope[rptr.source.path_id] = wrapper
                pathctx.put_path_rvar_if_not_exists(
                    wrapper, rptr.source.path_id, insert_rvar,
                    aspect='value', env=scopectx.env)
                dispatch.visit(shape_el, ctx=scopectx)
                tuple_el = astutils.tuple_element_for_shape_el(
                    shape_el, None, ctx=scopectx)
                prop_elements.append(tuple_el)

        valtuple = pgast.TupleVar(elements=prop_elements, named=True)
        pathctx.put_path_value_var(
            wrapper, ir_stmt.subject.path_id,
            valtuple, force=True, env=ctx.env)

def process_insert_body(
        ir_stmt: irast.MutatingStmt,
        wrapper: pgast.SelectStmt,
        insert_cte: pgast.CommonTableExpr,
        insert_rvar: pgast.PathRangeVar, *,
        ctx: context.CompilerContextLevel) -> None:
    """Generate SQL DML CTEs from an InsertStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param insert_cte:
        CTE representing the SQL INSERT to the main relation of the Object.
    """
    cols = [pgast.ColumnRef(name=['__type__'])]
    select = pgast.SelectStmt(target_list=[])
    values = select.target_list

    # The main INSERT query of this statement will always be
    # present to insert at least the `id` and `__type__`
    # properties.
    insert_stmt = insert_cte.query
    assert isinstance(insert_stmt, pgast.InsertStmt)

    insert_stmt.cols = cols
    insert_stmt.select_stmt = select

    if ir_stmt.parent_stmt is not None:
        iterator_set = ir_stmt.parent_stmt.iterator_stmt
    else:
        iterator_set = None

    iterator_cte: Optional[pgast.CommonTableExpr]
    iterator_id: Optional[pgast.BaseExpr]

    if iterator_set is not None:
        with ctx.substmt() as ictx:
            ictx.path_scope = ictx.path_scope.new_child()
            ictx.path_scope[iterator_set.path_id] = ictx.rel
            clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx)
            ictx.rel.path_id = iterator_set.path_id
            pathctx.put_path_bond(ictx.rel, iterator_set.path_id)
            iterator_cte = pgast.CommonTableExpr(
                query=ictx.rel,
                name=ctx.env.aliases.get('iter'))
            ictx.toplevel_stmt.ctes.append(iterator_cte)
        iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx)
        relctx.include_rvar(select, iterator_rvar,
                            path_id=ictx.rel.path_id, ctx=ctx)
        iterator_id = pathctx.get_path_identity_var(
            select, iterator_set.path_id, env=ctx.env)
    else:
        iterator_cte = None
        iterator_id = None

    typeref = ir_stmt.subject.typeref
    if typeref.material_type is not None:
        typeref = typeref.material_type

    values.append(
        pgast.ResTarget(val=pgast.TypeCast(
            arg=pgast.StringConstant(val=str(typeref.id)),
            type_name=pgast.TypeName(name=('uuid',)))))

    external_inserts = []

    with ctx.newrel() as subctx:
        subctx.rel = select
        subctx.rel_hierarchy[select] = insert_stmt

        subctx.expr_exposed = False

        if iterator_cte is not None:
            subctx.path_scope = ctx.path_scope.new_child()
            subctx.path_scope[iterator_cte.query.path_id] = select

        # Process the Insert IR and separate links that go
        # into the main table from links that are inserted into
        # a separate link table.
        for shape_el, shape_op in ir_stmt.subject.shape:
            assert shape_op is qlast.ShapeOp.ASSIGN

            rptr = shape_el.rptr
            ptrref = rptr.ptrref
            if ptrref.material_ptr is not None:
                ptrref = ptrref.material_ptr

            if (ptrref.source_ptr is not None
                    and rptr.source.path_id != ir_stmt.subject.path_id):
                continue

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=True, link_bias=False)

            props_only = False

            # First, process all local link inserts.
            if ptr_info.table_type == 'ObjectType':
                props_only = True
                field = pgast.ColumnRef(name=[ptr_info.column_name])
                cols.append(field)

                rel = compile_insert_shape_element(
                    insert_stmt, wrapper, ir_stmt, shape_el, iterator_id,
                    ctx=ctx)

                insvalue = pathctx.get_path_value_var(
                    rel, shape_el.path_id, env=ctx.env)

                if irtyputils.is_tuple(shape_el.typeref):
                    # Tuples require an explicit cast.
                    insvalue = pgast.TypeCast(
                        arg=output.output_as_value(insvalue, env=ctx.env),
                        type_name=pgast.TypeName(
                            name=ptr_info.column_type,
                        ),
                    )

                values.append(pgast.ResTarget(val=insvalue))

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False, link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_inserts.append((shape_el, props_only))

        if iterator_set is not None:
            cols.append(pgast.ColumnRef(name=['__edb_token']))

            values.append(pgast.ResTarget(val=iterator_id))
            pathctx.put_path_identity_var(
                insert_stmt, iterator_set.path_id,
                cols[-1], force=True, env=subctx.env)

            pathctx.put_path_bond(insert_stmt, iterator_set.path_id)
            pathctx.put_path_rvar(
                wrapper,
                path_id=iterator_set.path_id,
                rvar=insert_rvar,
                aspect='identity',
                env=subctx.env,
            )

    if isinstance(ir_stmt, irast.InsertStmt) and ir_stmt.on_conflict:
        assert not insert_stmt.on_conflict

        constraint_name = f'"{ir_stmt.on_conflict.id};schemaconstr"'

        insert_stmt.on_conflict = pgast.OnConflictClause(
            action='nothing',
            infer=pgast.InferClause(conname=constraint_name),
        )

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(insert_cte)

    # Process necessary updates to the link tables.
    for shape_el, props_only in external_inserts:
        process_link_update(
            ir_stmt=ir_stmt,
            ir_set=shape_el,
            props_only=props_only,
            wrapper=wrapper,
            dml_cte=insert_cte,
            source_typeref=typeref,
            iterator_cte=iterator_cte,
            is_insert=True,
            ctx=ctx,
        )

def compile_ConfigReset(
        op: irast.ConfigReset, *,
        ctx: context.CompilerContextLevel) -> pgast.Query:

    if op.selector is None:
        # Scalar reset
        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='RESET'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            pgast.NullConstant(),
        ])
        rvar = None
    else:
        selector = dispatch.compile(op.selector, ctx=ctx)
        assert isinstance(selector, pgast.SelectStmt), \
            "expected ast.SelectStmt"

        target = selector.target_list[0]
        if not target.name:
            target = selector.target_list[0] = pgast.ResTarget(
                name=ctx.env.aliases.get('res'),
                val=target.val,
            )

        rvar = relctx.rvar_for_rel(selector, ctx=ctx)

        result_row = pgast.RowExpr(args=[
            pgast.StringConstant(val='REM'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            astutils.get_column(rvar, target.name),
        ])

    result = pgast.FuncCall(
        name=('jsonb_build_array',),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    stmt: pgast.Query

    if not op.system:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(name='_edgecon_state'),
            ),
            where_clause=astutils.new_binop(
                lexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['name']),
                    rexpr=pgast.StringConstant(val=op.name),
                    op='=',
                ),
                rexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['type']),
                    rexpr=pgast.StringConstant(val='C'),
                    op='=',
                ),
                op='AND',
            ))
    else:
        stmt = pgast.SelectStmt(
            target_list=[pgast.ResTarget(val=result)],
        )

    if rvar is not None:
        stmt.from_clause = [rvar]

    return stmt

def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    keyvals: List[pgast.BaseExpr] = []

    if irtyputils.is_persistent_tuple(styperef):
        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[pgast.ColumnRef(name=[el_type.element_name])])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return _build_json(
            'build_object',
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )
    else:
        coldeflist = []

        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(
                pgast.ColumnDef(
                    name=el_type.element_name,
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))

            val = pgast.ColumnRef(name=[el_type.element_name])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            keyvals.append(val)

        res = _build_json(
            'build_object',
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest',),
                        args=[pgast.ArrayExpr(elements=[expr])],
                        coldeflist=coldeflist,
                    )
                ])
            ])

def _compile_ql_config_op(self, ctx: CompileContext, ql: qlast.Base):
    current_tx = ctx.state.current_tx()
    schema = current_tx.get_schema()

    modaliases = ctx.state.current_tx().get_modaliases()
    session_config = ctx.state.current_tx().get_session_config()

    if ql.system and not current_tx.is_implicit():
        raise errors.QueryError(
            'CONFIGURE SYSTEM cannot be executed in a transaction block')

    ir = ql_compiler.compile_ast_to_ir(
        ql,
        schema=schema,
        modaliases=modaliases,
    )

    is_backend_setting = bool(getattr(ir, 'backend_setting', None))
    requires_restart = bool(getattr(ir, 'requires_restart', False))

    if is_backend_setting:
        if isinstance(ql, qlast.ConfigReset):
            val = None
        else:
            # Postgres is fine with all setting types to be passed
            # as strings.
            value = ireval.evaluate_to_python_val(ir.expr, schema=schema)
            val = pg_ast.StringConstant(val=str(value))

        if ir.system:
            sql_ast = pg_ast.AlterSystem(
                name=ir.backend_setting,
                value=val,
            )
        else:
            sql_ast = pg_ast.Set(
                name=ir.backend_setting,
                value=val,
            )

        sql_text = pg_codegen.generate_source(sql_ast) + ';'

        sql = (sql_text.encode(),)
    else:
        sql_text, _ = pg_compiler.compile_ir_to_sql(
            ir,
            pretty=debug.flags.edgeql_compile,
            output_format=pg_compiler.OutputFormat.JSONB)

        sql = (sql_text.encode(),)

    if not ql.system:
        config_op = ireval.evaluate_to_config_op(ir, schema=schema)

        session_config = config_op.apply(
            config.get_settings(),
            session_config)
        ctx.state.current_tx().update_session_config(session_config)
    else:
        config_op = None

    return dbstate.SessionStateQuery(
        sql=sql,
        is_backend_setting=is_backend_setting,
        is_system_setting=ql.system,
        requires_restart=requires_restart,
        config_op=config_op,
    )

def _compile_grouping_value(
        stmt: irast.GroupStmt, used_args: AbstractSet[str], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    '''Produce the value for the grouping binding saying what is grouped on'''
    assert stmt.grouping_binding
    grouprel = ctx.rel

    # If there is only one thing grouped on, just output the
    # hardcoded key name.
    if len(used_args) == 1:
        return pgast.ArrayExpr(elements=[
            pgast.StringConstant(
                val=desugar_group.key_name(list(used_args)[0]))
        ])

    using = {k: stmt.using[k] for k in used_args}

    args = [
        pathctx.get_path_var(
            grouprel, alias_set.path_id, aspect='value', env=ctx.env)
        for alias_set, _ in using.values()
    ]

    # Call grouping on each element we group on to produce a bitmask
    grouping_alias = ctx.env.aliases.get('g')
    grouping_call = pgast.FuncCall(name=('grouping',), args=args)
    subq = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(name=grouping_alias, val=grouping_call),
        ]
    )
    q = pgast.SelectStmt(
        from_clause=[
            pgast.RangeSubselect(
                subquery=subq,
                alias=pgast.Alias(aliasname=ctx.env.aliases.get())
            )
        ]
    )

    grouping_ref = pgast.ColumnRef(name=(grouping_alias,))

    # Generate a call to ARRAY[...] with a case for each grouping
    # element, then array_remove out the NULLs.
    els: List[pgast.BaseExpr] = []
    for i, name in enumerate(using):
        name = desugar_group.key_name(name)
        mask = 1 << (len(using) - i - 1)
        # (CASE (e & <mask>) WHEN 0 THEN '<name>' ELSE NULL END)

        els.append(pgast.CaseExpr(
            arg=pgast.Expr(
                kind=pgast.ExprKind.OP, name='&',
                lexpr=grouping_ref,
                rexpr=pgast.LiteralExpr(expr=str(mask))
            ),
            args=[
                pgast.CaseWhen(
                    expr=pgast.LiteralExpr(expr='0'),
                    result=pgast.StringConstant(val=name)
                )
            ],
            defresult=pgast.NullConstant()
        ))

    val = pgast.FuncCall(
        name=('array_remove',),
        args=[pgast.ArrayExpr(elements=els), pgast.NullConstant()]
    )

    q.target_list.append(pgast.ResTarget(val=val))

    return q

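# A pure-Python model of the bitmask decoding that the generated SQL
# above performs with CASE/array_remove: GROUPING(a, b, ...) sets bit
# (n - i - 1) when the i-th argument is NOT part of the current
# grouping set, so a zero bit means the key was grouped on.
def decode_grouping(mask: int, names: list[str]) -> list[str]:
    n = len(names)
    return [
        name for i, name in enumerate(names)
        if mask & (1 << (n - i - 1)) == 0
    ]

# e.g. decode_grouping(0b01, ['a', 'b']) == ['a']
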
def compile_ConfigSet(
        op: irast.ConfigSet, *,
        ctx: context.CompilerContextLevel) -> pgast.Query:

    val: pgast.BaseExpr
    with ctx.new() as subctx:
        if isinstance(op.expr, irast.EmptySet):
            # Special handling for empty sets, because we want a
            # singleton representation of the value and not an empty rel
            # in this context.
            if op.cardinality is qltypes.SchemaCardinality.ONE:
                val = pgast.NullConstant()
            else:
                val = pgast.TypeCast(
                    arg=pgast.StringConstant(val='[]'),
                    type_name=pgast.TypeName(name=('jsonb',)),
                )
        else:
            val = dispatch.compile(op.expr, ctx=subctx)
            assert isinstance(val, pgast.SelectStmt), "expected ast.SelectStmt"

            pathctx.get_path_serialized_output(
                val, op.expr.path_id, env=ctx.env)

            if op.cardinality is qltypes.SchemaCardinality.MANY:
                val = output.aggregate_json_output(val, op.expr, env=ctx.env)

    result_row = pgast.RowExpr(args=[
        pgast.StringConstant(val='SET'),
        pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
        pgast.StringConstant(val=op.name),
        val,
    ])

    result = pgast.FuncCall(
        name=('jsonb_build_array',),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    stmt: pgast.Query

    if not op.system:
        stmt = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(name='_edgecon_state'),
            ),
            select_stmt=pgast.SelectStmt(values=[
                pgast.ImplicitRowExpr(args=[
                    pgast.StringConstant(val=op.name),
                    val,
                    pgast.StringConstant(val='C'),
                ])
            ]),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=[
                    pgast.ColumnRef(name=['name']),
                    pgast.ColumnRef(name=['type']),
                ]),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(args=[val]),
                    ),
                ],
            ),
        )
    else:
        stmt = pgast.SelectStmt(
            target_list=[pgast.ResTarget(val=result)],
        )

    return stmt

def compile_ConfigSet(
        op: irast.ConfigSet, *,
        ctx: context.CompilerContextLevel) -> pgast.Query:

    with ctx.new() as subctx:
        val = dispatch.compile(op.expr, ctx=subctx)

        pathctx.get_path_serialized_output(
            val, op.expr.path_id, env=ctx.env)

        if op.cardinality is qltypes.Cardinality.MANY:
            val = output.aggregate_json_output(val, op.expr, env=ctx.env)

    result_row = pgast.RowExpr(
        args=[
            pgast.StringConstant(val='SET'),
            pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'),
            pgast.StringConstant(val=op.name),
            val,
        ]
    )

    result = pgast.FuncCall(
        name=('jsonb_build_array',),
        args=result_row.args,
        null_safe=True,
        ser_safe=True,
    )

    if not op.system:
        stmt = pgast.InsertStmt(
            relation=pgast.RangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(val=op.name),
                            val,
                            pgast.StringConstant(val='C'),
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                        pgast.ColumnRef(name=['type']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[val],
                        ),
                    ),
                ],
            ),
        )
    else:
        stmt = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=result),
            ],
        )

    return stmt

def process_link_update(
    *,
    ir_stmt: irast.MutatingStmt,
    ir_set: irast.Set,
    props_only: bool,
    is_insert: bool,
    shape_op: qlast.ShapeOp = qlast.ShapeOp.ASSIGN,
    source_typeref: irast.TypeRef,
    wrapper: pgast.Query,
    dml_cte: pgast.CommonTableExpr,
    iterator_cte: Optional[pgast.CommonTableExpr],
    ctx: context.CompilerContextLevel,
) -> pgast.CommonTableExpr:
    """Perform updates to a link relation as part of a DML statement.

    :param ir_stmt:
        IR of the statement.
    :param ir_set:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    """
    toplevel = ctx.toplevel_stmt

    rptr = ir_set.rptr
    ptrref = rptr.ptrref
    assert isinstance(ptrref, irast.PointerRef)
    target_is_scalar = irtyputils.is_scalar(ir_set.typeref)
    path_id = ir_set.path_id

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    if ptrref.material_ptr is not None:
        mptrref = ptrref.material_ptr
    else:
        mptrref = ptrref

    if mptrref.out_source.id != source_typeref.id:
        for descendant in mptrref.descendants:
            if descendant.out_source.id == source_typeref.id:
                mptrref = descendant
                break
        else:
            raise errors.InternalServerError(
                'missing PointerRef descriptor for source typeref')

    assert isinstance(mptrref, irast.PointerRef)

    target_rvar = relctx.range_for_ptrref(
        mptrref, for_mutation=True, only_self=True, ctx=ctx)
    assert isinstance(target_rvar, pgast.RelRangeVar)
    assert isinstance(target_rvar.relation, pgast.Relation)
    target_alias = target_rvar.alias.aliasname

    target_tab_name = (target_rvar.relation.schemaname,
                       target_rvar.relation.name)

    dml_cte_rvar = pgast.RelRangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=ctx.env.aliases.get('m')))

    col_data = {
        'ptr_item_id': pgast.TypeCast(
            arg=pgast.StringConstant(val=str(mptrref.id)),
            type_name=pgast.TypeName(name=('uuid',))),
        'source': pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env)
    }

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte, specified_cols = process_link_values(
        ir_stmt=ir_stmt,
        ir_expr=ir_set,
        target_tab=target_tab_name,
        col_data=col_data,
        dml_rvar=dml_cte_rvar,
        sources=[],
        props_only=props_only,
        target_is_scalar=target_is_scalar,
        iterator_cte=iterator_cte,
        ctx=ctx,
    )

    toplevel.ctes.append(data_cte)

    delqry: Optional[pgast.DeleteStmt]

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]),
            ),
        ],
        from_clause=[
            pgast.RelRangeVar(relation=data_cte),
        ],
    )

    if not is_insert and shape_op is not qlast.ShapeOp.APPEND:
        if shape_op is qlast.ShapeOp.SUBTRACT:
            data_rvar = relctx.rvar_for_rel(data_select, ctx=ctx)

            # Drop requested link records.
            delqry = pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=astutils.new_binop(
                        lexpr=col_data['source'],
                        op='=',
                        rexpr=pgast.ColumnRef(
                            name=[target_alias, 'source'],
                        ),
                    ),
                    op='AND',
                    rexpr=astutils.new_binop(
                        lexpr=pgast.ColumnRef(
                            name=[target_alias, 'target'],
                        ),
                        op='=',
                        rexpr=pgast.ColumnRef(
                            name=[data_rvar.alias.aliasname, 'target'],
                        ),
                    ),
                ),
                using_clause=[
                    dml_cte_rvar,
                    data_rvar,
                ],
                returning_list=[
                    pgast.ResTarget(
                        val=pgast.ColumnRef(
                            name=[target_alias, pgast.Star()],
                        ),
                    )
                ])
        else:
            # Drop all previous link records for this source.
            delqry = pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=col_data['source'],
                    op='=',
                    rexpr=pgast.ColumnRef(
                        name=[target_alias, 'source'],
                    ),
                ),
                using_clause=[dml_cte_rvar],
                returning_list=[
                    pgast.ResTarget(
                        val=pgast.ColumnRef(
                            name=[target_alias, pgast.Star()],
                        ),
                    )
                ])

        delcte = pgast.CommonTableExpr(
            name=ctx.env.aliases.get(hint='d'),
            query=delqry,
        )

        pathctx.put_path_value_rvar(
            delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

        # Record the effect of this removal in the relation overlay
        # context to ensure that references to the link in the result
        # of this DML statement yield the expected results.
        dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
        relctx.add_ptr_rel_overlay(
            ptrref, 'except', delcte, dml_stmts=dml_stack, ctx=ctx)
        toplevel.ctes.append(delcte)
    else:
        delqry = None

    if shape_op is qlast.ShapeOp.SUBTRACT:
        return data_cte

    cols = [pgast.ColumnRef(name=[col]) for col in specified_cols]

    conflict_cols = ['source', 'target', 'ptr_item_id']

    if is_insert:
        conflict_clause = None
    elif len(cols) == len(conflict_cols) and delqry is not None:
        # There are no link properties, so we can optimize the
        # link replacement operation by omitting the overlapping
        # link rows from deletion.
        filter_select = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=['source'])),
                pgast.ResTarget(val=pgast.ColumnRef(name=['target'])),
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
        )

        delqry.where_clause = astutils.extend_binop(
            delqry.where_clause,
            astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(
                    args=[
                        pgast.ColumnRef(name=['source']),
                        pgast.ColumnRef(name=['target']),
                    ],
                ),
                rexpr=pgast.SubLink(
                    type=pgast.SubLinkType.ALL,
                    expr=filter_select,
                ),
                op='!=',
            ))

        conflict_clause = pgast.OnConflictClause(
            action='nothing',
            infer=pgast.InferClause(
                index_elems=[
                    pgast.ColumnRef(name=[col]) for col in conflict_cols
                ]
            ),
        )
    else:
        # Inserting rows into the link table may produce cardinality
        # constraint violations, since the INSERT into the link table
        # is executed in the snapshot where the above DELETE from
        # the link table is not visible.  Hence, we need to use
        # the ON CONFLICT clause to resolve this.
        conflict_inference = []
        conflict_exc_row = []

        for col in conflict_cols:
            conflict_inference.append(pgast.ColumnRef(name=[col]))
            conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col]))

        conflict_data = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]))
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
            where_clause=astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
                rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
                op='='))

        conflict_clause = pgast.OnConflictClause(
            action='update',
            infer=pgast.InferClause(index_elems=conflict_inference),
            target_list=[
                pgast.MultiAssignRef(columns=cols, source=conflict_data)
            ])

    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=conflict_clause,
            returning_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()]))
            ]))

    pathctx.put_path_value_rvar(
        updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that references to the link in the result
    # of this DML statement yield the expected results.
    dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
    relctx.add_ptr_rel_overlay(
        ptrref, 'union', updcte, dml_stmts=dml_stack, ctx=ctx)

    toplevel.ctes.append(updcte)

    return data_cte

def compile_ConfigReset(
    op: irast.ConfigReset,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    stmt: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting:
        stmt = pgast.AlterSystem(
            name=op.backend_setting,
            value=None,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.NullConstant(),
            ],
        )

        stmt = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.NullConstant(),
                pgast.BooleanConstant(val='false'),
            ],
        )

        stmt = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SYSTEM:
        if op.selector is None:
            # Scalar reset
            result_row = pgast.RowExpr(
                args=[
                    pgast.StringConstant(val='RESET'),
                    pgast.StringConstant(val=str(op.scope)),
                    pgast.StringConstant(val=op.name),
                    pgast.NullConstant(),
                ]
            )
            rvar = None
        else:
            with context.output_format(ctx, context.OutputFormat.JSONB):
                selector = dispatch.compile(op.selector, ctx=ctx)

            assert isinstance(selector, pgast.SelectStmt), \
                "expected ast.SelectStmt"

            target = selector.target_list[0]
            if not target.name:
                target = selector.target_list[0] = pgast.ResTarget(
                    name=ctx.env.aliases.get('res'),
                    val=target.val,
                )

            rvar = relctx.rvar_for_rel(selector, ctx=ctx)

            result_row = pgast.RowExpr(
                args=[
                    pgast.StringConstant(val='REM'),
                    pgast.StringConstant(val=str(op.scope)),
                    pgast.StringConstant(val=op.name),
                    astutils.get_column(rvar, target.name),
                ]
            )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        stmt = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=result),
            ],
        )

        if rvar is not None:
            stmt.from_clause = [rvar]

    elif op.scope is qltypes.ConfigScope.DATABASE:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_db_config',
                    schemaname='edgedb',
                ),
            ),
            where_clause=astutils.new_binop(
                lexpr=pgast.ColumnRef(name=['name']),
                rexpr=pgast.StringConstant(val=op.name),
                op='=',
            ),
        )

    elif op.scope is qltypes.ConfigScope.SESSION:
        stmt = pgast.DeleteStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),
            where_clause=astutils.new_binop(
                lexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['name']),
                    rexpr=pgast.StringConstant(val=op.name),
                    op='=',
                ),
                rexpr=astutils.new_binop(
                    lexpr=pgast.ColumnRef(name=['type']),
                    rexpr=pgast.StringConstant(val='C'),
                    op='=',
                ),
                op='AND',
            )
        )

    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return stmt

def process_link_update(
        *,
        ir_stmt: irast.MutatingStmt,
        ir_set: irast.Set,
        props_only: bool,
        is_insert: bool,
        wrapper: pgast.Query,
        dml_cte: pgast.CommonTableExpr,
        iterator_cte: typing.Optional[pgast.CommonTableExpr],
        ctx: context.CompilerContextLevel) -> pgast.CommonTableExpr:
    """Perform updates to a link relation as part of a DML statement.

    :param ir_stmt:
        IR of the statement.
    :param ir_set:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    """
    toplevel = ctx.toplevel_stmt

    rptr = ir_set.rptr
    ptrref = rptr.ptrref
    assert isinstance(ptrref, irast.PointerRef)
    target_is_scalar = irtyputils.is_scalar(ptrref.dir_target)
    path_id = ir_set.path_id

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    if ptrref.material_ptr is not None:
        mptrref = ptrref.material_ptr
        assert isinstance(mptrref, irast.PointerRef)
    else:
        mptrref = ptrref

    target_rvar = relctx.range_for_ptrref(
        mptrref, include_overlays=False, only_self=True, ctx=ctx)
    assert isinstance(target_rvar, pgast.RelRangeVar)
    assert isinstance(target_rvar.relation, pgast.Relation)
    target_alias = target_rvar.alias.aliasname

    target_tab_name = (target_rvar.relation.schemaname,
                       target_rvar.relation.name)

    dml_cte_rvar = pgast.RelRangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=ctx.env.aliases.get('m')))

    col_data = {
        'ptr_item_id': pgast.TypeCast(
            arg=pgast.StringConstant(val=str(mptrref.id)),
            type_name=pgast.TypeName(name=('uuid',))),
        'source': pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env)
    }

    if not is_insert:
        # Drop all previous link records for this source.
        delcte = pgast.CommonTableExpr(
            query=pgast.DeleteStmt(
                relation=target_rvar,
                where_clause=astutils.new_binop(
                    lexpr=col_data['source'],
                    op='=',
                    rexpr=pgast.ColumnRef(
                        name=[target_alias, 'source'])),
                using_clause=[dml_cte_rvar],
                returning_list=[
                    pgast.ResTarget(
                        val=pgast.ColumnRef(
                            name=[target_alias, pgast.Star()]))
                ]),
            name=ctx.env.aliases.get(hint='d'))

        pathctx.put_path_value_rvar(
            delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

        # Record the effect of this removal in the relation overlay
        # context to ensure that references to the link in the result
        # of this DML statement yield the expected results.
        dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
        relctx.add_ptr_rel_overlay(
            ptrref, 'except', delcte, dml_stmts=dml_stack, ctx=ctx)
        toplevel.ctes.append(delcte)

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte, specified_cols = process_link_values(
        ir_stmt, ir_set, target_tab_name, col_data,
        dml_cte_rvar, [], props_only, target_is_scalar,
        iterator_cte, ctx=ctx)

    toplevel.ctes.append(data_cte)

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]))
        ],
        from_clause=[pgast.RelRangeVar(relation=data_cte)])

    cols = [pgast.ColumnRef(name=[col]) for col in specified_cols]

    if is_insert:
        conflict_clause = None
    else:
        # Inserting rows into the link table may produce cardinality
        # constraint violations, since the INSERT into the link table
        # is executed in the snapshot where the above DELETE from
        # the link table is not visible.  Hence, we need to use
        # the ON CONFLICT clause to resolve this.
        conflict_cols = ['source', 'target', 'ptr_item_id']
        conflict_inference = []
        conflict_exc_row = []

        for col in conflict_cols:
            conflict_inference.append(pgast.ColumnRef(name=[col]))
            conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col]))

        conflict_data = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]))
            ],
            from_clause=[pgast.RelRangeVar(relation=data_cte)],
            where_clause=astutils.new_binop(
                lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
                rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
                op='='))

        conflict_clause = pgast.OnConflictClause(
            action='update',
            infer=pgast.InferClause(index_elems=conflict_inference),
            target_list=[
                pgast.MultiAssignRef(columns=cols, source=conflict_data)
            ])

    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=conflict_clause,
            returning_list=[
                pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()]))
            ]))

    pathctx.put_path_value_rvar(
        updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that references to the link in the result
    # of this DML statement yield the expected results.
    dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx)
    relctx.add_ptr_rel_overlay(
        ptrref, 'union', updcte, dml_stmts=dml_stack, ctx=ctx)

    toplevel.ctes.append(updcte)

    return data_cte

def compile_ConfigSet(
    op: irast.ConfigSet,
    *,
    ctx: context.CompilerContextLevel,
) -> pgast.BaseExpr:

    val: pgast.BaseExpr
    with ctx.new() as subctx:
        if op.backend_setting:
            output_format = context.OutputFormat.NATIVE
        else:
            output_format = context.OutputFormat.JSONB

        with context.output_format(ctx, output_format):
            if isinstance(op.expr, irast.EmptySet):
                # Special handling for empty sets, because we want a
                # singleton representation of the value and not an empty
                # rel in this context.
                if op.cardinality is qltypes.SchemaCardinality.One:
                    val = pgast.NullConstant()
                elif subctx.env.output_format is context.OutputFormat.JSONB:
                    val = pgast.TypeCast(
                        arg=pgast.StringConstant(val='[]'),
                        type_name=pgast.TypeName(
                            name=('jsonb',),
                        ),
                    )
                else:
                    val = pgast.TypeCast(
                        arg=pgast.ArrayExpr(),
                        type_name=pgast.TypeName(
                            name=('text[]',),
                        ),
                    )
            else:
                val = dispatch.compile(op.expr, ctx=subctx)
                assert isinstance(val, pgast.SelectStmt), "expected SelectStmt"

                pathctx.get_path_serialized_output(
                    val, op.expr.path_id, env=ctx.env)

                if op.cardinality is qltypes.SchemaCardinality.Many:
                    val = output.aggregate_json_output(
                        val, op.expr, env=ctx.env)

    result: pgast.BaseExpr

    if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting:
        assert isinstance(val, pgast.SelectStmt) and len(val.target_list) == 1
        valval = val.target_list[0].val
        if isinstance(valval, pgast.TypeCast):
            valval = valval.arg
        if not isinstance(valval, pgast.BaseConstant):
            raise AssertionError('value is not a constant in ConfigSet')

        result = pgast.AlterSystem(
            name=op.backend_setting,
            value=valval,
        )

    elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('edgedb', '_alter_current_database_set'),
            args=[pgast.StringConstant(val=op.backend_setting), val],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting:
        fcall = pgast.FuncCall(
            name=('pg_catalog', 'set_config'),
            args=[
                pgast.StringConstant(val=op.backend_setting),
                pgast.TypeCast(
                    arg=val,
                    type_name=pgast.TypeName(name=('text',)),
                ),
                pgast.BooleanConstant(val='false'),
            ],
        )

        result = output.wrap_script_stmt(
            pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]),
            suppress_all_output=True,
            env=ctx.env,
        )

    elif op.scope is qltypes.ConfigScope.SYSTEM:
        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='SET'),
                pgast.StringConstant(val=str(op.scope)),
                pgast.StringConstant(val=op.name),
                val,
            ]
        )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=result),
            ],
        )

    elif op.scope is qltypes.ConfigScope.SESSION:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_edgecon_state',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(val=op.name),
                            val,
                            pgast.StringConstant(val='C'),
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
                pgast.ColumnRef(name=['type']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                        pgast.ColumnRef(name=['type']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[val],
                        ),
                    ),
                ],
            ),
        )

    elif op.scope is qltypes.ConfigScope.DATABASE:
        result = pgast.InsertStmt(
            relation=pgast.RelRangeVar(
                relation=pgast.Relation(
                    name='_db_config',
                    schemaname='edgedb',
                ),
            ),
            select_stmt=pgast.SelectStmt(
                values=[
                    pgast.ImplicitRowExpr(
                        args=[
                            pgast.StringConstant(val=op.name),
                            val,
                        ]
                    )
                ]
            ),
            cols=[
                pgast.ColumnRef(name=['name']),
                pgast.ColumnRef(name=['value']),
            ],
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=[
                        pgast.ColumnRef(name=['name']),
                    ],
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=[pgast.ColumnRef(name=['value'])],
                        source=pgast.RowExpr(
                            args=[val],
                        ),
                    ),
                ],
            ),
        )

    else:
        raise AssertionError(f'unexpected configuration scope: {op.scope}')

    return result

def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True)

def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    el_type = styperef.subtypes[0]
    is_tuple = irtyputils.is_tuple(el_type)

    # Tuples and bytes might need underlying casts to be done.
    if is_tuple or irtyputils.is_bytes(el_type):
        coldeflist = []
        out_alias = env.aliases.get('q')

        val: pgast.BaseExpr
        if is_tuple:
            json_args: List[pgast.BaseExpr] = []
            is_named = any(st.element_name for st in el_type.subtypes)

            for i, st in enumerate(el_type.subtypes):
                if is_named:
                    colname = st.element_name
                    assert colname
                    json_args.append(pgast.StringConstant(val=colname))
                else:
                    colname = str(i)

                val = pgast.ColumnRef(name=[colname])
                val = serialize_expr_to_json(
                    val, styperef=st, nested=True, env=env)

                json_args.append(val)

                if not irtyputils.is_persistent_tuple(el_type):
                    # Column definition list is only allowed for functions
                    # returning "record", i.e. an anonymous tuple, which
                    # would not be the case for schema-persistent tuple
                    # types.
                    coldeflist.append(
                        pgast.ColumnDef(
                            name=colname,
                            typename=pgast.TypeName(
                                name=pgtypes.pg_type_from_ir_typeref(st))))

            json_func = 'build_object' if is_named else 'build_array'
            agg_arg = _build_json(json_func, json_args, env=env)

            needs_unnest = bool(el_type.subtypes)
        else:
            assert not el_type.subtypes
            val = pgast.ColumnRef(name=[out_alias])
            agg_arg = serialize_expr_to_json(
                val, styperef=el_type, nested=True, env=env)
            needs_unnest = True

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(args=[
                        pgast.FuncCall(
                            name=_get_json_func('agg', env=env),
                            args=[agg_arg],
                        ),
                        pgast.StringConstant(val='[]'),
                    ]),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(aliasname=out_alias),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ])
            ] if needs_unnest else [],
        )
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True)