def top_output_as_value(stmt: pgast.Query, *, env: context.Environment) -> pgast.Query:
    """Finalize output serialization on the top level.

    For JSON output, wrap *stmt* in a query that aggregates all rows
    into a single JSON array; any other output format returns *stmt*
    unchanged.
    """
    if env.output_format != context.OutputFormat.JSON:
        return stmt

    # Wrap the original statement as a subquery to aggregate over it.
    wrapped_rvar = pgast.RangeSubselect(
        subquery=stmt,
        alias=pgast.Alias(aliasname=env.aliases.get('aggw')))

    out_res = stmt.target_list[0]
    if out_res.name is None:
        # The output column must be named so the wrapper can refer to it.
        out_res = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=out_res.val,
        )
        stmt.target_list[0] = out_res

    agg_expr = pgast.FuncCall(
        name=('jsonb_agg',),
        args=[pgast.ColumnRef(name=[out_res.name])])

    # An empty result set must serialize as '[]' rather than NULL.
    agg_expr = pgast.CoalesceExpr(
        args=[agg_expr, pgast.StringConstant(val='[]')])

    wrapper = pgast.SelectStmt(
        target_list=[pgast.ResTarget(val=agg_expr)],
        from_clause=[wrapped_rvar])

    # Hoist the CTEs onto the new top-level query.
    wrapper.ctes = stmt.ctes
    stmt.ctes = []

    return wrapper
def named_tuple_as_json_object(expr, *, stype, env):
    """Serialize a named tuple expression as a jsonb object.

    Each tuple element becomes a key/value pair keyed by the element
    name; collection-typed elements are serialized recursively.
    """
    assert stype.is_tuple() and stype.named

    kv_args = []
    for idx, (el_name, el_type) in enumerate(stype.iter_subtypes()):
        kv_args.append(pgast.StringConstant(val=el_name))

        # NULL::<pg type> sentinel communicates the expected attribute
        # type to edgedb.row_getattr_by_num.
        sentinel = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_object(env.schema, el_type)))

        el_val = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                # row_getattr_by_num attributes are 1-based.
                pgast.NumericConstant(val=str(idx + 1)),
                sentinel,
            ])

        if el_type.is_collection():
            el_val = coll_as_json_object(el_val, stype=el_type, env=env)

        kv_args.append(el_val)

    return pgast.FuncCall(
        name=('jsonb_build_object',),
        args=kv_args,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable)
def array_as_json_object(expr, *, stype, env):
    """Serialize an array expression as a jsonb value.

    Plain arrays go through ``to_jsonb`` directly.  Arrays of tuples
    are unnested and each tuple rebuilt as a jsonb object (named
    tuples) or jsonb array (unnamed tuples), then re-aggregated.
    """
    el_type = stype.element_type

    if not el_type.is_tuple():
        # Scalars and other non-tuple elements serialize natively.
        return pgast.FuncCall(
            name=('to_jsonb',),
            args=[expr],
            null_safe=True,
            ser_safe=True)

    is_named = el_type.named
    coldeflist = []
    json_args = []

    for sub_name, sub_type in el_type.iter_subtypes():
        colname = env.aliases.get(str(sub_name))

        if is_named:
            json_args.append(pgast.StringConstant(val=sub_name))

        colref = pgast.ColumnRef(name=[colname])
        if sub_type.is_collection():
            colref = coll_as_json_object(colref, stype=sub_type, env=env)
        json_args.append(colref)

        # Column definitions describe the record shape to unnest().
        coldeflist.append(
            pgast.ColumnDef(
                name=colname,
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_object(env.schema, sub_type))))

    json_func = 'jsonb_build_object' if is_named else 'jsonb_build_array'

    return pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.FuncCall(
                    name=('jsonb_agg',),
                    args=[
                        pgast.FuncCall(
                            name=(json_func,),
                            args=json_args,
                        )
                    ]),
                ser_safe=True,
            )
        ],
        from_clause=[
            pgast.RangeFunction(
                alias=pgast.Alias(
                    aliasname=env.aliases.get('q'),
                ),
                coldeflist=coldeflist,
                functions=[
                    pgast.FuncCall(
                        name=('unnest',),
                        args=[expr],
                    )
                ])
        ])
def compile_TypeRef(
        expr: irast.Base, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile a type reference into a backend type-id lookup call."""
    if expr.subtypes:
        # Generic types with subtypes are not supported here yet.
        raise NotImplementedError()

    return pgast.FuncCall(
        name=('edgedb', '_resolve_type_id'),
        args=[pgast.StringConstant(val=expr.maintype)],
    )
def compile_RawStringConstant(
        expr: irast.RawStringConstant, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile a raw string constant into a cast string literal."""
    pg_type_name = pg_types.pg_type_from_object(ctx.env.schema, expr.stype)
    return pgast.TypeCast(
        arg=pgast.StringConstant(val=expr.value),
        type_name=pgast.TypeName(name=pg_type_name),
    )
def new_static_class_rvar(
        ir_set: irast.Set, *, lateral: bool=True,
        ctx: context.CompilerContextLevel) -> pgast.BaseRangeVar:
    """Return a range var for *ir_set* filtered to its static class name."""
    set_rvar = new_root_rvar(ir_set, ctx=ctx)

    # Restrict the relation to rows whose 'name' matches the
    # materialized type name of the pointer source.
    clsname_const = pgast.StringConstant(
        val=ir_set.rptr.source.stype.material_type(ctx.env.schema).get_name(
            ctx.env.schema))
    name_col = dbobj.get_column(set_rvar, 'name', nullable=False)
    filter_cond = astutils.new_binop(name_col, clsname_const, op='=')

    filtered = pgast.SelectStmt()
    include_rvar(filtered, set_rvar, ir_set.path_id, ctx=ctx)
    filtered.where_clause = astutils.extend_binop(
        filtered.where_clause, filter_cond)

    return new_rel_rvar(ir_set, filtered, ctx=ctx)
def tuple_var_as_json_object(tvar, *, path_id, env):
    """Serialize a tuple var: jsonb array if unnamed, jsonb object if named."""
    if not tvar.named:
        return pgast.FuncCall(
            name=('jsonb_build_array',),
            args=[
                serialize_expr(el.val, path_id=el.path_id,
                               nested=True, env=env)
                for el in tvar.elements
            ],
            null_safe=True,
            ser_safe=True,
            nullable=tvar.nullable)

    kv_args = []
    for el in tvar.elements:
        rptr = el.path_id.rptr()
        if rptr is None:
            key = el.path_id.target_name.name
        else:
            key = rptr.get_shortname(env.schema).name
            if rptr.is_link_property(env.schema):
                # Link properties are keyed with a leading '@'.
                key = '@' + key
        kv_args.append(pgast.StringConstant(val=key))

        # Nested tuples must be serialized recursively; everything
        # else is emitted as-is.
        if isinstance(el.val, pgast.TupleVar):
            kv_args.append(
                serialize_expr(el.val, path_id=el.path_id,
                               nested=True, env=env))
        else:
            kv_args.append(el.val)

    return pgast.FuncCall(
        name=('jsonb_build_object',),
        args=kv_args,
        null_safe=True,
        ser_safe=True,
        nullable=tvar.nullable)
def compile_IndexIndirection(
        expr: irast.Base, *,
        ctx: context.CompilerContextLevel) -> pgast.Base:
    """Compile ``Expr[Index]`` for std::str, array<T> or std::json.

    Strings become substr calls, arrays use native index access, and
    JSON uses the ``->`` accessor — all via ``edgedb._index``, which
    also performs a boundary check on the index and may raise.
    """
    # Capture line, column and filename up front for the potential
    # boundary-check error message.
    srcctx = pgast.StringConstant(
        val=irutils.get_source_context_as_json(
            expr.index, errors.InvalidValueError))

    with ctx.new() as subctx:
        subctx.expr_exposed = False
        subject = dispatch.compile(expr.expr, ctx=subctx)
        index = dispatch.compile(expr.index, ctx=subctx)

    # Cast any std::anyint index down to int: no backend function
    # handles indexes wider than int.
    int_t = ctx.env.schema.get('std::anyint')
    if expr.index.stype.issubclass(ctx.env.schema, int_t):
        index = pgast.TypeCast(
            arg=index,
            type_name=pgast.TypeName(name=('int',)),
        )

    return pgast.FuncCall(
        name=('edgedb', '_index'),
        args=[subject, index, srcctx],
    )
def process_link_update(
        ir_stmt: irast.MutatingStmt,
        ir_expr: irast.Base,
        props_only: bool,
        wrapper: pgast.Query,
        dml_cte: pgast.CommonTableExpr,
        iterator_cte: pgast.CommonTableExpr, *,
        ctx: context.CompilerContextLevel) -> typing.Optional[pgast.Query]:
    """Perform updates to a link relation as part of a DML statement.

    Appends three CTEs to the top-level statement: a link-name-to-id
    lookup, a DELETE of existing link rows for the affected sources,
    and an INSERT (with ON CONFLICT DO UPDATE) of the new link rows.
    Both the DELETE and the INSERT are registered in the relation
    overlay context.

    :param ir_stmt:
        IR of the statement.
    :param ir_expr:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    """
    toplevel = ctx.toplevel_stmt

    # The edgedb.pointer catalog table, used to look up the link id.
    edgedb_ptr_tab = pgast.RangeVar(
        relation=pgast.Relation(schemaname='edgedb', name='pointer'),
        alias=pgast.Alias(aliasname=ctx.env.aliases.get(hint='ptr')))

    ltab_alias = edgedb_ptr_tab.alias.aliasname

    rptr = ir_expr.rptr
    ptrcls = rptr.ptrcls
    target_is_scalar = isinstance(ptrcls.get_target(ctx.env.schema),
                                  s_scalars.ScalarType)
    path_id = rptr.source.path_id.extend(
        ptrcls, rptr.direction, rptr.target.stype,
        schema=ctx.env.schema)

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    mptrcls = ptrcls.material_type(ctx.env.schema)

    # Lookup link class id by link name.
    lname_to_id = pgast.CommonTableExpr(
        query=pgast.SelectStmt(
            from_clause=[edgedb_ptr_tab],
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[ltab_alias, 'id']))
            ],
            where_clause=astutils.new_binop(
                lexpr=pgast.ColumnRef(name=[ltab_alias, 'name']),
                rexpr=pgast.StringConstant(
                    val=mptrcls.get_name(ctx.env.schema)),
                op='=')),
        name=ctx.env.aliases.get(hint='lid'))

    lname_to_id_rvar = pgast.RangeVar(relation=lname_to_id)
    toplevel.ctes.append(lname_to_id)

    # Range over the link table itself, without overlays: we are
    # about to rewrite its contents.
    target_rvar = dbobj.range_for_ptrcls(
        mptrcls, '>', include_overlays=False, env=ctx.env)
    target_alias = target_rvar.alias.aliasname
    target_tab_name = (target_rvar.relation.schemaname,
                       target_rvar.relation.name)
    tab_cols = dbobj.cols_for_pointer(mptrcls, env=ctx.env)

    dml_cte_rvar = pgast.RangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=ctx.env.aliases.get('m')))

    # Columns shared by every produced link row: the link id and the
    # identity of the source object from the main DML CTE.
    col_data = {
        'ptr_item_id': pgast.ColumnRef(name=[lname_to_id.name, 'id']),
        'source': pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env)
    }

    # Drop all previous link records for this source.
    delcte = pgast.CommonTableExpr(
        query=pgast.DeleteStmt(
            relation=target_rvar,
            where_clause=astutils.new_binop(
                lexpr=col_data['source'],
                op='=',
                rexpr=pgast.ColumnRef(name=[target_alias, 'source'])),
            using_clause=[dml_cte_rvar],
            returning_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(
                        name=[target_alias, pgast.Star()]))
            ]),
        name=ctx.env.aliases.get(hint='d'))

    pathctx.put_path_value_rvar(
        delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this removal in the relation overlay
    # context to ensure that the RETURNING clause potentially
    # referencing this link yields the expected results.
    overlays = ctx.env.rel_overlays[ptrcls.get_shortname(ctx.env.schema)]
    overlays.append(('except', delcte))
    toplevel.ctes.append(delcte)

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte, specified_cols = process_link_values(
        ir_stmt, ir_expr, target_tab_name, tab_cols, col_data,
        dml_cte_rvar, [lname_to_id_rvar], props_only, target_is_scalar,
        iterator_cte, ctx=ctx)

    toplevel.ctes.append(data_cte)

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]))
        ],
        from_clause=[pgast.RangeVar(relation=data_cte)])

    # Inserting rows into the link table may produce cardinality
    # constraint violations, since the INSERT into the link table
    # is executed in the snapshot where the above DELETE from
    # the link table is not visible.  Hence, we need to use
    # the ON CONFLICT clause to resolve this.
    conflict_cols = ['source', 'target', 'ptr_item_id']
    conflict_inference = []
    conflict_exc_row = []

    for col in conflict_cols:
        conflict_inference.append(pgast.ColumnRef(name=[col]))
        conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col]))

    # Row from the data CTE matching the conflicting (excluded) row,
    # used as the source for the DO UPDATE assignment.
    conflict_data = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()]))
        ],
        from_clause=[pgast.RangeVar(relation=data_cte)],
        where_clause=astutils.new_binop(
            lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
            rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
            op='='))

    cols = [pgast.ColumnRef(name=[col]) for col in specified_cols]
    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(index_elems=conflict_inference),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=cols, source=conflict_data)
                ]),
            returning_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[pgast.Star()]))
            ]))

    pathctx.put_path_value_rvar(
        updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that the RETURNING clause potentially
    # referencing this link yields the expected results.
    overlays = ctx.env.rel_overlays[ptrcls.get_shortname(ctx.env.schema)]
    overlays.append(('union', updcte))

    toplevel.ctes.append(updcte)

    return data_cte
def process_insert_body(
        ir_stmt: irast.MutatingStmt,
        wrapper: pgast.Query,
        insert_cte: pgast.CommonTableExpr,
        insert_rvar: pgast.BaseRangeVar, *,
        ctx: context.CompilerContextLevel) -> None:
    """Generate SQL DML CTEs from an InsertStmt IR.

    Fills in the INSERT statement inside *insert_cte* with the columns
    and values derived from the insert shape, then processes link-table
    inserts and parent link properties.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param insert_cte:
        CTE representing the SQL INSERT to the main relation of the
        Object.
    """
    cols = [pgast.ColumnRef(name=['__type__'])]
    select = pgast.SelectStmt(target_list=[])
    # `values` aliases the target list of the VALUES-producing SELECT;
    # appending to it adds a value for the matching entry in `cols`.
    values = select.target_list

    # The main INSERT query of this statement will always be
    # present to insert at least the `id` and `__type__`
    # properties.
    insert_stmt = insert_cte.query

    insert_stmt.cols = cols
    insert_stmt.select_stmt = select

    # An enclosing FOR statement contributes an iterator set.
    if ir_stmt.parent_stmt is not None:
        iterator_set = ir_stmt.parent_stmt.iterator_stmt
    else:
        iterator_set = None

    if iterator_set is not None:
        # Compile the iterator expression into its own CTE and join it
        # into the INSERT's source SELECT.
        with ctx.substmt() as ictx:
            ictx.path_scope = ictx.path_scope.new_child()
            ictx.path_scope[iterator_set.path_id] = ictx.rel
            clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx)
            ictx.rel.path_id = iterator_set.path_id
            pathctx.put_path_bond(ictx.rel, iterator_set.path_id)
            iterator_cte = pgast.CommonTableExpr(
                query=ictx.rel,
                name=ctx.env.aliases.get('iter'))
            ictx.toplevel_stmt.ctes.append(iterator_cte)
        iterator_rvar = dbobj.rvar_for_rel(iterator_cte, env=ctx.env)
        relctx.include_rvar(select, iterator_rvar,
                            path_id=ictx.rel.path_id, ctx=ctx)
        iterator_id = pathctx.get_path_identity_var(
            select, iterator_set.path_id, env=ctx.env)
    else:
        iterator_cte = None
        iterator_id = None

    # The value for the `__type__` column: the id of the object type
    # looked up by name in edgedb.objecttype.
    values.append(
        pgast.ResTarget(
            val=pgast.SelectStmt(
                target_list=[
                    pgast.ResTarget(
                        val=pgast.ColumnRef(name=['id']))
                ],
                from_clause=[
                    pgast.RangeVar(
                        relation=pgast.Relation(
                            name='objecttype', schemaname='edgedb'))
                ],
                where_clause=astutils.new_binop(
                    op='=',
                    lexpr=pgast.ColumnRef(name=['name']),
                    rexpr=pgast.StringConstant(
                        val=ir_stmt.subject.stype.get_shortname(
                            ctx.env.schema))))))

    external_inserts = []
    tuple_elements = []
    parent_link_props = []

    with ctx.newrel() as subctx:
        subctx.rel = select
        subctx.rel_hierarchy[select] = insert_stmt

        subctx.expr_exposed = False

        if iterator_cte is not None:
            subctx.path_scope = ctx.path_scope.new_child()
            subctx.path_scope[iterator_cte.query.path_id] = select

        # Process the Insert IR and separate links that go
        # into the main table from links that are inserted into
        # a separate link table.
        for shape_el in ir_stmt.subject.shape:
            rptr = shape_el.rptr
            ptrcls = rptr.ptrcls.material_type(ctx.env.schema)

            # Link properties whose source is not the insert subject
            # belong to the parent link and are handled separately below.
            if (ptrcls.is_link_property(ctx.env.schema) and
                    rptr.source.path_id != ir_stmt.subject.path_id):
                parent_link_props.append(shape_el)
                continue

            ptr_info = pg_types.get_pointer_storage_info(
                ptrcls, schema=subctx.env.schema, resolve_type=True,
                link_bias=False)

            props_only = False

            # First, process all local link inserts.
            if ptr_info.table_type == 'ObjectType':
                props_only = True
                field = pgast.ColumnRef(name=[ptr_info.column_name])
                cols.append(field)

                insvalue = insert_value_for_shape_element(
                    insert_stmt, wrapper, ir_stmt, shape_el, iterator_id,
                    ptr_info=ptr_info, ctx=subctx)

                tuple_el = astutils.tuple_element_for_shape_el(
                    shape_el, field, ctx=subctx)
                tuple_elements.append(tuple_el)
                values.append(pgast.ResTarget(val=insvalue))

            # Re-query with link bias to see whether this pointer also
            # has a separate link-table representation.
            ptr_info = pg_types.get_pointer_storage_info(
                ptrcls, resolve_type=False, link_bias=True,
                schema=ctx.env.schema)

            if ptr_info and ptr_info.table_type == 'link':
                external_inserts.append((shape_el, props_only))

        if iterator_cte is not None:
            # Thread the iterator identity through the INSERT so that
            # link updates can correlate rows with iterations.
            cols.append(pgast.ColumnRef(name=['__edb_token']))

            values.append(pgast.ResTarget(val=iterator_id))
            pathctx.put_path_identity_var(
                insert_stmt, iterator_set.path_id,
                cols[-1], force=True, env=subctx.env)
            pathctx.put_path_bond(insert_stmt, iterator_set.path_id)

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(insert_cte)

    # Process necessary updates to the link tables.
    for shape_el, props_only in external_inserts:
        process_link_update(
            ir_stmt, shape_el, props_only, wrapper, insert_cte,
            iterator_cte, ctx=ctx)

    if parent_link_props:
        prop_elements = []

        with ctx.newscope() as scopectx:
            scopectx.rel = wrapper

            for shape_el in parent_link_props:
                rptr = shape_el.rptr
                scopectx.path_scope[rptr.source.path_id] = wrapper
                pathctx.put_path_rvar_if_not_exists(
                    wrapper, rptr.source.path_id, insert_rvar,
                    aspect='value', env=scopectx.env)
                dispatch.visit(shape_el, ctx=scopectx)
                tuple_el = astutils.tuple_element_for_shape_el(
                    shape_el, None, ctx=scopectx)
                prop_elements.append(tuple_el)

        valtuple = pgast.TupleVar(elements=prop_elements, named=True)
        pathctx.put_path_value_var(
            wrapper, ir_stmt.subject.path_id,
            valtuple, force=True, env=ctx.env)