def tuple_getattr( tuple_val: pgast.BaseExpr, tuple_typeref: irast.TypeRef, attr: str, ) -> pgast.BaseExpr: ttypes = [] pgtypes = [] for i, st in enumerate(tuple_typeref.subtypes): pgtype = pg_types.pg_type_from_ir_typeref(st) pgtypes.append(pgtype) if st.element_name: ttypes.append(st.element_name) else: ttypes.append(str(i)) index = ttypes.index(attr) set_expr: pgast.BaseExpr if tuple_typeref.in_schema: set_expr = pgast.Indirection( arg=tuple_val, indirection=[ pgast.ColumnRef( name=[attr], ), ], ) else: set_expr = pgast.SelectStmt( target_list=[ pgast.ResTarget( val=pgast.ColumnRef( name=[str(index)], ), ), ], from_clause=[ pgast.RangeFunction( functions=[ pgast.FuncCall( name=('unnest',), args=[ pgast.ArrayExpr( elements=[tuple_val], ) ], coldeflist=[ pgast.ColumnDef( name=str(i), typename=pgast.TypeName( name=t ) ) for i, t in enumerate(pgtypes) ] ) ] ) ] ) return set_expr
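# A minimal sketch of the two SQL access strategies tuple_getattr() chooses
# between, written as plain strings for illustration (the table, column names
# and element types below are invented; the real ones come from the tuple's
# TypeRef).  Schema-persistent tuples are named composite types, so a simple
# attribute indirection suffices; anonymous tuples are bare "record" values
# and must be unpacked through unnest() with a column definition list,
# selecting the element by position.
PERSISTENT_TUPLE_ATTR_SQL_SKETCH = """
SELECT (q.val).name FROM some_table AS q
"""

ANONYMOUS_TUPLE_ATTR_SQL_SKETCH = """
SELECT "1"
FROM unnest(ARRAY[q.val]) AS t("0" text, "1" bigint)
"""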
def process_link_update( *, ir_stmt: irast.MutatingStmt, ir_set: irast.Set, props_only: bool, is_insert: bool, wrapper: pgast.Query, dml_cte: pgast.CommonTableExpr, iterator_cte: typing.Optional[pgast.CommonTableExpr], ctx: context.CompilerContextLevel) -> pgast.CommonTableExpr: """Perform updates to a link relation as part of a DML statement. :param ir_stmt: IR of the statement. :param ir_set: IR of the INSERT/UPDATE body element. :param props_only: Whether this link update only touches link properties. :param wrapper: Top-level SQL query. :param dml_cte: CTE representing the SQL INSERT or UPDATE to the main relation of the Object. :param iterator_cte: CTE representing the iterator range in the FOR clause of the EdgeQL DML statement. """ toplevel = ctx.toplevel_stmt rptr = ir_set.rptr ptrref = rptr.ptrref assert isinstance(ptrref, irast.PointerRef) target_is_scalar = irtyputils.is_scalar(ptrref.dir_target) path_id = ir_set.path_id # The links in the dml class shape have been derived, # but we must use the correct specialized link class for the # base material type. if ptrref.material_ptr is not None: mptrref = ptrref.material_ptr assert isinstance(mptrref, irast.PointerRef) else: mptrref = ptrref target_rvar = relctx.range_for_ptrref(mptrref, include_overlays=False, only_self=True, ctx=ctx) assert isinstance(target_rvar, pgast.RelRangeVar) assert isinstance(target_rvar.relation, pgast.Relation) target_alias = target_rvar.alias.aliasname target_tab_name = (target_rvar.relation.schemaname, target_rvar.relation.name) dml_cte_rvar = pgast.RelRangeVar( relation=dml_cte, alias=pgast.Alias(aliasname=ctx.env.aliases.get('m'))) col_data = { 'ptr_item_id': pgast.TypeCast(arg=pgast.StringConstant(val=str(mptrref.id)), type_name=pgast.TypeName(name=('uuid', ))), 'source': pathctx.get_rvar_path_identity_var(dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env) } if not is_insert: # Drop all previous link records for this source. delcte = pgast.CommonTableExpr(query=pgast.DeleteStmt( relation=target_rvar, where_clause=astutils.new_binop( lexpr=col_data['source'], op='=', rexpr=pgast.ColumnRef(name=[target_alias, 'source'])), using_clause=[dml_cte_rvar], returning_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[target_alias, pgast.Star()])) ]), name=ctx.env.aliases.get(hint='d')) pathctx.put_path_value_rvar(delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env) # Record the effect of this removal in the relation overlay # context to ensure that references to the link in the result # of this DML statement yield the expected results. dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx) relctx.add_ptr_rel_overlay(ptrref, 'except', delcte, dml_stmts=dml_stack, ctx=ctx) toplevel.ctes.append(delcte) # Turn the IR of the expression on the right side of := # into a subquery returning records for the link table. data_cte, specified_cols = process_link_values(ir_stmt, ir_set, target_tab_name, col_data, dml_cte_rvar, [], props_only, target_is_scalar, iterator_cte, ctx=ctx) toplevel.ctes.append(data_cte) data_select = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[data_cte.name, pgast.Star()])) ], from_clause=[pgast.RelRangeVar(relation=data_cte)]) cols = [pgast.ColumnRef(name=[col]) for col in specified_cols] if is_insert: conflict_clause = None else: # Inserting rows into the link table may produce cardinality # constraint violations, since the INSERT into the link table # is executed in the snapshot where the above DELETE from # the link table is not visible. 
Hence, we need to use # the ON CONFLICT clause to resolve this. conflict_cols = ['source', 'target', 'ptr_item_id'] conflict_inference = [] conflict_exc_row = [] for col in conflict_cols: conflict_inference.append(pgast.ColumnRef(name=[col])) conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col])) conflict_data = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[data_cte.name, pgast.Star()])) ], from_clause=[pgast.RelRangeVar(relation=data_cte)], where_clause=astutils.new_binop( lexpr=pgast.ImplicitRowExpr(args=conflict_inference), rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row), op='=')) conflict_clause = pgast.OnConflictClause( action='update', infer=pgast.InferClause(index_elems=conflict_inference), target_list=[ pgast.MultiAssignRef(columns=cols, source=conflict_data) ]) updcte = pgast.CommonTableExpr( name=ctx.env.aliases.get(hint='i'), query=pgast.InsertStmt( relation=target_rvar, select_stmt=data_select, cols=cols, on_conflict=conflict_clause, returning_list=[ pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()])) ])) pathctx.put_path_value_rvar(updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env) # Record the effect of this insertion in the relation overlay # context to ensure that references to the link in the result # of this DML statement yield the expected results. dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx) relctx.add_ptr_rel_overlay(ptrref, 'union', updcte, dml_stmts=dml_stack, ctx=ctx) toplevel.ctes.append(updcte) return data_cte
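# Roughly, this version of process_link_update() arranges link-table
# maintenance as a DELETE of all existing rows for the source followed by an
# INSERT of the newly compiled rows; because the INSERT runs in a snapshot
# where the DELETE is not yet visible, conflicts are resolved with
# ON CONFLICT DO UPDATE.  A hand-written sketch of the resulting SQL shape
# (aliases, table names, and the elided subqueries are placeholders, and the
# real CTEs are appended to the top-level statement individually):
LINK_UPDATE_SQL_SKETCH = """
WITH m AS (
    -- main object-table INSERT/UPDATE CTE (elided)
    ...
), d AS (
    DELETE FROM schema.link_table l
    USING m
    WHERE m.id = l.source
    RETURNING l.*
), data AS (
    -- rows compiled from the right side of ':='
    SELECT ... AS source, ... AS target, ... AS ptr_item_id
)
INSERT INTO schema.link_table (source, target, ptr_item_id)
SELECT * FROM data
ON CONFLICT (source, target, ptr_item_id) DO UPDATE
    SET (source, target, ptr_item_id) = (
        SELECT d2.source, d2.target, d2.ptr_item_id
        FROM data d2
        WHERE (d2.source, d2.target, d2.ptr_item_id)
            = (excluded.source, excluded.target, excluded.ptr_item_id)
    )
"""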
def range_for_ptrref( ptrref: irast.BasePointerRef, *, dml_source: Optional[irast.MutatingStmt] = None, for_mutation: bool = False, only_self: bool = False, ctx: context.CompilerContextLevel, ) -> pgast.PathRangeVar: """Return a Range subclass corresponding to a given ptr step. The return value may potentially be a UNION of all tables corresponding to a set of specialized links computed from the given `ptrref` taking source inheritance into account. """ tgt_col = pg_types.get_ptrref_storage_info(ptrref, resolve_type=False, link_bias=True).column_name cols = ['source', tgt_col] set_ops = [] if ptrref.union_components: refs = ptrref.union_components if only_self and len(refs) > 1: raise errors.InternalServerError('unexpected union link') else: refs = {ptrref} assert isinstance(ptrref, irast.PointerRef), \ "expected regular PointerRef" overlays = get_ptr_rel_overlays(ptrref, dml_source=dml_source, ctx=ctx) for src_ptrref in refs: assert isinstance(src_ptrref, irast.PointerRef), \ "expected regular PointerRef" table = table_from_ptrref( src_ptrref, include_descendants=not ptrref.union_is_concrete, for_mutation=for_mutation, ctx=ctx, ) qry = pgast.SelectStmt() qry.from_clause.append(table) # Make sure all property references are pulled up properly for colname in cols: selexpr = pgast.ColumnRef(name=[table.alias.aliasname, colname]) qry.target_list.append(pgast.ResTarget(val=selexpr, name=colname)) set_ops.append(('union', qry)) overlays = get_ptr_rel_overlays(src_ptrref, dml_source=dml_source, ctx=ctx) if overlays and not for_mutation: for op, cte in overlays: rvar = pgast.RelRangeVar( relation=cte, alias=pgast.Alias(aliasname=ctx.env.aliases.get(cte.name))) qry = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef(name=[col])) for col in cols ], from_clause=[rvar], ) set_ops.append((op, qry)) return range_from_queryset(set_ops, ptrref.shortname, ctx=ctx)
def range_for_ptrref( ptrref: irast.BasePointerRef, *, include_overlays: bool=True, only_self: bool=False, env: context.Environment) -> pgast.BaseRangeVar: """Return a Range subclass corresponding to a given ptr step. The return value may potentially be a UNION of all tables corresponding to a set of specialized links computed from the given `ptrref` taking source inheritance into account. """ tgt_col = pgtypes.get_ptrref_storage_info( ptrref, resolve_type=False, link_bias=True).column_name cols = [ 'source', tgt_col ] set_ops = [] if only_self: refs = {ptrref} else: if ptrref.union_components: refs = ptrref.union_components else: refs = {ptrref} | ptrref.descendants for src_ptrref in refs: table = table_from_ptrref(src_ptrref, env=env) qry = pgast.SelectStmt() qry.from_clause.append(table) qry.rptr_rvar = table # Make sure all property references are pulled up properly for colname in cols: selexpr = pgast.ColumnRef( name=[table.alias.aliasname, colname]) qry.target_list.append( pgast.ResTarget(val=selexpr, name=colname)) set_ops.append(('union', qry)) overlays = env.rel_overlays.get(src_ptrref.shortname) if overlays and include_overlays: for op, cte in overlays: rvar = pgast.RangeVar( relation=cte, alias=pgast.Alias( aliasname=env.aliases.get(cte.name) ) ) qry = pgast.SelectStmt( target_list=[ pgast.ResTarget( val=pgast.ColumnRef( name=[col] ) ) for col in cols ], from_clause=[rvar], ) set_ops.append((op, qry)) rvar = range_from_queryset(set_ops, ptrref.shortname, env=env) return rvar
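# Both variants of range_for_ptrref() expand a pointer into a set-operation
# query over the concrete link tables involved (the pointer itself, its union
# components or descendants) plus any overlay CTEs registered by enclosing
# DML.  A rough sketch of the resulting range for a link with one descendant
# and one pending insert overlay (table and CTE names are placeholders, and
# the exact set-op flavour produced by range_from_queryset is glossed over):
PTRREF_RANGE_SQL_SKETCH = """
SELECT source, target FROM schema.base_link_table
UNION ALL
SELECT source, target FROM schema.child_link_table
UNION ALL
SELECT source, target FROM link_insert_overlay_cte
"""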
def process_insert_body(ir_stmt: irast.MutatingStmt, wrapper: pgast.Query, insert_cte: pgast.CommonTableExpr, insert_rvar: pgast.PathRangeVar, *, ctx: context.CompilerContextLevel) -> None: """Generate SQL DML CTEs from an InsertStmt IR. :param ir_stmt: IR of the statement. :param wrapper: Top-level SQL query. :param insert_cte: CTE representing the SQL INSERT to the main relation of the Object. """ cols = [pgast.ColumnRef(name=['__type__'])] select = pgast.SelectStmt(target_list=[]) values = select.target_list # The main INSERT query of this statement will always be # present to insert at least the `id` and `__type__` # properties. insert_stmt = insert_cte.query assert isinstance(insert_stmt, pgast.InsertStmt) insert_stmt.cols = cols insert_stmt.select_stmt = select if ir_stmt.parent_stmt is not None: iterator_set = ir_stmt.parent_stmt.iterator_stmt else: iterator_set = None if iterator_set is not None: with ctx.substmt() as ictx: ictx.path_scope = ictx.path_scope.new_child() ictx.path_scope[iterator_set.path_id] = ictx.rel clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx) ictx.rel.path_id = iterator_set.path_id pathctx.put_path_bond(ictx.rel, iterator_set.path_id) iterator_cte = pgast.CommonTableExpr( query=ictx.rel, name=ctx.env.aliases.get('iter')) ictx.toplevel_stmt.ctes.append(iterator_cte) iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx) relctx.include_rvar(select, iterator_rvar, path_id=ictx.rel.path_id, ctx=ctx) iterator_id = pathctx.get_path_identity_var(select, iterator_set.path_id, env=ctx.env) else: iterator_cte = None iterator_id = None typeref = ir_stmt.subject.typeref if typeref.material_type is not None: typeref = typeref.material_type values.append( pgast.ResTarget(val=pgast.TypeCast( arg=pgast.StringConstant(val=str(typeref.id)), type_name=pgast.TypeName(name=('uuid', ))), )) external_inserts = [] parent_link_props = [] with ctx.newrel() as subctx: subctx.rel = select subctx.rel_hierarchy[select] = insert_stmt subctx.expr_exposed = False if iterator_cte is not None: subctx.path_scope = ctx.path_scope.new_child() subctx.path_scope[iterator_cte.query.path_id] = select # Process the Insert IR and separate links that go # into the main table from links that are inserted into # a separate link table. for shape_el in ir_stmt.subject.shape: rptr = shape_el.rptr ptrref = rptr.ptrref if ptrref.material_ptr is not None: ptrref = ptrref.material_ptr if (ptrref.parent_ptr is not None and rptr.source.path_id != ir_stmt.subject.path_id): parent_link_props.append(shape_el) continue ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=True, link_bias=False) props_only = False # First, process all local link inserts. 
if ptr_info.table_type == 'ObjectType': props_only = True field = pgast.ColumnRef(name=[ptr_info.column_name]) cols.append(field) insvalue = insert_value_for_shape_element(insert_stmt, wrapper, ir_stmt, shape_el, iterator_id, ptr_info=ptr_info, ctx=subctx) values.append(pgast.ResTarget(val=insvalue)) ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=False, link_bias=True) if ptr_info and ptr_info.table_type == 'link': external_inserts.append((shape_el, props_only)) if iterator_cte is not None: cols.append(pgast.ColumnRef(name=['__edb_token'])) values.append(pgast.ResTarget(val=iterator_id)) pathctx.put_path_identity_var(insert_stmt, iterator_set.path_id, cols[-1], force=True, env=subctx.env) pathctx.put_path_bond(insert_stmt, iterator_set.path_id) toplevel = ctx.toplevel_stmt toplevel.ctes.append(insert_cte) # Process necessary updates to the link tables. for shape_el, props_only in external_inserts: process_link_update(ir_stmt=ir_stmt, ir_set=shape_el, props_only=props_only, wrapper=wrapper, dml_cte=insert_cte, iterator_cte=iterator_cte, is_insert=True, ctx=ctx) if parent_link_props: prop_elements = [] with ctx.newscope() as scopectx: scopectx.rel = wrapper for shape_el in parent_link_props: rptr = shape_el.rptr scopectx.path_scope[rptr.source.path_id] = wrapper pathctx.put_path_rvar_if_not_exists(wrapper, rptr.source.path_id, insert_rvar, aspect='value', env=scopectx.env) dispatch.visit(shape_el, ctx=scopectx) tuple_el = astutils.tuple_element_for_shape_el(shape_el, None, ctx=scopectx) prop_elements.append(tuple_el) valtuple = pgast.TupleVar(elements=prop_elements, named=True) pathctx.put_path_value_var(wrapper, ir_stmt.subject.path_id, valtuple, force=True, env=ctx.env)
def process_insert_body(ir_stmt: irast.MutatingStmt, wrapper: pgast.SelectStmt, insert_cte: pgast.CommonTableExpr, insert_rvar: pgast.PathRangeVar, *, ctx: context.CompilerContextLevel) -> None: """Generate SQL DML CTEs from an InsertStmt IR. :param ir_stmt: IR of the statement. :param wrapper: Top-level SQL query. :param insert_cte: CTE representing the SQL INSERT to the main relation of the Object. """ cols = [pgast.ColumnRef(name=['__type__'])] select = pgast.SelectStmt(target_list=[]) values = select.target_list # The main INSERT query of this statement will always be # present to insert at least the `id` and `__type__` # properties. insert_stmt = insert_cte.query assert isinstance(insert_stmt, pgast.InsertStmt) insert_stmt.cols = cols insert_stmt.select_stmt = select if ir_stmt.parent_stmt is not None: iterator_set = ir_stmt.parent_stmt.iterator_stmt else: iterator_set = None iterator_cte: Optional[pgast.CommonTableExpr] iterator_id: Optional[pgast.BaseExpr] if iterator_set is not None: with ctx.substmt() as ictx: ictx.path_scope = ictx.path_scope.new_child() ictx.path_scope[iterator_set.path_id] = ictx.rel clauses.compile_iterator_expr(ictx.rel, iterator_set, ctx=ictx) ictx.rel.path_id = iterator_set.path_id pathctx.put_path_bond(ictx.rel, iterator_set.path_id) iterator_cte = pgast.CommonTableExpr( query=ictx.rel, name=ctx.env.aliases.get('iter')) ictx.toplevel_stmt.ctes.append(iterator_cte) iterator_rvar = relctx.rvar_for_rel(iterator_cte, ctx=ctx) relctx.include_rvar(select, iterator_rvar, path_id=ictx.rel.path_id, ctx=ctx) iterator_id = pathctx.get_path_identity_var(select, iterator_set.path_id, env=ctx.env) else: iterator_cte = None iterator_id = None typeref = ir_stmt.subject.typeref if typeref.material_type is not None: typeref = typeref.material_type values.append( pgast.ResTarget(val=pgast.TypeCast( arg=pgast.StringConstant(val=str(typeref.id)), type_name=pgast.TypeName(name=('uuid', ))), )) external_inserts = [] with ctx.newrel() as subctx: subctx.rel = select subctx.rel_hierarchy[select] = insert_stmt subctx.expr_exposed = False if iterator_cte is not None: subctx.path_scope = ctx.path_scope.new_child() subctx.path_scope[iterator_cte.query.path_id] = select # Process the Insert IR and separate links that go # into the main table from links that are inserted into # a separate link table. for shape_el, shape_op in ir_stmt.subject.shape: assert shape_op is qlast.ShapeOp.ASSIGN rptr = shape_el.rptr ptrref = rptr.ptrref if ptrref.material_ptr is not None: ptrref = ptrref.material_ptr if (ptrref.source_ptr is not None and rptr.source.path_id != ir_stmt.subject.path_id): continue ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=True, link_bias=False) props_only = False # First, process all local link inserts. if ptr_info.table_type == 'ObjectType': props_only = True field = pgast.ColumnRef(name=[ptr_info.column_name]) cols.append(field) rel = compile_insert_shape_element(insert_stmt, wrapper, ir_stmt, shape_el, iterator_id, ctx=ctx) insvalue = pathctx.get_path_value_var(rel, shape_el.path_id, env=ctx.env) if irtyputils.is_tuple(shape_el.typeref): # Tuples require an explicit cast. 
insvalue = pgast.TypeCast( arg=output.output_as_value(insvalue, env=ctx.env), type_name=pgast.TypeName(name=ptr_info.column_type, ), ) values.append(pgast.ResTarget(val=insvalue)) ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=False, link_bias=True) if ptr_info and ptr_info.table_type == 'link': external_inserts.append((shape_el, props_only)) if iterator_set is not None: cols.append(pgast.ColumnRef(name=['__edb_token'])) values.append(pgast.ResTarget(val=iterator_id)) pathctx.put_path_identity_var(insert_stmt, iterator_set.path_id, cols[-1], force=True, env=subctx.env) pathctx.put_path_bond(insert_stmt, iterator_set.path_id) pathctx.put_path_rvar( wrapper, path_id=iterator_set.path_id, rvar=insert_rvar, aspect='identity', env=subctx.env, ) if isinstance(ir_stmt, irast.InsertStmt) and ir_stmt.on_conflict: assert not insert_stmt.on_conflict constraint_name = f'"{ir_stmt.on_conflict.id};schemaconstr"' insert_stmt.on_conflict = pgast.OnConflictClause( action='nothing', infer=pgast.InferClause(conname=constraint_name), ) toplevel = ctx.toplevel_stmt toplevel.ctes.append(insert_cte) # Process necessary updates to the link tables. for shape_el, props_only in external_inserts: process_link_update( ir_stmt=ir_stmt, ir_set=shape_el, props_only=props_only, wrapper=wrapper, dml_cte=insert_cte, source_typeref=typeref, iterator_cte=iterator_cte, is_insert=True, ctx=ctx, )
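# In this newer process_insert_body(), `INSERT ... UNLESS CONFLICT` is
# lowered to an ON CONFLICT ... DO NOTHING clause that names the backing
# schema constraint.  A rough sketch under the assumption of a hypothetical
# object table and a constraint named after the EdgeDB constraint id:
UNLESS_CONFLICT_SQL_SKETCH = """
INSERT INTO schema.object_table (__type__, name)
SELECT '11111111-...'::uuid, 'Alice'
ON CONFLICT ON CONSTRAINT "<constraint-id>;schemaconstr" DO NOTHING
RETURNING *
"""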
def init_dml_stmt( ir_stmt: irast.MutatingStmt, *, ctx: context.CompilerContextLevel, parent_ctx: context.CompilerContextLevel, ) -> DMLParts: """Prepare the common structure of the query representing a DML stmt. :param ir_stmt: IR of the DML statement. :return: A DMLParts tuple containing a map of DML CTEs as well as the common range CTE for UPDATE/DELETE statements. """ clauses.init_stmt(ir_stmt, ctx, parent_ctx) range_cte: Optional[pgast.CommonTableExpr] range_rvar: Optional[pgast.RelRangeVar] if isinstance(ir_stmt, (irast.UpdateStmt, irast.DeleteStmt)): # UPDATE and DELETE operate over a range, so generate # the corresponding CTE and connect it to the DML statements. range_cte = get_dml_range(ir_stmt, ctx=ctx) range_rvar = pgast.RelRangeVar( relation=range_cte, alias=pgast.Alias(aliasname=ctx.env.aliases.get(hint='range'))) else: range_cte = None range_rvar = None top_typeref = ir_stmt.subject.typeref if top_typeref.material_type: top_typeref = top_typeref.material_type typerefs = [top_typeref] if isinstance(ir_stmt, (irast.UpdateStmt, irast.DeleteStmt)): if top_typeref.union: for component in top_typeref.union: if component.material_type: component = component.material_type typerefs.append(component) if component.descendants: typerefs.extend(component.descendants) if top_typeref.descendants: typerefs.extend(top_typeref.descendants) dml_map = {} for typeref in typerefs: dml_cte, dml_rvar = gen_dml_cte( ir_stmt, range_rvar=range_rvar, typeref=typeref, ctx=ctx, ) dml_map[typeref] = (dml_cte, dml_rvar) if len(dml_map) == 1: union_cte, union_rvar = next(iter(dml_map.values())) else: union_components = [] for _, dml_rvar in dml_map.values(): union_component = pgast.SelectStmt() relctx.include_rvar( union_component, dml_rvar, ir_stmt.subject.path_id, ctx=ctx, ) union_components.append(union_component) qry = pgast.SelectStmt( all=True, larg=union_components[0], ) for union_component in union_components[1:]: qry.op = 'UNION' qry.rarg = union_component qry = pgast.SelectStmt( all=True, larg=qry, ) union_cte = pgast.CommonTableExpr(query=qry.larg, name=ctx.env.aliases.get(hint='ma')) union_rvar = relctx.rvar_for_rel( union_cte, typeref=ir_stmt.subject.typeref, ctx=ctx, ) relctx.include_rvar(ctx.rel, union_rvar, ir_stmt.subject.path_id, ctx=ctx) pathctx.put_path_bond(ctx.rel, ir_stmt.subject.path_id) ctx.dml_stmts[ir_stmt] = union_cte return DMLParts(dml_ctes=dml_map, range_cte=range_cte, union_cte=union_cte)
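# When the DML subject has descendant types, init_dml_stmt() produces one
# DML CTE per concrete table and then folds them left-to-right into a single
# "ma" CTE using UNION ALL (op='UNION' with all=True on the set-op
# SelectStmt).  Approximate shape, with placeholder table and column names:
DML_UNION_SQL_SKETCH = """
WITH upd_base AS (UPDATE schema.base_table ... RETURNING ...),
     upd_child AS (UPDATE schema.child_table ... RETURNING ...),
     ma AS (
         SELECT ... FROM upd_base
         UNION ALL
         SELECT ... FROM upd_child
     )
SELECT ... FROM ma
"""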
def compile_ConfigSet( op: irast.ConfigSet, *, ctx: context.CompilerContextLevel, ) -> pgast.BaseExpr: val: pgast.BaseExpr with ctx.new() as subctx: if op.backend_setting: output_format = context.OutputFormat.NATIVE else: output_format = context.OutputFormat.JSONB with context.output_format(ctx, output_format): if isinstance(op.expr, irast.EmptySet): # Special handling for empty sets, because we want a # singleton representation of the value and not an empty rel # in this context. if op.cardinality is qltypes.SchemaCardinality.One: val = pgast.NullConstant() elif subctx.env.output_format is context.OutputFormat.JSONB: val = pgast.TypeCast( arg=pgast.StringConstant(val='[]'), type_name=pgast.TypeName( name=('jsonb',), ), ) else: val = pgast.TypeCast( arg=pgast.ArrayExpr(), type_name=pgast.TypeName( name=('text[]',), ), ) else: val = dispatch.compile(op.expr, ctx=subctx) assert isinstance(val, pgast.SelectStmt), "expected SelectStmt" pathctx.get_path_serialized_output( val, op.expr.path_id, env=ctx.env) if op.cardinality is qltypes.SchemaCardinality.Many: val = output.aggregate_json_output( val, op.expr, env=ctx.env) result: pgast.BaseExpr if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting: assert isinstance(val, pgast.SelectStmt) and len(val.target_list) == 1 valval = val.target_list[0].val if isinstance(valval, pgast.TypeCast): valval = valval.arg if not isinstance(valval, pgast.BaseConstant): raise AssertionError('value is not a constant in ConfigSet') result = pgast.AlterSystem( name=op.backend_setting, value=valval, ) elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting: fcall = pgast.FuncCall( name=('edgedb', '_alter_current_database_set'), args=[pgast.StringConstant(val=op.backend_setting), val], ) result = output.wrap_script_stmt( pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]), suppress_all_output=True, env=ctx.env, ) elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting: fcall = pgast.FuncCall( name=('pg_catalog', 'set_config'), args=[ pgast.StringConstant(val=op.backend_setting), pgast.TypeCast( arg=val, type_name=pgast.TypeName(name=('text',)), ), pgast.BooleanConstant(val='false'), ], ) result = output.wrap_script_stmt( pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]), suppress_all_output=True, env=ctx.env, ) elif op.scope is qltypes.ConfigScope.SYSTEM: result_row = pgast.RowExpr( args=[ pgast.StringConstant(val='SET'), pgast.StringConstant(val=str(op.scope)), pgast.StringConstant(val=op.name), val, ] ) result = pgast.FuncCall( name=('jsonb_build_array',), args=result_row.args, null_safe=True, ser_safe=True, ) result = pgast.SelectStmt( target_list=[ pgast.ResTarget( val=result, ), ], ) elif op.scope is qltypes.ConfigScope.SESSION: result = pgast.InsertStmt( relation=pgast.RelRangeVar( relation=pgast.Relation( name='_edgecon_state', ), ), select_stmt=pgast.SelectStmt( values=[ pgast.ImplicitRowExpr( args=[ pgast.StringConstant( val=op.name, ), val, pgast.StringConstant( val='C', ), ] ) ] ), cols=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['value']), pgast.ColumnRef(name=['type']), ], on_conflict=pgast.OnConflictClause( action='update', infer=pgast.InferClause( index_elems=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['type']), ], ), target_list=[ pgast.MultiAssignRef( columns=[pgast.ColumnRef(name=['value'])], source=pgast.RowExpr( args=[ val, ], ), ), ], ), ) elif op.scope is qltypes.ConfigScope.DATABASE: result = pgast.InsertStmt( relation=pgast.RelRangeVar( relation=pgast.Relation( 
name='_db_config', schemaname='edgedb', ), ), select_stmt=pgast.SelectStmt( values=[ pgast.ImplicitRowExpr( args=[ pgast.StringConstant( val=op.name, ), val, ] ) ] ), cols=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['value']), ], on_conflict=pgast.OnConflictClause( action='update', infer=pgast.InferClause( index_elems=[ pgast.ColumnRef(name=['name']), ], ), target_list=[ pgast.MultiAssignRef( columns=[pgast.ColumnRef(name=['value'])], source=pgast.RowExpr( args=[ val, ], ), ), ], ), ) else: raise AssertionError(f'unexpected configuration scope: {op.scope}') return result
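# The non-backend-setting SESSION and DATABASE branches of this
# compile_ConfigSet() become upserts into the per-connection state table and
# edgedb._db_config respectively.  An approximate rendering of the SESSION
# case (setting name and value are placeholders; the conflict action is
# expressed with a multi-assignment in the real AST, the single-column form
# below is just easier to read):
CONFIG_SET_SESSION_SQL_SKETCH = """
INSERT INTO _edgecon_state (name, value, type)
VALUES ('some::setting', <compiled value>, 'C')
ON CONFLICT (name, type) DO UPDATE
    SET value = <compiled value>
"""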
def __init__( self, prevlevel: Optional[CompilerContextLevel], mode: ContextSwitchMode, *, env: Optional[Environment] = None, scope_tree: Optional[irast.ScopeTreeNode] = None, ) -> None: if prevlevel is None: assert env is not None assert scope_tree is not None self.env = env self.argmap = collections.OrderedDict() self.next_argument = itertools.count(1) self.singleton_mode = False self.toplevel_stmt = NO_STMT self.stmt = NO_STMT self.rel = NO_STMT self.rel_hierarchy = {} self.dml_stmts = {} self.parent_rel = None self.pending_query = None self.expr_exposed = None self.volatility_ref = None self.group_by_rels = {} self.disable_semi_join = set() self.force_optional = set() self.join_target_type_filter = {} self.path_scope = collections.ChainMap() self.scope_tree = scope_tree self.type_rel_overlays = collections.defaultdict(list) self.ptr_rel_overlays = collections.defaultdict(list) else: self.env = prevlevel.env self.argmap = prevlevel.argmap self.next_argument = prevlevel.next_argument self.singleton_mode = prevlevel.singleton_mode self.toplevel_stmt = prevlevel.toplevel_stmt self.stmt = prevlevel.stmt self.rel = prevlevel.rel self.rel_hierarchy = prevlevel.rel_hierarchy self.dml_stmts = prevlevel.dml_stmts self.parent_rel = prevlevel.parent_rel self.pending_query = prevlevel.pending_query self.expr_exposed = prevlevel.expr_exposed self.volatility_ref = prevlevel.volatility_ref self.group_by_rels = prevlevel.group_by_rels self.disable_semi_join = prevlevel.disable_semi_join.copy() self.force_optional = prevlevel.force_optional.copy() self.join_target_type_filter = prevlevel.join_target_type_filter self.path_scope = prevlevel.path_scope self.scope_tree = prevlevel.scope_tree self.type_rel_overlays = prevlevel.type_rel_overlays self.ptr_rel_overlays = prevlevel.ptr_rel_overlays if mode in { ContextSwitchMode.SUBREL, ContextSwitchMode.NEWREL, ContextSwitchMode.SUBSTMT }: if self.pending_query and mode == ContextSwitchMode.SUBSTMT: self.rel = self.pending_query else: self.rel = pgast.SelectStmt() if mode != ContextSwitchMode.NEWREL: if prevlevel.parent_rel is not None: parent_rel = prevlevel.parent_rel else: parent_rel = prevlevel.rel self.rel_hierarchy[self.rel] = parent_rel self.pending_query = None self.parent_rel = None if mode == ContextSwitchMode.SUBSTMT: self.stmt = self.rel if mode == ContextSwitchMode.NEWSCOPE: self.path_scope = prevlevel.path_scope.new_child()
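# The path_scope handling above leans on collections.ChainMap: the NEWSCOPE
# branch calls new_child(), so bindings added in the inner scope shadow the
# parent without mutating it.  A minimal stdlib demonstration of that
# behaviour (names are illustrative only):
import collections

parent_scope = collections.ChainMap({'path_a': 'rel_1'})
child_scope = parent_scope.new_child()

child_scope['path_a'] = 'rel_2'   # shadows the parent binding
child_scope['path_b'] = 'rel_3'   # new binding, visible in the child only

assert parent_scope['path_a'] == 'rel_1'
assert child_scope['path_a'] == 'rel_2'
assert 'path_b' not in parent_scope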
def compile_ConfigSet( op: irast.ConfigSet, *, ctx: context.CompilerContextLevel) -> pgast.Query: with ctx.new() as subctx: val = dispatch.compile(op.expr, ctx=subctx) pathctx.get_path_serialized_output( val, op.expr.path_id, env=ctx.env) if op.cardinality is qltypes.Cardinality.MANY: val = output.aggregate_json_output(val, op.expr, env=ctx.env) result_row = pgast.RowExpr( args=[ pgast.StringConstant(val='SET'), pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'), pgast.StringConstant(val=op.name), val, ] ) result = pgast.FuncCall( name=('jsonb_build_array',), args=result_row.args, null_safe=True, ser_safe=True, ) if not op.system: stmt = pgast.InsertStmt( relation=pgast.RangeVar( relation=pgast.Relation( name='_edgecon_state', ), ), select_stmt=pgast.SelectStmt( values=[ pgast.ImplicitRowExpr( args=[ pgast.StringConstant( val=op.name, ), val, pgast.StringConstant( val='C', ), ] ) ] ), cols=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['value']), pgast.ColumnRef(name=['type']), ], on_conflict=pgast.OnConflictClause( action='update', infer=pgast.InferClause( index_elems=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['type']), ], ), target_list=[ pgast.MultiAssignRef( columns=[pgast.ColumnRef(name=['value'])], source=pgast.RowExpr( args=[ val, ], ), ), ], ), ) else: stmt = pgast.SelectStmt( target_list=[ pgast.ResTarget( val=result, ), ], ) return stmt
def compile_ConfigReset( op: irast.ConfigReset, *, ctx: context.CompilerContextLevel, ) -> pgast.BaseExpr: stmt: pgast.BaseExpr if op.scope is qltypes.ConfigScope.SYSTEM and op.backend_setting: stmt = pgast.AlterSystem( name=op.backend_setting, value=None, ) elif op.scope is qltypes.ConfigScope.DATABASE and op.backend_setting: fcall = pgast.FuncCall( name=('edgedb', '_alter_current_database_set'), args=[ pgast.StringConstant(val=op.backend_setting), pgast.NullConstant(), ], ) stmt = output.wrap_script_stmt( pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]), suppress_all_output=True, env=ctx.env, ) elif op.scope is qltypes.ConfigScope.SESSION and op.backend_setting: fcall = pgast.FuncCall( name=('pg_catalog', 'set_config'), args=[ pgast.StringConstant(val=op.backend_setting), pgast.NullConstant(), pgast.BooleanConstant(val='false'), ], ) stmt = output.wrap_script_stmt( pgast.SelectStmt(target_list=[pgast.ResTarget(val=fcall)]), suppress_all_output=True, env=ctx.env, ) elif op.scope is qltypes.ConfigScope.SYSTEM: if op.selector is None: # Scalar reset result_row = pgast.RowExpr( args=[ pgast.StringConstant(val='RESET'), pgast.StringConstant(val=str(op.scope)), pgast.StringConstant(val=op.name), pgast.NullConstant(), ] ) rvar = None else: with context.output_format(ctx, context.OutputFormat.JSONB): selector = dispatch.compile(op.selector, ctx=ctx) assert isinstance(selector, pgast.SelectStmt), \ "expected ast.SelectStmt" target = selector.target_list[0] if not target.name: target = selector.target_list[0] = pgast.ResTarget( name=ctx.env.aliases.get('res'), val=target.val, ) rvar = relctx.rvar_for_rel(selector, ctx=ctx) result_row = pgast.RowExpr( args=[ pgast.StringConstant(val='REM'), pgast.StringConstant(val=str(op.scope)), pgast.StringConstant(val=op.name), astutils.get_column(rvar, target.name), ] ) result = pgast.FuncCall( name=('jsonb_build_array',), args=result_row.args, null_safe=True, ser_safe=True, ) stmt = pgast.SelectStmt( target_list=[ pgast.ResTarget( val=result, ), ], ) if rvar is not None: stmt.from_clause = [rvar] elif op.scope is qltypes.ConfigScope.DATABASE: stmt = pgast.DeleteStmt( relation=pgast.RelRangeVar( relation=pgast.Relation( name='_db_config', schemaname='edgedb', ), ), where_clause=astutils.new_binop( lexpr=pgast.ColumnRef(name=['name']), rexpr=pgast.StringConstant(val=op.name), op='=', ), ) elif op.scope is qltypes.ConfigScope.SESSION: stmt = pgast.DeleteStmt( relation=pgast.RelRangeVar( relation=pgast.Relation( name='_edgecon_state', ), ), where_clause=astutils.new_binop( lexpr=astutils.new_binop( lexpr=pgast.ColumnRef(name=['name']), rexpr=pgast.StringConstant(val=op.name), op='=', ), rexpr=astutils.new_binop( lexpr=pgast.ColumnRef(name=['type']), rexpr=pgast.StringConstant(val='C'), op='=', ), op='AND', ) ) else: raise AssertionError(f'unexpected configuration scope: {op.scope}') return stmt
def __init__(self, prevlevel, mode): if prevlevel is None: self.env = None self.argmap = collections.OrderedDict() self.singleton_mode = False self.toplevel_stmt = None self.stmt = None self.rel = None self.rel_hierarchy = {} self.pending_query = None self.clause = None self.toplevel_clause = None self.expr_exposed = None self.volatility_ref = None self.group_by_rels = {} self.disable_semi_join = set() self.unique_paths = set() self.force_optional = set() self.join_target_type_filter = {} self.path_scope = collections.ChainMap() self.scope_tree = None self.rel_overlays = collections.defaultdict(list) else: self.env = prevlevel.env self.argmap = prevlevel.argmap self.singleton_mode = prevlevel.singleton_mode self.toplevel_stmt = prevlevel.toplevel_stmt self.stmt = prevlevel.stmt self.rel = prevlevel.rel self.rel_hierarchy = prevlevel.rel_hierarchy self.pending_query = prevlevel.pending_query self.clause = prevlevel.clause self.toplevel_clause = prevlevel.toplevel_clause self.expr_exposed = prevlevel.expr_exposed self.volatility_ref = prevlevel.volatility_ref self.group_by_rels = prevlevel.group_by_rels self.disable_semi_join = prevlevel.disable_semi_join.copy() self.unique_paths = prevlevel.unique_paths.copy() self.force_optional = prevlevel.force_optional.copy() self.join_target_type_filter = prevlevel.join_target_type_filter self.path_scope = prevlevel.path_scope self.scope_tree = prevlevel.scope_tree self.rel_overlays = prevlevel.rel_overlays if mode in {ContextSwitchMode.SUBREL, ContextSwitchMode.NEWREL, ContextSwitchMode.SUBSTMT}: if self.pending_query and mode == ContextSwitchMode.SUBSTMT: self.rel = self.pending_query else: self.rel = pgast.SelectStmt() if mode != ContextSwitchMode.NEWREL: self.rel_hierarchy[self.rel] = prevlevel.rel self.pending_query = None self.clause = 'result' if mode == ContextSwitchMode.SUBSTMT: self.stmt = self.rel if mode == ContextSwitchMode.NEWSCOPE: self.path_scope = prevlevel.path_scope.new_child()
def named_tuple_as_json_object( expr: pgast.BaseExpr, *, styperef: irast.TypeRef, env: context.Environment, ) -> pgast.BaseExpr: keyvals: List[pgast.BaseExpr] = [] if irtyputils.is_persistent_tuple(styperef): for el_type in styperef.subtypes: keyvals.append(pgast.StringConstant(val=el_type.element_name)) val: pgast.BaseExpr = pgast.Indirection( arg=expr, indirection=[pgast.ColumnRef(name=[el_type.element_name])]) if irtyputils.is_collection(el_type): val = coll_as_json_object(val, styperef=el_type, env=env) keyvals.append(val) return _build_json( 'build_object', args=keyvals, null_safe=True, ser_safe=True, nullable=expr.nullable, env=env, ) else: coldeflist = [] for el_type in styperef.subtypes: keyvals.append(pgast.StringConstant(val=el_type.element_name)) coldeflist.append( pgast.ColumnDef( name=el_type.element_name, typename=pgast.TypeName( name=pgtypes.pg_type_from_ir_typeref(el_type), ), )) val = pgast.ColumnRef(name=[el_type.element_name]) if irtyputils.is_collection(el_type): val = coll_as_json_object(val, styperef=el_type, env=env) keyvals.append(val) res = _build_json( 'build_object', args=keyvals, null_safe=True, ser_safe=True, nullable=expr.nullable, env=env, ) return pgast.SelectStmt( target_list=[ pgast.ResTarget(val=res, ), ], from_clause=[ pgast.RangeFunction(functions=[ pgast.FuncCall( name=('unnest', ), args=[pgast.ArrayExpr(elements=[expr], )], coldeflist=coldeflist, ) ]) ])
def array_as_json_object( expr: pgast.BaseExpr, *, styperef: irast.TypeRef, env: context.Environment, ) -> pgast.BaseExpr: el_type = styperef.subtypes[0] if irtyputils.is_tuple(el_type): coldeflist = [] json_args: List[pgast.BaseExpr] = [] is_named = any(st.element_name for st in el_type.subtypes) for i, st in enumerate(el_type.subtypes): if is_named: colname = st.element_name json_args.append(pgast.StringConstant(val=st.element_name)) else: colname = str(i) val: pgast.BaseExpr = pgast.ColumnRef(name=[colname]) if irtyputils.is_collection(st): val = coll_as_json_object(val, styperef=st, env=env) json_args.append(val) if not irtyputils.is_persistent_tuple(el_type): # Column definition list is only allowed for functions # returning "record", i.e. an anonymous tuple, which # would not be the case for schema-persistent tuple types. coldeflist.append( pgast.ColumnDef( name=colname, typename=pgast.TypeName( name=pgtypes.pg_type_from_ir_typeref(st)))) json_func = 'build_object' if is_named else 'build_array' return pgast.SelectStmt(target_list=[ pgast.ResTarget( val=pgast.CoalesceExpr(args=[ pgast.FuncCall( name=_get_json_func('agg', env=env), args=[_build_json(json_func, json_args, env=env)]), pgast.StringConstant(val='[]'), ]), ser_safe=True, ) ], from_clause=[ pgast.RangeFunction( alias=pgast.Alias( aliasname=env.aliases.get('q'), ), is_rowsfrom=True, functions=[ pgast.FuncCall( name=('unnest', ), args=[expr], coldeflist=coldeflist, ) ]) ]) else: return pgast.FuncCall(name=_get_json_func('to', env=env), args=[expr], null_safe=True, ser_safe=True)
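# A sketch of the SQL this array_as_json_object() produces for an array whose
# elements are named tuples that are not schema-persistent: the array is
# unnested with a column definition list and re-aggregated with jsonb_agg
# over jsonb_build_object.  Identifiers and element types are placeholders,
# and the json_* function family is used instead of jsonb_* depending on the
# selected output format:
ARRAY_OF_TUPLES_JSON_SQL_SKETCH = """
SELECT coalesce(
    jsonb_agg(jsonb_build_object('x', x, 'y', y)),
    '[]'
)
FROM unnest(<array expression>) AS q(x text, y bigint)
"""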
def new_root_rvar(ir_set: irast.Set, *, typeref: Optional[irast.TypeRef] = None, as_intersection_el: bool = False, ctx: context.CompilerContextLevel) -> pgast.PathRangeVar: if not ir_set.path_id.is_objtype_path(): raise ValueError('cannot create root rvar for non-object path') if typeref is None: typeref = ir_set.typeref if typeref.intersection: wrapper = pgast.SelectStmt() component_rvars = [] for component in typeref.intersection: component_rvar = new_root_rvar( ir_set, typeref=component, as_intersection_el=True, ctx=ctx, ) component_rvars.append(component_rvar) include_rvar(wrapper, component_rvar, ir_set.path_id, ctx=ctx) int_rvar = pgast.IntersectionRangeVar(component_rvars=component_rvars) for aspect in ('source', 'value'): pathctx.put_path_rvar(wrapper, ir_set.path_id, int_rvar, aspect=aspect, env=ctx.env) result_rvar = rvar_for_rel(wrapper, ctx=ctx) pathctx.put_rvar_path_bond(result_rvar, ir_set.path_id) return result_rvar dml_source = irutils.get_nearest_dml_stmt(ir_set) set_rvar = range_for_typeref(typeref, ir_set.path_id, dml_source=dml_source, ctx=ctx) pathctx.put_rvar_path_bond(set_rvar, ir_set.path_id) set_rvar.query.value_scope.add(ir_set.path_id) if ir_set.rptr and ir_set.rptr.is_inbound: ptrref = ir_set.rptr.ptrref ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=False, link_bias=False) if (ptr_info.table_type == 'ObjectType' and (not as_intersection_el or typeref == ptrref.dir_target)): # Inline link prefix_path_id = ir_set.path_id.src_path() assert prefix_path_id is not None, 'expected a path' rref = pgast.ColumnRef(name=[ptr_info.column_name], nullable=not ptrref.required) pathctx.put_rvar_path_bond(set_rvar, prefix_path_id) pathctx.put_rvar_path_output(set_rvar, prefix_path_id, aspect='identity', var=rref, env=ctx.env) if astutils.is_set_op_query(set_rvar.query): assert isinstance(set_rvar.query, pgast.SelectStmt) astutils.for_each_query_in_set( set_rvar.query, lambda qry: qry.target_list.append( pgast.ResTarget( val=rref, name=ptr_info.column_name, ))) return set_rvar
def array_as_json_object(expr, *, styperef, env): el_type = styperef.subtypes[0] if irtyputils.is_tuple(el_type): coldeflist = [] json_args = [] is_named = any(st.element_name for st in el_type.subtypes) for i, st in enumerate(el_type.subtypes): if is_named: colname = env.aliases.get(st.element_name) json_args.append(pgast.StringConstant(val=st.element_name)) else: colname = env.aliases.get(str(i)) val = pgast.ColumnRef(name=[colname]) if irtyputils.is_collection(st): val = coll_as_json_object(val, styperef=st, env=env) json_args.append(val) coldeflist.append( pgast.ColumnDef(name=colname, typename=pgast.TypeName( name=pgtypes.pg_type_from_ir_typeref(st)))) if is_named: json_func = _get_json_func('build_object', env=env) else: json_func = _get_json_func('build_array', env=env) return pgast.SelectStmt(target_list=[ pgast.ResTarget( val=pgast.CoalesceExpr(args=[ pgast.FuncCall(name=_get_json_func('agg', env=env), args=[ pgast.FuncCall( name=json_func, args=json_args, ) ]), pgast.StringConstant(val='[]'), ]), ser_safe=True, ) ], from_clause=[ pgast.RangeFunction( alias=pgast.Alias( aliasname=env.aliases.get('q'), ), is_rowsfrom=True, functions=[ pgast.FuncCall( name=('unnest', ), args=[expr], coldeflist=coldeflist, ) ]) ]) else: return pgast.FuncCall(name=_get_json_func('to', env=env), args=[expr], null_safe=True, ser_safe=True)
def range_for_material_objtype( typeref: irast.TypeRef, path_id: irast.PathId, *, include_overlays: bool = True, include_descendants: bool = True, dml_source: Optional[irast.MutatingStmt] = None, ctx: context.CompilerContextLevel) -> pgast.PathRangeVar: env = ctx.env if typeref.material_type is not None: typeref = typeref.material_type table_schema_name, table_name = common.get_objtype_backend_name( typeref.id, typeref.module_id, catenate=False) if typeref.name_hint.module in {'cfg', 'sys'}: # Redirect all queries to schema tables to edgedbss table_schema_name = 'edgedbss' relation = pgast.Relation( schemaname=table_schema_name, name=table_name, path_id=path_id, ) rvar: pgast.PathRangeVar = pgast.RelRangeVar( relation=relation, typeref=typeref, include_inherited=include_descendants, alias=pgast.Alias(aliasname=env.aliases.get(typeref.name_hint.name))) overlays = get_type_rel_overlays(typeref, dml_source=dml_source, ctx=ctx) if overlays and include_overlays: set_ops = [] qry = pgast.SelectStmt() qry.from_clause.append(rvar) pathctx.put_path_value_rvar(qry, path_id, rvar, env=env) if path_id.is_objtype_path(): pathctx.put_path_source_rvar(qry, path_id, rvar, env=env) pathctx.put_path_bond(qry, path_id) set_ops.append(('union', qry)) for op, cte, cte_path_id in overlays: rvar = pgast.RelRangeVar( relation=cte, typeref=typeref, alias=pgast.Alias(aliasname=env.aliases.get(hint=cte.name))) qry = pgast.SelectStmt(from_clause=[rvar], ) pathctx.put_path_value_rvar(qry, cte_path_id, rvar, env=env) if path_id.is_objtype_path(): pathctx.put_path_source_rvar(qry, cte_path_id, rvar, env=env) pathctx.put_path_bond(qry, cte_path_id) qry.view_path_id_map[path_id] = cte_path_id qry_rvar = pgast.RangeSubselect( subquery=qry, alias=pgast.Alias(aliasname=env.aliases.get(hint=cte.name))) qry2 = pgast.SelectStmt(from_clause=[qry_rvar]) pathctx.put_path_value_rvar(qry2, path_id, qry_rvar, env=env) if path_id.is_objtype_path(): pathctx.put_path_source_rvar(qry2, path_id, qry_rvar, env=env) pathctx.put_path_bond(qry2, path_id) if op == 'replace': op = 'union' set_ops = [] set_ops.append((op, qry2)) rvar = range_from_queryset(set_ops, typeref.name_hint, ctx=ctx) return rvar
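# The overlay list consulted here acts like set algebra over the rows that
# later parts of the same statement should see: 'union' adds rows produced by
# an INSERT CTE, 'except' hides rows consumed by a DELETE CTE, and 'replace'
# discards everything accumulated so far.  A toy model of those semantics
# over plain row-id sets (an analogy, not the actual implementation):
def apply_overlays(base_rows, overlays):
    visible = set(base_rows)
    for op, rows in overlays:
        if op == 'union':
            visible |= set(rows)
        elif op == 'except':
            visible -= set(rows)
        elif op == 'replace':
            visible = set(rows)
    return visible

assert apply_overlays({1, 2}, [('union', {3}), ('except', {1})]) == {2, 3}
assert apply_overlays({1, 2}, [('replace', {9})]) == {9}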
def compile_ConfigReset(op: irast.ConfigReset, *, ctx: context.CompilerContextLevel) -> pgast.Query: if op.selector is None: # Scalar reset result_row = pgast.RowExpr(args=[ pgast.StringConstant(val='RESET'), pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'), pgast.StringConstant(val=op.name), pgast.NullConstant(), ]) rvar = None else: selector = dispatch.compile(op.selector, ctx=ctx) assert isinstance(selector, pgast.SelectStmt), \ "expected ast.SelectStmt" target = selector.target_list[0] if not target.name: target = selector.target_list[0] = pgast.ResTarget( name=ctx.env.aliases.get('res'), val=target.val, ) rvar = relctx.rvar_for_rel(selector, ctx=ctx) result_row = pgast.RowExpr(args=[ pgast.StringConstant(val='REM'), pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'), pgast.StringConstant(val=op.name), astutils.get_column(rvar, target.name), ]) result = pgast.FuncCall( name=('jsonb_build_array', ), args=result_row.args, null_safe=True, ser_safe=True, ) stmt: pgast.Query if not op.system: stmt = pgast.DeleteStmt( relation=pgast.RelRangeVar(relation=pgast.Relation( name='_edgecon_state', ), ), where_clause=astutils.new_binop( lexpr=astutils.new_binop( lexpr=pgast.ColumnRef(name=['name']), rexpr=pgast.StringConstant(val=op.name), op='=', ), rexpr=astutils.new_binop( lexpr=pgast.ColumnRef(name=['type']), rexpr=pgast.StringConstant(val='C'), op='=', ), op='AND', )) else: stmt = pgast.SelectStmt(target_list=[ pgast.ResTarget(val=result, ), ], ) if rvar is not None: stmt.from_clause = [rvar] return stmt
def process_update_body( ir_stmt: irast.MutatingStmt, wrapper: pgast.Query, update_cte: pgast.CommonTableExpr, typeref: irast.TypeRef, *, ctx: context.CompilerContextLevel, ) -> None: """Generate SQL DML CTEs from an UpdateStmt IR. :param ir_stmt: IR of the statement. :param wrapper: Top-level SQL query. :param update_cte: CTE representing the SQL UPDATE to the main relation of the Object. :param typeref: The specific TypeRef of a set being updated. """ update_stmt = update_cte.query assert isinstance(update_stmt, pgast.UpdateStmt) external_updates = [] with ctx.newscope() as subctx: # It is necessary to process the expressions in # the UpdateStmt shape body in the context of the # UPDATE statement so that references to the current # values of the updated object are resolved correctly. subctx.parent_rel = update_stmt subctx.expr_exposed = False for shape_el, shape_op in ir_stmt.subject.shape: ptrref = shape_el.rptr.ptrref updvalue = shape_el.expr ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=True, link_bias=False) if ptr_info.table_type == 'ObjectType' and updvalue is not None: with subctx.newscope() as scopectx: val: pgast.BaseExpr if irtyputils.is_tuple(shape_el.typeref): # When target is a tuple type, make sure # the expression is compiled into a subquery # returning a single column that is explicitly # cast into the appropriate composite type. val = relgen.set_as_subquery( shape_el, as_value=True, explicit_cast=ptr_info.column_type, ctx=scopectx, ) else: if (isinstance(updvalue, irast.MutatingStmt) and updvalue in ctx.dml_stmts): with scopectx.substmt() as relctx: dml_cte = ctx.dml_stmts[updvalue] wrap_dml_cte(updvalue, dml_cte, ctx=relctx) pathctx.get_path_identity_output( relctx.rel, updvalue.subject.path_id, env=relctx.env, ) val = relctx.rel else: val = dispatch.compile(updvalue, ctx=scopectx) val = pgast.TypeCast(arg=val, type_name=pgast.TypeName( name=ptr_info.column_type)) if shape_op is qlast.ShapeOp.SUBTRACT: val = pgast.FuncCall( name=('nullif', ), args=[ pgast.ColumnRef(name=(ptr_info.column_name, )), val, ], ) updtarget = pgast.UpdateTarget( name=ptr_info.column_name, val=val, ) update_stmt.targets.append(updtarget) props_only = is_props_only_update(shape_el, ctx=subctx) ptr_info = pg_types.get_ptrref_storage_info(ptrref, resolve_type=False, link_bias=True) if ptr_info and ptr_info.table_type == 'link': external_updates.append((shape_el, shape_op, props_only)) if not update_stmt.targets: # No updates directly to the set target table, # so convert the UPDATE statement into a SELECT. from_clause: List[pgast.BaseRangeVar] = [update_stmt.relation] from_clause.extend(update_stmt.from_clause) update_cte.query = pgast.SelectStmt( ctes=update_stmt.ctes, target_list=update_stmt.returning_list, from_clause=from_clause, where_clause=update_stmt.where_clause, path_namespace=update_stmt.path_namespace, path_outputs=update_stmt.path_outputs, path_scope=update_stmt.path_scope, path_rvar_map=update_stmt.path_rvar_map.copy(), view_path_id_map=update_stmt.view_path_id_map.copy(), ptr_join_map=update_stmt.ptr_join_map.copy(), ) toplevel = ctx.toplevel_stmt toplevel.ctes.append(update_cte) # Process necessary updates to the link tables. for expr, shape_op, _ in external_updates: process_link_update( ir_stmt=ir_stmt, ir_set=expr, props_only=False, wrapper=wrapper, dml_cte=update_cte, iterator_cte=None, is_insert=False, shape_op=shape_op, source_typeref=typeref, ctx=ctx, )
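# For a `-=` (SUBTRACT) on a pointer stored inline in the object table, the
# update above clears the column only when it currently holds the removed
# value, by wrapping the compiled value in nullif().  Approximate SQL, with
# a made-up column name and an elided WHERE clause:
SUBTRACT_INLINE_UPDATE_SQL_SKETCH = """
UPDATE schema.object_table
SET best_friend_id = nullif(best_friend_id, (<compiled value>)::uuid)
WHERE ...
"""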
def compile_ConfigSet(op: irast.ConfigSet, *, ctx: context.CompilerContextLevel) -> pgast.Query: val: pgast.BaseExpr with ctx.new() as subctx: if isinstance(op.expr, irast.EmptySet): # Special handling for empty sets, because we want a # singleton representation of the value and not an empty rel # in this context. if op.cardinality is qltypes.SchemaCardinality.ONE: val = pgast.NullConstant() else: val = pgast.TypeCast( arg=pgast.StringConstant(val='[]'), type_name=pgast.TypeName(name=('jsonb', ), ), ) else: val = dispatch.compile(op.expr, ctx=subctx) assert isinstance(val, pgast.SelectStmt), "expected ast.SelectStmt" pathctx.get_path_serialized_output(val, op.expr.path_id, env=ctx.env) if op.cardinality is qltypes.SchemaCardinality.MANY: val = output.aggregate_json_output(val, op.expr, env=ctx.env) result_row = pgast.RowExpr(args=[ pgast.StringConstant(val='SET'), pgast.StringConstant(val='SYSTEM' if op.system else 'SESSION'), pgast.StringConstant(val=op.name), val, ]) result = pgast.FuncCall( name=('jsonb_build_array', ), args=result_row.args, null_safe=True, ser_safe=True, ) stmt: pgast.Query if not op.system: stmt = pgast.InsertStmt( relation=pgast.RelRangeVar(relation=pgast.Relation( name='_edgecon_state', ), ), select_stmt=pgast.SelectStmt(values=[ pgast.ImplicitRowExpr(args=[ pgast.StringConstant(val=op.name, ), val, pgast.StringConstant(val='C', ), ]) ]), cols=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['value']), pgast.ColumnRef(name=['type']), ], on_conflict=pgast.OnConflictClause( action='update', infer=pgast.InferClause(index_elems=[ pgast.ColumnRef(name=['name']), pgast.ColumnRef(name=['type']), ], ), target_list=[ pgast.MultiAssignRef( columns=[pgast.ColumnRef(name=['value'])], source=pgast.RowExpr(args=[ val, ], ), ), ], ), ) else: stmt = pgast.SelectStmt(target_list=[ pgast.ResTarget(val=result, ), ], ) return stmt
def process_link_update( *, ir_stmt: irast.MutatingStmt, ir_set: irast.Set, props_only: bool, is_insert: bool, shape_op: qlast.ShapeOp = qlast.ShapeOp.ASSIGN, source_typeref: irast.TypeRef, wrapper: pgast.Query, dml_cte: pgast.CommonTableExpr, iterator_cte: Optional[pgast.CommonTableExpr], ctx: context.CompilerContextLevel, ) -> pgast.CommonTableExpr: """Perform updates to a link relation as part of a DML statement. :param ir_stmt: IR of the statement. :param ir_set: IR of the INSERT/UPDATE body element. :param props_only: Whether this link update only touches link properties. :param wrapper: Top-level SQL query. :param dml_cte: CTE representing the SQL INSERT or UPDATE to the main relation of the Object. :param iterator_cte: CTE representing the iterator range in the FOR clause of the EdgeQL DML statement. """ toplevel = ctx.toplevel_stmt rptr = ir_set.rptr ptrref = rptr.ptrref assert isinstance(ptrref, irast.PointerRef) target_is_scalar = irtyputils.is_scalar(ir_set.typeref) path_id = ir_set.path_id # The links in the dml class shape have been derived, # but we must use the correct specialized link class for the # base material type. if ptrref.material_ptr is not None: mptrref = ptrref.material_ptr else: mptrref = ptrref if mptrref.out_source.id != source_typeref.id: for descendant in mptrref.descendants: if descendant.out_source.id == source_typeref.id: mptrref = descendant break else: raise errors.InternalServerError( 'missing PointerRef descriptor for source typeref') assert isinstance(mptrref, irast.PointerRef) target_rvar = relctx.range_for_ptrref(mptrref, for_mutation=True, only_self=True, ctx=ctx) assert isinstance(target_rvar, pgast.RelRangeVar) assert isinstance(target_rvar.relation, pgast.Relation) target_alias = target_rvar.alias.aliasname target_tab_name = (target_rvar.relation.schemaname, target_rvar.relation.name) dml_cte_rvar = pgast.RelRangeVar( relation=dml_cte, alias=pgast.Alias(aliasname=ctx.env.aliases.get('m'))) col_data = { 'ptr_item_id': pgast.TypeCast(arg=pgast.StringConstant(val=str(mptrref.id)), type_name=pgast.TypeName(name=('uuid', ))), 'source': pathctx.get_rvar_path_identity_var(dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env) } # Turn the IR of the expression on the right side of := # into a subquery returning records for the link table. data_cte, specified_cols = process_link_values( ir_stmt=ir_stmt, ir_expr=ir_set, target_tab=target_tab_name, col_data=col_data, dml_rvar=dml_cte_rvar, sources=[], props_only=props_only, target_is_scalar=target_is_scalar, iterator_cte=iterator_cte, ctx=ctx, ) toplevel.ctes.append(data_cte) delqry: Optional[pgast.DeleteStmt] data_select = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[data_cte.name, pgast.Star()]), ), ], from_clause=[ pgast.RelRangeVar(relation=data_cte), ], ) if not is_insert and shape_op is not qlast.ShapeOp.APPEND: if shape_op is qlast.ShapeOp.SUBTRACT: data_rvar = relctx.rvar_for_rel(data_select, ctx=ctx) # Drop requested link records. 
delqry = pgast.DeleteStmt( relation=target_rvar, where_clause=astutils.new_binop( lexpr=astutils.new_binop( lexpr=col_data['source'], op='=', rexpr=pgast.ColumnRef(name=[target_alias, 'source'], ), ), op='AND', rexpr=astutils.new_binop( lexpr=pgast.ColumnRef(name=[target_alias, 'target'], ), op='=', rexpr=pgast.ColumnRef( name=[data_rvar.alias.aliasname, 'target'], ), ), ), using_clause=[ dml_cte_rvar, data_rvar, ], returning_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[target_alias, pgast.Star()], ), ) ]) else: # Drop all previous link records for this source. delqry = pgast.DeleteStmt( relation=target_rvar, where_clause=astutils.new_binop( lexpr=col_data['source'], op='=', rexpr=pgast.ColumnRef(name=[target_alias, 'source'], ), ), using_clause=[dml_cte_rvar], returning_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[target_alias, pgast.Star()], ), ) ]) delcte = pgast.CommonTableExpr( name=ctx.env.aliases.get(hint='d'), query=delqry, ) pathctx.put_path_value_rvar(delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env) # Record the effect of this removal in the relation overlay # context to ensure that references to the link in the result # of this DML statement yield the expected results. dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx) relctx.add_ptr_rel_overlay(ptrref, 'except', delcte, dml_stmts=dml_stack, ctx=ctx) toplevel.ctes.append(delcte) else: delqry = None if shape_op is qlast.ShapeOp.SUBTRACT: return data_cte cols = [pgast.ColumnRef(name=[col]) for col in specified_cols] conflict_cols = ['source', 'target', 'ptr_item_id'] if is_insert: conflict_clause = None elif len(cols) == len(conflict_cols) and delqry is not None: # There are no link properties, so we can optimize the # link replacement operation by omitting the overlapping # link rows from deletion. filter_select = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef(name=['source']), ), pgast.ResTarget(val=pgast.ColumnRef(name=['target']), ), ], from_clause=[pgast.RelRangeVar(relation=data_cte)], ) delqry.where_clause = astutils.extend_binop( delqry.where_clause, astutils.new_binop( lexpr=pgast.ImplicitRowExpr(args=[ pgast.ColumnRef(name=['source']), pgast.ColumnRef(name=['target']), ], ), rexpr=pgast.SubLink( type=pgast.SubLinkType.ALL, expr=filter_select, ), op='!=', )) conflict_clause = pgast.OnConflictClause( action='nothing', infer=pgast.InferClause(index_elems=[ pgast.ColumnRef(name=[col]) for col in conflict_cols ]), ) else: # Inserting rows into the link table may produce cardinality # constraint violations, since the INSERT into the link table # is executed in the snapshot where the above DELETE from # the link table is not visible. Hence, we need to use # the ON CONFLICT clause to resolve this. 
conflict_inference = [] conflict_exc_row = [] for col in conflict_cols: conflict_inference.append(pgast.ColumnRef(name=[col])) conflict_exc_row.append(pgast.ColumnRef(name=['excluded', col])) conflict_data = pgast.SelectStmt( target_list=[ pgast.ResTarget(val=pgast.ColumnRef( name=[data_cte.name, pgast.Star()])) ], from_clause=[pgast.RelRangeVar(relation=data_cte)], where_clause=astutils.new_binop( lexpr=pgast.ImplicitRowExpr(args=conflict_inference), rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row), op='=')) conflict_clause = pgast.OnConflictClause( action='update', infer=pgast.InferClause(index_elems=conflict_inference), target_list=[ pgast.MultiAssignRef(columns=cols, source=conflict_data) ]) updcte = pgast.CommonTableExpr( name=ctx.env.aliases.get(hint='i'), query=pgast.InsertStmt( relation=target_rvar, select_stmt=data_select, cols=cols, on_conflict=conflict_clause, returning_list=[ pgast.ResTarget(val=pgast.ColumnRef(name=[pgast.Star()])) ])) pathctx.put_path_value_rvar(updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env) # Record the effect of this insertion in the relation overlay # context to ensure that references to the link in the result # of this DML statement yield the expected results. dml_stack = get_dml_stmt_stack(ir_stmt, ctx=ctx) relctx.add_ptr_rel_overlay(ptrref, 'union', updcte, dml_stmts=dml_stack, ctx=ctx) toplevel.ctes.append(updcte) return data_cte
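# When the link has no link properties, the newer process_link_update()
# avoids churning rows that are about to be re-asserted: the DELETE skips
# (source, target) pairs present in the new data, and the INSERT simply
# ignores the remaining conflicts.  A rough sketch with placeholder names
# (the real CTEs are appended to the top-level statement individually):
LINK_REPLACE_OPTIMIZED_SQL_SKETCH = """
WITH m AS (
    -- main object-table DML CTE (elided)
    ...
), data AS (
    SELECT ... AS source, ... AS target, ... AS ptr_item_id
), d AS (
    DELETE FROM schema.link_table l
    USING m
    WHERE m.id = l.source
      AND (l.source, l.target) != ALL (SELECT source, target FROM data)
    RETURNING l.*
)
INSERT INTO schema.link_table (source, target, ptr_item_id)
SELECT * FROM data
ON CONFLICT (source, target, ptr_item_id) DO NOTHING
"""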
NATIVE = enum.auto() #: Result data output as a single JSON string. JSON = enum.auto() #: Result data output as a single PostgreSQL JSONB type value. JSONB = enum.auto() #: Result data output as a JSON string for each element in returned set. JSON_ELEMENTS = enum.auto() #: Script mode: query result not returned, cardinality of result set #: is returned instead. SCRIPT = enum.auto() #: Like NATIVE, but objects without an explicit shape are serialized #: as UUIDs. NATIVE_INTERNAL = enum.auto() NO_STMT = pgast.SelectStmt() class CompilerContextLevel(compiler.ContextLevel): #: static compilation environment env: Environment #: mapping of named args to position argmap: Dict[str, pgast.Param] #: next argument number for named arguments next_argument: Iterator[int] #: whether compiling in singleton expression mode singleton_mode: bool
def range_for_material_objtype(
        typeref: irast.TypeRef,
        path_id: irast.PathId, *,
        include_overlays: bool=True,
        env: context.Environment) -> pgast.BaseRangeVar:

    from . import pathctx  # XXX: fix cycle

    if typeref.material_type is not None:
        typeref = typeref.material_type

    table_schema_name, table_name = common.get_objtype_backend_name(
        typeref.id, typeref.module_id, catenate=False)

    if typeref.name_hint.module in {'schema', 'cfg', 'sys'}:
        # Redirect all queries to schema tables to edgedbss
        table_schema_name = 'edgedbss'

    relation = pgast.Relation(
        schemaname=table_schema_name,
        name=table_name,
        path_id=path_id,
    )

    rvar = pgast.RangeVar(
        relation=relation,
        alias=pgast.Alias(
            aliasname=env.aliases.get(typeref.name_hint.name)
        )
    )

    overlays = env.rel_overlays.get(str(typeref.id))
    if overlays and include_overlays:
        set_ops = []

        qry = pgast.SelectStmt()
        qry.from_clause.append(rvar)
        pathctx.put_path_value_rvar(qry, path_id, rvar, env=env)
        pathctx.put_path_bond(qry, path_id)

        set_ops.append(('union', qry))

        for op, cte in overlays:
            rvar = pgast.RangeVar(
                relation=cte,
                alias=pgast.Alias(
                    aliasname=env.aliases.get(hint=cte.name)
                )
            )

            qry = pgast.SelectStmt(
                from_clause=[rvar],
            )

            pathctx.put_path_value_rvar(qry, path_id, rvar, env=env)
            pathctx.put_path_bond(qry, path_id)

            if op == 'replace':
                op = 'union'
                set_ops = []
            set_ops.append((op, qry))

        rvar = range_from_queryset(set_ops, typeref.name_hint, env=env)

    return rvar
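# --- Illustrative sketch (not part of the original source) -------------------
# The overlay loop above folds a list of (op, relation) pairs into a chain of
# set operations, where a 'replace' overlay discards everything accumulated so
# far.  The standalone sketch below mirrors that folding logic on plain
# strings; the relation names used in the comment are hypothetical.
from typing import List, Tuple


def _fold_overlays_sketch(
    base: str,
    overlays: List[Tuple[str, str]],
) -> List[Tuple[str, str]]:
    set_ops = [('union', base)]
    for op, rel in overlays:
        if op == 'replace':
            op = 'union'
            set_ops = []
        set_ops.append((op, rel))
    return set_ops


# _fold_overlays_sketch('Object', [('union', 'ins_cte'), ('except', 'del_cte')])
# -> [('union', 'Object'), ('union', 'ins_cte'), ('except', 'del_cte')]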
    def __init__(
        self,
        prevlevel: Optional[CompilerContextLevel],
        mode: ContextSwitchMode,
        *,
        env: Optional[Environment] = None,
        scope_tree: Optional[irast.ScopeTreeNode] = None,
    ) -> None:
        if prevlevel is None:
            assert env is not None
            assert scope_tree is not None

            self.env = env
            self.argmap = collections.OrderedDict()
            self.next_argument = itertools.count(1)

            self.singleton_mode = False

            self.toplevel_stmt = NO_STMT
            self.stmt = NO_STMT
            self.rel = NO_STMT
            self.rel_hierarchy = {}
            self.type_ctes = {}
            self.pending_type_ctes = set()
            self.dml_stmts = {}
            self.parent_rel = None
            self.pending_query = None
            self.materializing = frozenset()

            self.expr_exposed = None
            self.volatility_ref = ()
            self.current_insert_path_id = None

            self.group_by_rels = {}

            self.disable_semi_join = set()
            self.force_optional = set()
            self.intersection_narrowing = {}

            self.path_scope = collections.ChainMap()
            self.scope_tree = scope_tree
            self.dml_stmt_stack = []
            self.type_rel_overlays = collections.defaultdict(
                lambda: collections.defaultdict(list))
            self.ptr_rel_overlays = collections.defaultdict(
                lambda: collections.defaultdict(list))
            self.enclosing_cte_iterator = None
            self.shapes_needed_by_dml = set()

        else:
            self.env = prevlevel.env
            self.argmap = prevlevel.argmap
            self.next_argument = prevlevel.next_argument

            self.singleton_mode = prevlevel.singleton_mode

            self.toplevel_stmt = prevlevel.toplevel_stmt
            self.stmt = prevlevel.stmt
            self.rel = prevlevel.rel
            self.rel_hierarchy = prevlevel.rel_hierarchy
            self.type_ctes = prevlevel.type_ctes
            self.pending_type_ctes = prevlevel.pending_type_ctes
            self.dml_stmts = prevlevel.dml_stmts
            self.parent_rel = prevlevel.parent_rel
            self.pending_query = prevlevel.pending_query
            self.materializing = prevlevel.materializing

            self.expr_exposed = prevlevel.expr_exposed
            self.volatility_ref = prevlevel.volatility_ref
            self.current_insert_path_id = prevlevel.current_insert_path_id

            self.group_by_rels = prevlevel.group_by_rels

            self.disable_semi_join = prevlevel.disable_semi_join.copy()
            self.force_optional = prevlevel.force_optional.copy()
            self.intersection_narrowing = prevlevel.intersection_narrowing

            self.path_scope = prevlevel.path_scope
            self.scope_tree = prevlevel.scope_tree
            self.dml_stmt_stack = prevlevel.dml_stmt_stack
            self.type_rel_overlays = prevlevel.type_rel_overlays
            self.ptr_rel_overlays = prevlevel.ptr_rel_overlays
            self.enclosing_cte_iterator = prevlevel.enclosing_cte_iterator
            self.shapes_needed_by_dml = prevlevel.shapes_needed_by_dml

            if mode is ContextSwitchMode.SUBSTMT:
                if self.pending_query is not None:
                    self.rel = self.pending_query
                else:
                    self.rel = pgast.SelectStmt()
                    if prevlevel.parent_rel is not None:
                        parent_rel = prevlevel.parent_rel
                    else:
                        parent_rel = prevlevel.rel
                    self.rel_hierarchy[self.rel] = parent_rel

                self.stmt = self.rel
                self.pending_query = None
                self.parent_rel = None

            elif mode is ContextSwitchMode.SUBREL:
                self.rel = pgast.SelectStmt()
                if prevlevel.parent_rel is not None:
                    parent_rel = prevlevel.parent_rel
                else:
                    parent_rel = prevlevel.rel
                self.rel_hierarchy[self.rel] = parent_rel
                self.pending_query = None
                self.parent_rel = None

            elif mode is ContextSwitchMode.NEWREL:
                self.rel = pgast.SelectStmt()
                self.pending_query = None
                self.parent_rel = None
                self.path_scope = collections.ChainMap()
                self.rel_hierarchy = {}
                self.scope_tree = prevlevel.scope_tree.root
                self.disable_semi_join = set()
                self.force_optional = set()
                self.intersection_narrowing = {}
                self.pending_type_ctes = set(prevlevel.pending_type_ctes)

            elif mode == ContextSwitchMode.NEWSCOPE:
                self.path_scope = prevlevel.path_scope.new_child()
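# --- Illustrative sketch (not part of the original source) -------------------
# The NEWSCOPE switch above relies on collections.ChainMap.new_child(), which
# layers a fresh mapping over the parent path scope: inner bindings shadow the
# outer ones without mutating them.  The path keys and relation names below
# are hypothetical placeholders.
import collections

_outer_scope = collections.ChainMap({'Path(A)': 'rel_1'})
_inner_scope = _outer_scope.new_child()
_inner_scope['Path(A)'] = 'rel_2'

assert _outer_scope['Path(A)'] == 'rel_1'   # parent scope is unchanged
assert _inner_scope['Path(A)'] == 'rel_2'   # child scope shadows the binding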
def process_update_body(
        ir_stmt: irast.MutatingStmt,
        wrapper: pgast.Query,
        update_cte: pgast.CommonTableExpr,
        range_cte: pgast.CommonTableExpr, *,
        ctx: context.CompilerContextLevel):
    """Generate SQL DML CTEs from an UpdateStmt IR.

    :param ir_stmt:
        IR of the statement.
    :param wrapper:
        Top-level SQL query.
    :param update_cte:
        CTE representing the SQL UPDATE to the main relation of the Object.
    :param range_cte:
        CTE representing the range affected by the statement.
    """
    update_stmt = update_cte.query
    assert isinstance(update_stmt, pgast.UpdateStmt)

    external_updates = []

    toplevel = ctx.toplevel_stmt
    toplevel.ctes.append(range_cte)
    toplevel.ctes.append(update_cte)

    with ctx.newscope() as subctx:
        # It is necessary to process the expressions in
        # the UpdateStmt shape body in the context of the
        # UPDATE statement so that references to the current
        # values of the updated object are resolved correctly.
        subctx.path_scope[ir_stmt.subject.path_id] = update_stmt
        subctx.rel = update_stmt
        subctx.expr_exposed = False

        for shape_el in ir_stmt.subject.shape:
            ptrref = shape_el.rptr.ptrref
            updvalue = shape_el.expr
            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=True, link_bias=False)

            if ptr_info.table_type == 'ObjectType' and updvalue is not None:
                with subctx.newscope() as scopectx:
                    # First, process all internal link updates
                    updtarget = pgast.UpdateTarget(
                        name=ptr_info.column_name,
                        val=pgast.TypeCast(
                            arg=dispatch.compile(updvalue, ctx=scopectx),
                            type_name=pgast.TypeName(
                                name=ptr_info.column_type)))

                    update_stmt.targets.append(updtarget)

            props_only = is_props_only_update(shape_el, ctx=subctx)

            ptr_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False, link_bias=True)

            if ptr_info and ptr_info.table_type == 'link':
                external_updates.append((shape_el, props_only))

    if not update_stmt.targets:
        # No updates directly to the set target table,
        # so convert the UPDATE statement into a SELECT.
        from_clause: typing.List[pgast.BaseRangeVar] = [update_stmt.relation]
        from_clause.extend(update_stmt.from_clause)
        update_cte.query = pgast.SelectStmt(
            ctes=update_stmt.ctes,
            target_list=update_stmt.returning_list,
            from_clause=from_clause,
            where_clause=update_stmt.where_clause,
            path_namespace=update_stmt.path_namespace,
            path_outputs=update_stmt.path_outputs,
            path_scope=update_stmt.path_scope,
            path_rvar_map=update_stmt.path_rvar_map.copy(),
            view_path_id_map=update_stmt.view_path_id_map.copy(),
            ptr_join_map=update_stmt.ptr_join_map.copy(),
        )

    # Process necessary updates to the link tables.
    for expr, props_only in external_updates:
        process_link_update(
            ir_stmt=ir_stmt, ir_set=expr, props_only=False,
            wrapper=wrapper, dml_cte=update_cte, iterator_cte=None,
            is_insert=False, ctx=ctx)
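# --- Illustrative sketch (not part of the original source) -------------------
# When every updated pointer lives in a link table, the main-table UPDATE
# above ends up with no SET targets, which is not valid SQL.  The conversion
# in process_update_body() keeps the same FROM/WHERE/RETURNING data by
# emitting a plain SELECT instead.  "Object" is a hypothetical table name.
_EMPTY_UPDATE_SQL_SKETCH = """
-- instead of:  UPDATE "Object" SET /* nothing */ WHERE ... RETURNING id
SELECT id FROM "Object" WHERE ...
"""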
def range_for_ptrref(
    ptrref: irast.BasePointerRef, *,
    dml_source: Optional[irast.MutatingStmt] = None,
    for_mutation: bool = False,
    only_self: bool = False,
    ctx: context.CompilerContextLevel,
) -> pgast.PathRangeVar:
    """Return a Range subclass corresponding to a given ptr step.

    The return value may potentially be a UNION of all tables
    corresponding to a set of specialized links computed from the given
    `ptrref` taking source inheritance into account.
    """
    output_cols = ('source', 'target')

    set_ops = []

    if ptrref.union_components:
        refs = ptrref.union_components
        if only_self and len(refs) > 1:
            raise errors.InternalServerError(
                'unexpected union link')
    else:
        refs = {ptrref}
        assert isinstance(ptrref, irast.PointerRef), \
            "expected regular PointerRef"
        overlays = get_ptr_rel_overlays(
            ptrref, dml_source=dml_source, ctx=ctx)

    for src_ptrref in refs:
        assert isinstance(src_ptrref, irast.PointerRef), \
            "expected regular PointerRef"

        # Most references to inline links are dispatched to a separate
        # code path (_new_inline_pointer_rvar) by new_pointer_rvar,
        # but when we have union pointers, some might be inline.  We
        # always use the link table if it exists (because, for one thing,
        # this range needs to contain any link properties).
        ptr_info = pg_types.get_ptrref_storage_info(
            src_ptrref, resolve_type=False, link_bias=True,
        )
        if not ptr_info:
            assert ptrref.union_components
            ptr_info = pg_types.get_ptrref_storage_info(
                src_ptrref, resolve_type=False, link_bias=False,
            )

        cols = [
            'source' if ptr_info.table_type == 'link' else 'id',
            ptr_info.column_name,
        ]

        table = table_from_ptrref(
            src_ptrref,
            ptr_info,
            include_descendants=not ptrref.union_is_concrete,
            for_mutation=for_mutation,
            ctx=ctx,
        )

        qry = pgast.SelectStmt()
        qry.from_clause.append(table)

        # Make sure all property references are pulled up properly
        for colname, output_colname in zip(cols, output_cols):
            selexpr = pgast.ColumnRef(
                name=[table.alias.aliasname, colname])
            qry.target_list.append(
                pgast.ResTarget(val=selexpr, name=output_colname))

        set_ops.append(('union', qry))

        overlays = get_ptr_rel_overlays(
            src_ptrref, dml_source=dml_source, ctx=ctx)
        if overlays and not for_mutation:
            for op, cte in overlays:
                rvar = pgast.RelRangeVar(
                    relation=cte,
                    alias=pgast.Alias(
                        aliasname=ctx.env.aliases.get(cte.name)
                    )
                )

                qry = pgast.SelectStmt(
                    target_list=[
                        pgast.ResTarget(
                            val=pgast.ColumnRef(name=[col]))
                        for col in cols
                    ],
                    from_clause=[rvar],
                )
                set_ops.append((op, qry))

    return range_from_queryset(set_ops, ptrref.shortname, ctx=ctx)
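# --- Illustrative sketch (not part of the original source) -------------------
# range_for_ptrref() normalizes each union component to a uniform
# (source, target) column signature before UNIONing them.  The table and
# column names below are hypothetical: one component stored in a separate
# link table, one stored inline on the object table.
_PTR_UNION_SQL_SKETCH = """
SELECT source, target FROM "my_link_table"
UNION
SELECT id AS source, my_ptr_col AS target FROM "MyObject"
"""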
        typeref=typeref,
        include_inherited=include_descendants,
        alias=pgast.Alias(
            aliasname=env.aliases.get(typeref.name_hint.name)
        )
    )

    overlays = get_type_rel_overlays(typeref, dml_source=dml_source, ctx=ctx)
    external_rvar = ctx.env.external_rvars.get((path_id, 'source'))
    if external_rvar is not None:
        if overlays:
            raise AssertionError('cannot mix external and internal overlays')
        return external_rvar

    if overlays and include_overlays:
        set_ops = []

        qry = pgast.SelectStmt()
        qry.from_clause.append(rvar)
        pathctx.put_path_value_rvar(qry, path_id, rvar, env=env)
        if path_id.is_objtype_path():
            pathctx.put_path_source_rvar(qry, path_id, rvar, env=env)
        pathctx.put_path_bond(qry, path_id)

        set_ops.append(('union', qry))

        for op, cte, cte_path_id in overlays:
            rvar = pgast.RelRangeVar(
                relation=cte,
                typeref=typeref,
                alias=pgast.Alias(
                    aliasname=env.aliases.get(hint=cte.name)
                )
            )

            qry = pgast.SelectStmt(
                from_clause=[rvar],
            )
def range_for_typeref(
    typeref: irast.TypeRef,
    path_id: irast.PathId,
    *,
    for_mutation: bool = False,
    include_descendants: bool = True,
    dml_source: Optional[irast.MutatingStmt] = None,
    common_parent: bool = False,
    ctx: context.CompilerContextLevel,
) -> pgast.PathRangeVar:
    if typeref.common_parent is not None and common_parent:
        rvar = range_for_material_objtype(
            typeref.common_parent,
            path_id,
            include_descendants=include_descendants,
            for_mutation=for_mutation,
            dml_source=dml_source,
            ctx=ctx,
        )
    elif typeref.union:
        # Union object types are represented as a UNION of selects
        # from their children, which is, for most purposes, equivalent
        # to SELECTing from a parent table.
        set_ops = []

        for child in typeref.union:
            c_rvar = range_for_typeref(
                child,
                path_id=path_id,
                include_descendants=not typeref.union_is_concrete,
                for_mutation=for_mutation,
                dml_source=dml_source,
                ctx=ctx,
            )

            qry = pgast.SelectStmt(
                from_clause=[c_rvar],
            )

            pathctx.put_path_value_rvar(qry, path_id, c_rvar, env=ctx.env)
            if path_id.is_objtype_path():
                pathctx.put_path_source_rvar(qry, path_id, c_rvar, env=ctx.env)

            pathctx.put_path_bond(qry, path_id)

            set_ops.append(('union', qry))

        rvar = range_from_queryset(set_ops, typeref.name_hint, ctx=ctx)
    else:
        rvar = range_for_material_objtype(
            typeref,
            path_id,
            include_descendants=include_descendants,
            for_mutation=for_mutation,
            dml_source=dml_source,
            ctx=ctx,
        )

    rvar.query.path_id = path_id

    return rvar
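# --- Illustrative sketch (not part of the original source) -------------------
# A union object type such as (A | B) has no backing table of its own, so
# range_for_typeref() selects from each child and UNIONs the results.
# "A" and "B" are hypothetical child tables.
_UNION_TYPE_SQL_SKETCH = """
SELECT id FROM "A"
UNION
SELECT id FROM "B"
"""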
def init_toplevel_query(
        ir_set: irast.Set, *,
        ctx: context.CompilerContextLevel) -> None:

    ctx.toplevel_stmt = ctx.stmt = ctx.rel = pgast.SelectStmt()
    update_scope(ir_set, ctx.rel, ctx=ctx)
    ctx.pending_query = ctx.rel
def unnamed_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    vals: List[pgast.BaseExpr] = []

    if styperef.in_schema:
        for el_idx, el_type in enumerate(styperef.subtypes):
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(
                        name=[str(el_idx)],
                    ),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

    else:
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(pgast.ColumnDef(
                name=str(el_idx),
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[str(el_idx)])

            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)

            vals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=res,
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[expr],
                                )
                            ],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ])
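# --- Illustrative sketch (not part of the original source) -------------------
# For a tuple type that is not backed by a composite type in the schema,
# unnamed_tuple_as_json_object() unpacks the value with unnest() and an
# explicit column definition list, roughly as below.  The element types
# (int, text), the literal ROW value, and the jsonb builder are hypothetical;
# the real ones come from the tuple's subtypes and from _get_json_func().
_UNNAMED_TUPLE_JSON_SQL_SKETCH = """
SELECT jsonb_build_array(t."0", t."1")
FROM unnest(ARRAY[ROW(1, 'a')]) AS t("0" int, "1" text)
"""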