def strip_output_var(
        var: pgast.OutputVar, *,
        optional: typing.Optional[bool] = None,
        nullable: typing.Optional[bool] = None) -> pgast.OutputVar:
    """Reduce *var* to a bare output reference.

    Tuple vars are rebuilt element-by-element; column refs are reduced
    to their last name component.  When given, *optional*/*nullable*
    override the corresponding flags of the original.
    """
    if isinstance(var, pgast.TupleVar):
        stripped_els = []
        for element in var.elements:
            # Element names may be plain strings or nested output vars.
            if isinstance(element.name, str):
                el_val = pgast.ColumnRef(name=[element.name])
            else:
                el_val = strip_output_var(element.name)
            stripped_els.append(
                pgast.TupleElement(
                    path_id=element.path_id,
                    name=element.name,
                    val=el_val,
                )
            )
        return pgast.TupleVar(stripped_els, named=var.named)
    else:
        return pgast.ColumnRef(
            name=[var.name[-1]],
            optional=var.optional if optional is None else optional,
            nullable=var.nullable if nullable is None else nullable,
        )
def _compile_set_in_singleton_mode(
        node: irast.Set, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR set assuming a singleton (simple-expression) context.

    Fix: the original had two byte-identical branches for the scalar
    and non-scalar cases; they are merged into a single fallback.
    """
    if isinstance(node, irast.EmptySet):
        return pgast.NullConstant()
    elif node.expr is not None:
        return dispatch.compile(node.expr, ctx=ctx)
    else:
        if node.rptr:
            ptrref = node.rptr.ptrref
            source = node.rptr.source

            # Singleton mode only supports single-step paths.
            if ptrref.parent_ptr is None and source.rptr is not None:
                raise RuntimeError('unexpectedly long path in simple expr')

            ptr_stor_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False)
            colref = pgast.ColumnRef(name=[ptr_stor_info.column_name])
        else:
            # The scalar and non-scalar cases produced identical
            # references, so they are handled uniformly here.
            colref = pgast.ColumnRef(
                name=[common.edgedb_name_to_pg_name(str(node.typeref.id))])

        return colref
def _new_mapped_pointer_rvar(
        ir_ptr: irast.Pointer, *,
        ctx: context.CompilerContextLevel) -> pgast.PathRangeVar:
    """Construct a range var for a pointer and register its path outputs.

    Returns a range over the pointer's backing relation with the
    source/target columns mapped onto the source and target path ids,
    honouring the traversal direction.
    """
    ptrref = ir_ptr.ptrref
    dml_source = irutils.get_nearest_dml_stmt(ir_ptr.source)
    ptr_rvar = range_for_pointer(ir_ptr, dml_source=dml_source, ctx=ctx)

    # The source endpoint is always stored in the 'source' column.
    src_col = 'source'
    source_ref = pgast.ColumnRef(name=[src_col], nullable=False)

    # Non-computable object links store the target in a
    # pointer-specific column; everything else uses 'target'.
    if (irtyputils.is_object(ptrref.out_target)
            and not irtyputils.is_computable_ptrref(ptrref)):
        tgt_ptr_info = pg_types.get_ptrref_storage_info(
            ptrref, link_bias=True, resolve_type=False)
        tgt_col = tgt_ptr_info.column_name
    else:
        tgt_col = 'target'

    target_ref = pgast.ColumnRef(
        name=[tgt_col], nullable=not ptrref.required)

    # Set up references according to the link direction.
    if ir_ptr.direction == s_pointers.PointerDirection.Inbound:
        near_ref = target_ref
        far_ref = source_ref
    else:
        near_ref = source_ref
        far_ref = target_ref

    src_pid = ir_ptr.source.path_id
    tgt_pid = ir_ptr.target.path_id
    ptr_pid = tgt_pid.ptr_path()

    ptr_rvar.query.path_id = ptr_pid

    # The near endpoint serves as both identity and value of the
    # source path; the far endpoint is the target's value.
    pathctx.put_rvar_path_bond(ptr_rvar, src_pid)
    pathctx.put_rvar_path_output(ptr_rvar, src_pid, aspect='identity',
                                 var=near_ref, env=ctx.env)
    pathctx.put_rvar_path_output(ptr_rvar, src_pid, aspect='value',
                                 var=near_ref, env=ctx.env)
    pathctx.put_rvar_path_output(ptr_rvar, tgt_pid, aspect='value',
                                 var=far_ref, env=ctx.env)

    # Object-type targets additionally get an identity output and a
    # path bond of their own.
    if tgt_pid.is_objtype_path():
        pathctx.put_rvar_path_bond(ptr_rvar, tgt_pid)
        pathctx.put_rvar_path_output(ptr_rvar, tgt_pid, aspect='identity',
                                     var=far_ref, env=ctx.env)

    return ptr_rvar
def _get_rel_object_id_output(
        rel: pgast.BaseRelation,
        path_id: irast.PathId, *,
        aspect: str,
        ptr_info: typing.Optional[pg_types.PointerStorageInfo] = None,
        env: context.Environment) -> pgast.OutputVar:
    """Return (creating and caching if needed) the object-id output of *rel*."""
    cached = rel.path_outputs.get((path_id, aspect))
    if cached is not None:
        return cached

    if isinstance(rel, pgast.NullRelation):
        # A null relation has no real ``id`` column: synthesize a
        # NULL::uuid target and reference it by a fresh alias.
        alias = env.aliases.get('id')
        null_id = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(name=('uuid',)),
        )
        rel.target_list.append(pgast.ResTarget(name=alias, val=null_id))
        out = pgast.ColumnRef(name=[alias], nullable=True)
    else:
        out = pgast.ColumnRef(name=['id'], nullable=False)

    _put_path_output_var(rel, path_id, aspect, out, env=env)
    return out
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple expression as a JSON array.

    Schema-persistent tuples can be accessed with plain field
    indirection; anonymous (record) tuples must be unpacked via
    unnest() with an explicit column definition list.
    """
    vals = []

    if styperef.in_schema:
        for el_idx, el_type in enumerate(styperef.subtypes):
            # Persistent tuple elements are addressed by position.
            val = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)], ),
                ],
            )
            # Collections nested in the tuple need recursive
            # JSON serialization.
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)
    else:
        # Anonymous tuple: build a coldeflist so that unnest() can
        # decompose the record value into typed columns.
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(pgast.ColumnDef(
                name=str(el_idx),
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[str(el_idx)])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=vals, null_safe=True, ser_safe=True,
            nullable=expr.nullable)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ])
def tuple_getattr(tuple_val, tuple_typeref, attr):
    """Compile an attribute access on a tuple value.

    *attr* may be an element name or a stringified position.
    """
    el_names = []
    el_pgtypes = []
    for pos, subtype in enumerate(tuple_typeref.subtypes):
        el_pgtypes.append(pg_types.pg_type_from_ir_typeref(subtype))
        el_names.append(subtype.element_name or str(pos))

    index = el_names.index(attr)

    if tuple_typeref.in_schema:
        # Schema-persistent tuples are real composite types, so a
        # simple field indirection works.
        return pgast.Indirection(
            arg=tuple_val,
            indirection=[pgast.ColumnRef(name=[attr])],
        )

    # Anonymous tuples are "record" values: decompose them via
    # unnest() with an explicit column definition list and select
    # the wanted element by position.
    coldeflist = [
        pgast.ColumnDef(
            name=str(pos),
            typename=pgast.TypeName(name=pgtype),
        )
        for pos, pgtype in enumerate(el_pgtypes)
    ]
    return pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(val=pgast.ColumnRef(name=[str(index)])),
        ],
        from_clause=[
            pgast.RangeFunction(
                functions=[
                    pgast.FuncCall(
                        name=('unnest',),
                        args=[pgast.ArrayExpr(elements=[tuple_val])],
                        coldeflist=coldeflist,
                    )
                ]
            )
        ],
    )
def range_for_ptrref(
        ptrref: irast.BasePointerRef, *,
        include_overlays: bool = True,
        only_self: bool = False,
        env: context.Environment) -> pgast.BaseRangeVar:
    """Return a Range subclass corresponding to a given ptr step.

    The return value may potentially be a UNION of all tables
    corresponding to a set of specialized links computed from the
    given `ptrref` taking source inheritance into account.
    """
    # Fix: the docstring opener was ``""""`` (four quotes), which left
    # a stray '"' at the start of the summary line.
    tgt_col = pgtypes.get_ptrref_storage_info(
        ptrref, resolve_type=False, link_bias=True).column_name

    cols = ['source', tgt_col]

    set_ops = []

    if only_self:
        ptrrefs = {ptrref}
    else:
        ptrrefs = {ptrref} | ptrref.descendants

    for src_ptrref in ptrrefs:
        table = table_from_ptrref(src_ptrref, env=env)

        qry = pgast.SelectStmt()
        qry.from_clause.append(table)
        qry.rptr_rvar = table

        # Make sure all property references are pulled up properly
        for colname in cols:
            selexpr = pgast.ColumnRef(
                name=[table.alias.aliasname, colname])
            qry.target_list.append(
                pgast.ResTarget(val=selexpr, name=colname))

        set_ops.append(('union', qry))

        # DML-produced rows live in overlay CTEs; include them as
        # additional arms of the UNION.
        overlays = env.rel_overlays.get(src_ptrref.shortname)
        if overlays and include_overlays:
            for op, cte in overlays:
                rvar = pgast.RangeVar(
                    relation=cte,
                    alias=pgast.Alias(
                        aliasname=env.aliases.get(cte.name)))

                qry = pgast.SelectStmt(
                    target_list=[
                        pgast.ResTarget(val=pgast.ColumnRef(name=[col]))
                        for col in cols
                    ],
                    from_clause=[rvar],
                )
                set_ops.append((op, qry))

    rvar = range_from_queryset(set_ops, ptrref.shortname, env=env)
    return rvar
def wrap_script_stmt(
    stmt: pgast.SelectStmt,
    *,
    suppress_all_output: bool = False,
    env: context.Environment,
) -> pgast.SelectStmt:
    """Wrap a script statement into a count() aggregate.

    With *suppress_all_output*, the aggregate is wrapped once more and
    filtered on the aggregated column being NULL, so the query is
    evaluated but produces no rows.
    """
    subrvar = pgast.RangeSubselect(
        subquery=stmt,
        alias=pgast.Alias(aliasname=env.aliases.get('aggw')))

    # Make sure the wrapped statement's first output column is named
    # so it can be referenced from the wrapping query.
    stmt_res = stmt.target_list[0]
    if stmt_res.name is None:
        stmt_res = stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=stmt_res.val,
        )
    assert stmt_res.name is not None

    count_val = pgast.FuncCall(
        name=('count', ),
        args=[pgast.ColumnRef(name=[stmt_res.name])])

    result = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=count_val,
                name=stmt_res.name,
            ),
        ],
        from_clause=[
            subrvar,
        ])

    if suppress_all_output:
        # Filter on the aggregated column being NULL; this yields an
        # empty result set while still forcing evaluation of the
        # wrapped query.
        subrvar = pgast.RangeSubselect(
            subquery=result,
            alias=pgast.Alias(aliasname=env.aliases.get('q')))

        result = pgast.SelectStmt(
            target_list=[],
            from_clause=[
                subrvar,
            ],
            where_clause=pgast.NullTest(
                arg=pgast.ColumnRef(
                    name=[subrvar.alias.aliasname, stmt_res.name],
                ),
            ),
        )

    # Lift CTEs and argument names to the new top-level query.
    result.ctes = stmt.ctes
    result.argnames = stmt.argnames
    stmt.ctes = []

    return result
def get_volatility_ref(
        path_id: irast.PathId, stmt: pgast.SelectStmt, *,
        ctx: context.CompilerContextLevel) -> Optional[pgast.BaseExpr]:
    """Produce an appropriate volatility_ref from a path_id."""
    ref: Optional[pgast.BaseExpr] = relctx.maybe_get_path_var(
        stmt, path_id, aspect='identity', ctx=ctx)
    if ref:
        return ref

    rvar = relctx.maybe_get_path_rvar(stmt, path_id, aspect='value', ctx=ctx)
    if rvar and isinstance(rvar.query, pgast.ReturningQuery):
        # If we are selecting from a nontrivial subquery, manually
        # add a volatility ref based on row_number. We do it
        # manually because the row number isn't /really/ the
        # identity of the set.
        key_name = ctx.env.aliases.get('key')
        rvar.query.target_list.append(
            pgast.ResTarget(
                name=key_name,
                val=pgast.FuncCall(
                    name=('row_number',),
                    args=[],
                    over=pgast.WindowDef(),
                ),
            )
        )
        return pgast.ColumnRef(name=[rvar.alias.aliasname, key_name])

    return relctx.maybe_get_path_var(stmt, path_id, aspect='value', ctx=ctx)
def get_packed_path_var(
        rvar: pgast.PathRangeVar, path_id: irast.PathId, aspect: str, *,
        env: context.Environment) -> Tuple[pgast.OutputVar, bool]:
    """Find or create the packed output for *path_id* on *rvar*.

    Returns the output var and a flag indicating a multi value.
    """
    existing = maybe_get_rvar_path_packed_output(
        rvar, path_id, aspect, env=env)
    if existing:
        return existing

    query = rvar.query
    assert isinstance(query, pgast.Query)
    rel_rvar = get_path_rvar(
        query, path_id, flavor='packed', aspect=aspect, env=env)

    # XXX: some duplication of path_output
    ref, multi = get_packed_path_var(rel_rvar, path_id, aspect, env=env)
    alias = get_path_output_alias(path_id, aspect, env=env)
    query.target_list.append(
        pgast.ResTarget(
            name=alias,
            val=ref,
            ser_safe=getattr(ref, 'ser_safe', False),
        )
    )

    # Propagate optionality only for plain column refs.
    optional = ref.optional if isinstance(ref, pgast.ColumnRef) else None
    result = pgast.ColumnRef(
        name=[alias],
        nullable=is_nullable(ref, env=env),
        optional=optional,
    )
    _put_path_output_var(query, path_id, aspect, result, env=env)
    return result, multi
def get_path_output_or_null(
        rel: pgast.Query, path_id: irast.PathId, *,
        aspect: str, env: context.Environment) -> \
        typing.Tuple[pgast.OutputVar, bool]:
    """Return an output for *path_id*, or a synthesized NULL column.

    The second element of the returned tuple is True when a NULL
    placeholder had to be created.
    """
    path_id = map_path_id(path_id, rel.view_path_id_map)

    out = maybe_get_path_output(rel, path_id, aspect=aspect, env=env)
    if out is not None:
        return out, False

    # Fall back to a less specific aspect, if one exists.
    other_aspect = get_less_specific_aspect(path_id, aspect)
    if other_aspect is not None:
        out = maybe_get_path_output(
            rel, path_id, aspect=other_aspect, env=env)
        if out is not None:
            _put_path_output_var(rel, path_id, aspect, out, env=env)
            return out, False

    # Nothing found: emit an explicit NULL column.
    null_alias = env.aliases.get('null')
    rel.target_list.append(
        pgast.ResTarget(name=null_alias, val=pgast.NullConstant()))

    null_ref = pgast.ColumnRef(name=[null_alias], nullable=True)
    _put_path_output_var(rel, path_id, aspect, null_ref, env=env)
    return null_ref, True
def get_path_serialized_output(
        rel: pgast.Query, path_id: irast.PathId, *,
        env: context.Environment) -> pgast.OutputVar:
    """Return (creating and caching if needed) the serialized output."""
    # Serialized output is a special case, we don't
    # want this behaviour to be recursive, so it
    # must be kept outside of get_path_output() generic.
    aspect = 'serialized'

    cached = rel.path_outputs.get((path_id, aspect))
    if cached is not None:
        return cached

    ref = get_path_serialized_or_value_var(rel, path_id, env=env)
    refexpr = output.serialize_expr(ref, path_id=path_id, env=env)

    alias = get_path_output_alias(path_id, aspect, env=env)
    rel.target_list.append(
        pgast.ResTarget(name=alias, val=refexpr, ser_safe=True))

    out = pgast.ColumnRef(
        name=[alias], nullable=refexpr.nullable, ser_safe=True)
    _put_path_output_var(rel, path_id, aspect, out, env=env)
    return out
def aggregate_json_output(
        stmt: pgast.Query, ir_set: irast.Set, *,
        env: context.Environment) -> pgast.Query:
    """Wrap *stmt* so that its rows are aggregated into one JSON array."""
    subrvar = pgast.RangeSubselect(
        subquery=stmt,
        alias=pgast.Alias(aliasname=env.aliases.get('aggw')))

    # Ensure the wrapped output column has a referenceable name.
    stmt_res = stmt.target_list[0]
    if stmt_res.name is None:
        stmt_res = stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=stmt_res.val,
        )

    agg_call = pgast.FuncCall(
        name=_get_json_func('agg', env=env),
        args=[pgast.ColumnRef(name=[stmt_res.name])])

    # An empty input aggregates to NULL; coalesce it to '[]'.
    new_val = pgast.CoalesceExpr(
        args=[agg_call, pgast.StringConstant(val='[]')])

    result = pgast.SelectStmt(
        target_list=[pgast.ResTarget(val=new_val)],
        from_clause=[subrvar])

    # Lift CTEs and argument names to the new top-level query.
    result.ctes = stmt.ctes
    result.argnames = stmt.argnames
    stmt.ctes = []

    return result
def new_root_rvar(
        ir_set: irast.Set, *,
        typeref: typing.Optional[irast.TypeRef] = None,
        ctx: context.CompilerContextLevel) -> pgast.BaseRangeVar:
    """Create a root range var for an object-type set.

    Raises:
        ValueError: if *ir_set* is not an object-type path.
    """
    if not ir_set.path_id.is_objtype_path():
        raise ValueError('cannot create root rvar for non-object path')
    if typeref is None:
        typeref = ir_set.typeref

    set_rvar = dbobj.range_for_typeref(
        typeref, ir_set.path_id, env=ctx.env)
    pathctx.put_rvar_path_bond(set_rvar, ir_set.path_id)
    set_rvar.value_scope.add(ir_set.path_id)

    if ir_set.rptr and ir_set.rptr.is_inbound:
        ptrref = ir_set.rptr.ptrref
        ptr_info = pg_types.get_ptrref_storage_info(
            ptrref, resolve_type=False, link_bias=False)

        if ptr_info.table_type == 'ObjectType':
            # Inline link: the link column lives in the object table,
            # so expose it as the identity of the source path.
            rref = pgast.ColumnRef(
                name=[ptr_info.column_name],
                nullable=not ptrref.required)
            pathctx.put_rvar_path_bond(
                set_rvar, ir_set.path_id.src_path())
            pathctx.put_rvar_path_output(
                set_rvar, ir_set.path_id.src_path(),
                aspect='identity', var=rref, env=ctx.env)

    return set_rvar
def wrap_script_stmt(
    stmt: pgast.SelectStmt,
    ir_set: irast.Set,
    *,
    env: context.Environment,
) -> pgast.SelectStmt:
    """Wrap a script statement into a count() aggregate.

    The wrapped statement's first output column is counted, and the
    original statement's CTEs and argument names are lifted onto the
    wrapping query.
    """
    subrvar = pgast.RangeSubselect(
        subquery=stmt,
        alias=pgast.Alias(aliasname=env.aliases.get('aggw')))

    # Make sure the wrapped statement's first output column is named
    # so it can be referenced from the wrapping query.
    stmt_res = stmt.target_list[0]
    if stmt_res.name is None:
        stmt_res = stmt.target_list[0] = pgast.ResTarget(
            name=env.aliases.get('v'),
            val=stmt_res.val,
        )

    # Fix: a stray trailing comma previously made ``count_val`` a
    # one-element tuple rather than the FuncCall node itself.
    count_val = pgast.FuncCall(
        name=('count', ),
        args=[pgast.ColumnRef(name=[stmt_res.name])])

    result = pgast.SelectStmt(
        target_list=[pgast.ResTarget(val=count_val, )],
        from_clause=[subrvar])

    # Lift CTEs and argument names to the new top-level query.
    result.ctes = stmt.ctes
    result.argnames = stmt.argnames
    stmt.ctes = []

    return result
def top_output_as_config_op(
        ir_set: irast.Set,
        stmt: pgast.SelectStmt, *,
        env: context.Environment) -> pgast.Query:
    """Wrap the top-level output of a CONFIGURE command.

    Only SYSTEM-scoped configuration is supported; the result is a
    jsonb array describing the operation.

    Raises:
        errors.InternalServerError: for non-SYSTEM config scopes.
    """
    assert isinstance(ir_set.expr, irast.ConfigCommand)

    if ir_set.expr.scope is qltypes.ConfigScope.SYSTEM:
        alias = env.aliases.get('cfg')
        subrvar = pgast.RangeSubselect(
            subquery=stmt,
            alias=pgast.Alias(
                aliasname=alias,
            )
        )

        # Ensure the output column is named so it can be referenced.
        stmt_res = stmt.target_list[0]
        if stmt_res.name is None:
            stmt_res = stmt.target_list[0] = pgast.ResTarget(
                name=env.aliases.get('v'),
                val=stmt_res.val,
            )

        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='ADD'),
                pgast.StringConstant(val=str(ir_set.expr.scope)),
                pgast.StringConstant(val=ir_set.expr.name),
                pgast.ColumnRef(name=[stmt_res.name]),
            ]
        )

        array = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        result = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=array,
                ),
            ],
            from_clause=[
                subrvar,
            ],
        )

        # Lift CTEs and argument names to the wrapping query.
        result.ctes = stmt.ctes
        result.argnames = stmt.argnames
        stmt.ctes = []

        return result
    else:
        raise errors.InternalServerError(
            f'CONFIGURE {ir_set.expr.scope} INSERT is not supported')
def compile_materialized_exprs(
        query: pgast.SelectStmt, stmt: irast.Stmt, *,
        ctx: context.CompilerContextLevel) -> None:
    """Compile *stmt*'s materialized sets and attach them to *query*.

    Each materialized set used more than once is compiled into a
    subquery, registered as a packed output, and included into
    *query* as a lateral range var for every use site.
    """
    if not stmt.materialized_sets:
        return

    # Guard against re-entering materialization for the same stmt.
    if stmt in ctx.materializing:
        return

    with context.output_format(ctx, context.OutputFormat.NATIVE), (
            ctx.new()) as matctx:
        matctx.materializing |= {stmt}
        matctx.expr_exposed = True

        for mat_set in stmt.materialized_sets.values():
            # Only materialize sets that are actually reused.
            if len(mat_set.uses) <= 1:
                continue
            assert mat_set.materialized
            assert mat_set.finalized
            # Skip if a packed rvar for this path already exists.
            if relctx.find_rvar(
                    query, flavor='packed',
                    path_id=mat_set.materialized.path_id, ctx=matctx):
                continue

            mat_ids = set(mat_set.uses)

            # We pack optional things into arrays also, since it works.
            # TODO: use NULL?
            card = mat_set.cardinality
            is_singleton = card.is_single() and not card.can_be_zero()

            # Compile in a fresh path scope with the use sites masked.
            matctx.path_scope = matctx.path_scope.new_child()
            for mat_id in mat_ids:
                matctx.path_scope[mat_id] = None
            mat_qry = relgen.set_as_subquery(
                mat_set.materialized, as_value=True, ctx=matctx
            )
            if not is_singleton:
                mat_qry = relctx.set_to_array(
                    path_id=mat_set.materialized.path_id,
                    query=mat_qry,
                    materializing=True,
                    ctx=matctx)

            # Name the output column so it can be referenced.
            if not mat_qry.target_list[0].name:
                mat_qry.target_list[0].name = ctx.env.aliases.get('v')

            ref = pgast.ColumnRef(
                name=[mat_qry.target_list[0].name])
            for mat_id in mat_ids:
                pathctx.put_path_packed_output(
                    mat_qry, mat_id, ref, multi=not is_singleton)

            mat_rvar = relctx.rvar_for_rel(
                mat_qry, lateral=True, ctx=matctx)
            for mat_id in mat_ids:
                relctx.include_rvar(
                    query, mat_rvar, path_id=mat_id,
                    flavor='packed', pull_namespace=False, ctx=matctx,
                )
def get_column(rvar: pgast.BaseRangeVar,
               colspec: Union[str, pgast.ColumnRef], *,
               is_packed_multi: bool = True,
               nullable: Optional[bool] = None) -> pgast.ColumnRef:
    """Build a ColumnRef for *colspec* qualified by *rvar*'s alias.

    Nullability and serialization safety are inferred from the range
    var when *nullable* is not given explicitly.
    """
    if isinstance(colspec, pgast.ColumnRef):
        colname = colspec.name[-1]
    else:
        colname = colspec

    assert isinstance(colname, str)

    ser_safe = False

    if nullable is None:
        if isinstance(rvar, pgast.RelRangeVar):
            # Range over a relation, we cannot infer nullability in
            # this context, so assume it's true, unless we are looking
            # at a colspec that says it is false
            if isinstance(colspec, pgast.ColumnRef):
                nullable = colspec.nullable
            else:
                nullable = True
        elif isinstance(rvar, pgast.RangeSubselect):
            col_idx = find_column_in_subselect_rvar(rvar, colname)
            if is_set_op_query(rvar.subquery):
                # Inspect every arm of a set-op query: the column is
                # nullable if nullable in any arm, and ser_safe only
                # if safe in all of them.
                nullables = []
                ser_safes = []

                def _cb(q: pgast.Query) -> None:
                    nullables.append(q.target_list[col_idx].nullable)
                    ser_safes.append(q.target_list[col_idx].ser_safe)

                for_each_query_in_set(rvar.subquery, _cb)
                nullable = any(nullables)
                ser_safe = all(ser_safes)
            else:
                rt = rvar.subquery.target_list[col_idx]
                nullable = rt.nullable
                ser_safe = rt.ser_safe
        elif isinstance(rvar, pgast.RangeFunction):
            # Range over a function.
            # TODO: look into the possibility of inspecting coldeflist.
            nullable = True
        elif isinstance(rvar, pgast.JoinExpr):
            raise RuntimeError(
                f'cannot find {colname!r} in unexpected {rvar!r} range var')

    name = [rvar.alias.aliasname, colname]

    return pgast.ColumnRef(
        name=name, nullable=nullable, ser_safe=ser_safe,
        is_packed_multi=is_packed_multi)
def scan_check_ctes(
    stmt: pgast.Query,
    check_ctes: List[pgast.CommonTableExpr],
    *,
    ctx: context.CompilerContextLevel,
) -> None:
    """Force full evaluation of constraint-check CTEs attached to *stmt*."""
    if not check_ctes:
        return

    # Scan all of the check CTEs to enforce constraints that are
    # checked as explicit queries and not Postgres constraints or
    # triggers.

    # To make sure that Postgres can't optimize the checks away, we
    # reference them in the where clause of an UPDATE to a dummy
    # table.

    # Add a big random number, so that different queries should try to
    # access different "rows" of the table, in case that matters.
    base_int = random.randint(0, (1 << 60) - 1)
    val: pgast.BaseExpr = pgast.NumericConstant(val=str(base_int))

    for check_cte in check_ctes:
        # We want the CTE to be MATERIALIZED, because otherwise
        # Postgres might not fully evaluate all its columns when
        # scanning it.
        check_cte.materialized = True
        check = pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=pgast.FuncCall(
                    name=('count', ), args=[pgast.Star()]), )
            ],
            from_clause=[
                relctx.rvar_for_rel(check_cte, ctx=ctx),
            ],
        )
        # Fold each CTE's row count into the comparison value so every
        # check CTE is referenced by the final WHERE clause.
        val = pgast.Expr(
            kind=pgast.ExprKind.OP, name='+', lexpr=val, rexpr=check)

    update_query = pgast.UpdateStmt(
        targets=[
            pgast.UpdateTarget(
                name='flag',
                val=pgast.BooleanConstant(val='true'))
        ],
        relation=pgast.RelRangeVar(
            relation=pgast.Relation(
                schemaname='edgedb', name='_dml_dummy')),
        where_clause=pgast.Expr(
            kind=pgast.ExprKind.OP, name='=',
            lexpr=pgast.ColumnRef(name=['id']),
            rexpr=val,
        ))

    stmt.append_cte(
        pgast.CommonTableExpr(
            query=update_query,
            name=ctx.env.aliases.get(hint='check_scan')))
def new_root_rvar(
    ir_set: irast.Set,
    *,
    typeref: Optional[irast.TypeRef] = None,
    ctx: context.CompilerContextLevel,
) -> pgast.PathRangeVar:
    """Create a root range var for an object-type set.

    Intersection typerefs are compiled by joining a component range
    var per intersection member in a wrapper query.

    Raises:
        ValueError: if *ir_set* is not an object-type path.
    """
    if not ir_set.path_id.is_objtype_path():
        raise ValueError('cannot create root rvar for non-object path')

    if typeref is None:
        typeref = ir_set.typeref

    if typeref.intersection:
        # Compile each component of the type intersection and bind
        # them to the same path id in a wrapper query.
        wrapper = pgast.SelectStmt()
        for component in typeref.intersection:
            component_rvar = new_root_rvar(
                ir_set, typeref=component, ctx=ctx)
            pathctx.put_rvar_path_bond(component_rvar, ir_set.path_id)
            include_rvar(wrapper, component_rvar, ir_set.path_id, ctx=ctx)
        return rvar_for_rel(wrapper, ctx=ctx)

    dml_source = irutils.get_nearest_dml_stmt(ir_set)
    set_rvar = range_for_typeref(
        typeref, ir_set.path_id, dml_source=dml_source, ctx=ctx)
    pathctx.put_rvar_path_bond(set_rvar, ir_set.path_id)
    set_rvar.query.value_scope.add(ir_set.path_id)

    if ir_set.rptr and ir_set.rptr.is_inbound:
        ptrref = ir_set.rptr.ptrref
        ptr_info = pg_types.get_ptrref_storage_info(
            ptrref, resolve_type=False, link_bias=False)

        if ptr_info.table_type == 'ObjectType':
            # Inline link
            prefix_path_id = ir_set.path_id.src_path()
            assert prefix_path_id is not None, 'expected a path'
            rref = pgast.ColumnRef(
                name=[ptr_info.column_name],
                nullable=not ptrref.required)
            pathctx.put_rvar_path_bond(set_rvar, prefix_path_id)
            pathctx.put_rvar_path_output(
                set_rvar, prefix_path_id,
                aspect='identity', var=rref, env=ctx.env)

            # If the range is backed by a set-op query, the link
            # column must be projected in every arm of the set.
            if astutils.is_set_op_query(set_rvar.query):
                assert isinstance(set_rvar.query, pgast.SelectStmt)
                astutils.for_each_query_in_set(
                    set_rvar.query,
                    lambda qry:
                        qry.target_list.append(
                            pgast.ResTarget(
                                val=rref,
                                name=ptr_info.column_name,
                            )
                        ))

    return set_rvar
def strip_output_var(
        var: pgast.OutputVar, *,
        optional: Optional[bool] = None,
        nullable: Optional[bool] = None) -> pgast.OutputVar:
    """Reduce *var* to a bare output reference.

    Tuple vars are rebuilt recursively; column refs keep only their
    final name component.  When given, *optional*/*nullable* override
    the corresponding flags of the original.
    """
    result: pgast.OutputVar

    if isinstance(var, pgast.TupleVarBase):
        stripped = []
        for el in var.elements:
            val: pgast.OutputVar
            el_name = el.name
            # Element names may be plain strings or nested output vars.
            if isinstance(el_name, str):
                val = pgast.ColumnRef(name=[el_name])
            elif isinstance(el_name, pgast.OutputVar):
                val = strip_output_var(el_name)
            else:
                raise AssertionError(
                    f'unexpected tuple element class: {el_name!r}')
            stripped.append(
                pgast.TupleElement(
                    path_id=el.path_id, name=el_name, val=val))
        result = pgast.TupleVar(
            stripped,
            named=var.named,
            typeref=var.typeref,
        )
    elif isinstance(var, pgast.ColumnRef):
        result = pgast.ColumnRef(
            name=[var.name[-1]],
            optional=var.optional if optional is None else optional,
            nullable=var.nullable if nullable is None else nullable,
        )
    else:
        raise AssertionError(f'unexpected OutputVar subclass: {var!r}')

    return result
def top_output_as_config_op(
        ir_set: irast.Set,
        stmt: pgast.Query, *,
        env: context.Environment) -> pgast.Query:
    """Wrap the top-level output of a CONFIGURE command.

    Only system-level configuration is supported; the result is a
    jsonb array describing the operation.

    Raises:
        errors.InternalServerError: for session-level configuration.
    """
    if ir_set.expr.system:
        alias = env.aliases.get('cfg')
        subrvar = pgast.RangeSubselect(
            subquery=stmt,
            alias=pgast.Alias(
                aliasname=alias,
            )
        )

        # Ensure the output column is named so it can be referenced.
        stmt_res = stmt.target_list[0]
        if stmt_res.name is None:
            stmt_res = stmt.target_list[0] = pgast.ResTarget(
                name=env.aliases.get('v'),
                val=stmt_res.val,
            )

        result_row = pgast.RowExpr(
            args=[
                pgast.StringConstant(val='ADD'),
                pgast.StringConstant(
                    val='SYSTEM' if ir_set.expr.system else 'SESSION'),
                pgast.StringConstant(val=ir_set.expr.name),
                pgast.ColumnRef(name=[stmt_res.name]),
            ]
        )

        result = pgast.FuncCall(
            name=('jsonb_build_array',),
            args=result_row.args,
            null_safe=True,
            ser_safe=True,
        )

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=result,
                ),
            ],
            from_clause=[
                subrvar,
            ],
        )
    else:
        raise errors.InternalServerError(
            'CONFIGURE SESSION INSERT is not supported')
def _compile_set_in_singleton_mode(
        node: irast.Set, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR set assuming a singleton (simple-expression) context.

    Raises:
        errors.UnsupportedFeatureError: for multi-step paths, which
            cannot be expressed as a simple column reference.
    """
    if isinstance(node, irast.EmptySet):
        return pgast.NullConstant()
    elif node.expr is not None:
        return dispatch.compile(node.expr, ctx=ctx)
    else:
        if node.rptr:
            ptrref = node.rptr.ptrref
            source = node.rptr.source

            # Tuple attribute access compiles to a getattr on the
            # tuple value rather than a column reference.
            if isinstance(ptrref, irast.TupleIndirectionPointerRef):
                tuple_val = dispatch.compile(source, ctx=ctx)
                set_expr = astutils.tuple_getattr(
                    tuple_val,
                    source.typeref,
                    ptrref.shortname.name,
                )
                return set_expr

            if ptrref.source_ptr is None and source.rptr is not None:
                raise errors.UnsupportedFeatureError(
                    'unexpectedly long path in simple expr')

            ptr_stor_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False)
            colref = pgast.ColumnRef(
                name=[ptr_stor_info.column_name],
                nullable=node.rptr.dir_cardinality.can_be_zero())
        else:
            name = [common.edgedb_name_to_pg_name(str(node.typeref.id))]
            # Object paths resolve to the object's id column.
            if node.path_id.is_objtype_path():
                name.append('id')

            colref = pgast.ColumnRef(name=name)

        return colref
def _get_volatility_ref() -> Optional[pgast.BaseExpr]: nonlocal vol_ref if vol_ref: return vol_ref name = ctx.env.aliases.get('key') grouprel.target_list.append( pgast.ResTarget(name=name, val=pgast.FuncCall(name=('row_number', ), args=[], over=pgast.WindowDef()))) vol_ref = pgast.ColumnRef(name=[group_rvar.alias.aliasname, name]) return vol_ref
def new_external_rvar(
    *,
    rel_name: Tuple[str, ...],
    path_id: irast.PathId,
    outputs: Mapping[Tuple[irast.PathId, Tuple[str, ...]], str],
) -> pgast.RelRangeVar:
    """Construct a ``RangeVar`` instance given a relation name and a path id.

    Given an optionally-qualified relation name *rel_name* and a
    *path_id*, return a ``RangeVar`` instance over the specified
    relation that is then assumed to represent the *path_id* binding.

    This is useful in situations where it is necessary to "prime" the
    compiler with a list of external relations that exist in a larger
    SQL expression that _this_ expression is being embedded into.

    The *outputs* mapping optionally specifies a set of outputs in the
    resulting range var as a ``(path_id, tuple-of-aspects): attribute
    name`` mapping.
    """
    if len(rel_name) == 1:
        table_name = rel_name[0]
        schema_name = None
    elif len(rel_name) == 2:
        schema_name, table_name = rel_name
    else:
        raise AssertionError(f'unexpected rvar name: {rel_name}')

    rel = pgast.Relation(
        name=table_name,
        schemaname=schema_name,
        path_id=path_id,
    )

    alias = pgast.Alias(aliasname=table_name)

    # Only non-pointer paths carry a target typeref.
    if not path_id.is_ptr_path():
        rvar = pgast.RelRangeVar(
            relation=rel, typeref=path_id.target, alias=alias)
    else:
        rvar = pgast.RelRangeVar(
            relation=rel, alias=alias)

    # Register the declared outputs on the relation so path lookups
    # resolve to the specified column names.
    for (output_pid, output_aspects), colname in outputs.items():
        var = pgast.ColumnRef(name=[colname])
        for aspect in output_aspects:
            rel.path_outputs[output_pid, aspect] = var

    return rvar
def get_column(
        rvar: pgast.BaseRangeVar,
        colspec: typing.Union[str, pgast.ColumnRef], *,
        nullable: bool=None) -> pgast.ColumnRef:
    """Build a ColumnRef for *colspec* qualified by *rvar*'s alias.

    Nullability and serialization safety are inferred from the range
    var when *nullable* is not given explicitly.
    """
    if isinstance(colspec, pgast.ColumnRef):
        colname = colspec.name[-1]
    else:
        colname = colspec

    ser_safe = False

    if nullable is None:
        if isinstance(rvar, pgast.RangeVar):
            # Range over a relation, we cannot infer nullability in
            # this context, so assume it's true.
            nullable = True
        elif isinstance(rvar, pgast.RangeSubselect):
            col_idx = find_column_in_subselect_rvar(rvar, colname)
            if astutils.is_set_op_query(rvar.subquery):
                # Inspect every arm of the set-op query: nullable if
                # nullable anywhere, ser_safe only if safe everywhere.
                nullables = []
                ser_safes = []

                def _collect(q):
                    nullables.append(q.target_list[col_idx].nullable)
                    ser_safes.append(q.target_list[col_idx].ser_safe)

                astutils.for_each_query_in_set(rvar.subquery, _collect)
                nullable = any(nullables)
                ser_safe = all(ser_safes)
            else:
                rt = rvar.subquery.target_list[col_idx]
                nullable = rt.nullable
                ser_safe = rt.ser_safe
        elif isinstance(rvar, pgast.RangeFunction):
            # Range over a function.
            # TODO: look into the possibility of inspecting coldeflist.
            nullable = True
        elif isinstance(rvar, pgast.JoinExpr):
            raise RuntimeError(
                f'cannot find {colname!r} in unexpected {rvar!r} range var')

    return pgast.ColumnRef(
        name=[rvar.alias.aliasname, colname],
        nullable=nullable,
        ser_safe=ser_safe)
def _pull_col(comp_qry: pgast.Query) -> None:
    """Project the component pointer's column under the shared name."""
    rvar = pathctx.get_path_rvar(
        comp_qry, path_id, aspect='source', env=ctx.env)
    typeref = rvar.typeref
    assert typeref is not None

    # Resolve the concrete pointer for this component type and find
    # its storage column.
    comp_ptrref = ptr_ref_map[typeref.id]
    comp_pi = pg_types.get_ptrref_storage_info(
        comp_ptrref, resolve_type=False, link_bias=False)

    comp_qry.target_list.append(
        pgast.ResTarget(
            val=pgast.ColumnRef(name=[comp_pi.column_name]),
            name=ptr_info.column_name,
        )
    )
def get_path_output_or_null(
        rel: pgast.Query, path_id: irast.PathId, *,
        disable_output_fusion: bool=False,
        aspect: str, env: context.Environment) -> \
        Tuple[pgast.OutputVar, bool]:
    """Return an output for *path_id*, or a synthesized NULL column.

    The second element of the returned tuple is True when a NULL
    placeholder had to be created.
    """
    path_id = map_path_id(path_id, rel.view_path_id_map)

    ref = maybe_get_path_output(
        rel, path_id,
        disable_output_fusion=disable_output_fusion,
        aspect=aspect, env=env)
    if ref is not None:
        return ref, False

    # Fall back to a less specific aspect, if one exists.
    alt_aspect = get_less_specific_aspect(path_id, aspect)
    if alt_aspect is not None:
        # If disable_output_fusion is true, we need to be careful
        # to not reuse an existing column
        if disable_output_fusion:
            preexisting = rel.path_outputs.pop(
                (path_id, alt_aspect), None)
        ref = maybe_get_path_output(
            rel, path_id,
            disable_output_fusion=disable_output_fusion,
            aspect=alt_aspect, env=env)
        if disable_output_fusion:
            # Put back the path_output to whatever it was before
            if not preexisting:
                rel.path_outputs.pop((path_id, alt_aspect), None)
            else:
                rel.path_outputs[(path_id, alt_aspect)] = preexisting

        if ref is not None:
            _put_path_output_var(rel, path_id, aspect, ref, env=env)
            return ref, False

    # Nothing found: emit an explicit NULL column.
    alias = env.aliases.get('null')
    restarget = pgast.ResTarget(
        name=alias,
        val=pgast.NullConstant())

    rel.target_list.append(restarget)

    ref = pgast.ColumnRef(name=[alias], nullable=True)
    _put_path_output_var(rel, path_id, aspect, ref, env=env)

    return ref, True
def get_path_serialized_output(
        rel: pgast.Query, path_id: irast.PathId, *,
        env: context.Environment) -> pgast.OutputVar:
    """Return (creating and caching if needed) the serialized output."""
    # Serialized output is a special case, we don't
    # want this behaviour to be recursive, so it
    # must be kept outside of get_path_output() generic.
    aspect = 'serialized'

    path_id = map_path_id(path_id, rel.view_path_id_map)

    result = rel.path_outputs.get((path_id, aspect))
    if result is not None:
        return result

    ref = get_path_serialized_or_value_var(rel, path_id, env=env)

    # An abstract tuple var needs its elements resolved to concrete
    # serialized-or-value vars before it can be serialized.
    if (
        isinstance(ref, pgast.TupleVarBase)
        and not isinstance(ref, pgast.TupleVar)
    ):
        elements = []
        for el in ref.elements:
            assert el.path_id is not None
            val = get_path_serialized_or_value_var(
                rel, el.path_id, env=env)
            elements.append(
                pgast.TupleElement(
                    path_id=el.path_id, name=el.name, val=val))
        ref = pgast.TupleVar(
            elements,
            named=ref.named,
            typeref=ref.typeref,
        )

    refexpr = output.serialize_expr(ref, path_id=path_id, env=env)
    alias = get_path_output_alias(path_id, aspect, env=env)

    restarget = pgast.ResTarget(name=alias, val=refexpr, ser_safe=True)
    rel.target_list.append(restarget)

    result = pgast.ColumnRef(
        name=[alias], nullable=refexpr.nullable, ser_safe=True)

    _put_path_output_var(rel, path_id, aspect, result, env=env)

    return result
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Serialize an array expression as JSON.

    Arrays of tuples are unnested and re-aggregated element by
    element; any other array is serialized wholesale with a single
    to-JSON call.
    """
    el_type = styperef.subtypes[0]

    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        is_named = any(st.element_name for st in el_type.subtypes)

        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                # Named tuples serialize as objects, so interleave
                # the key names with the values.
                json_args.append(
                    pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                val = coll_as_json_object(val, styperef=st, env=env)

            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple
                # types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr], null_safe=True, ser_safe=True)