def range_for_material_objtype(
        objtype: s_objtypes.ObjectType,
        path_id: irast.PathId, *,
        include_overlays: bool = True,
        env: context.Environment) -> pgast.BaseRangeVar:

    from . import pathctx  # XXX: fix cycle

    objtype = objtype.material_type()

    table_schema_name, table_name = common.objtype_name_to_table_name(
        objtype.name, catenate=False)

    if objtype.name.module == 'schema':
        # Redirect all queries to schema tables to edgedbss
        table_schema_name = 'edgedbss'

    relation = pgast.Relation(
        schemaname=table_schema_name,
        name=table_name,
        nullable=False,
        path_id=path_id,
    )

    rvar = pgast.RangeVar(
        relation=relation,
        alias=pgast.Alias(
            aliasname=env.aliases.get(objtype.name.name)
        )
    )

    overlays = env.rel_overlays.get(objtype.name)
    if overlays and include_overlays:
        set_ops = []

        qry = pgast.SelectStmt()
        qry.from_clause.append(rvar)
        pathctx.put_path_value_rvar(qry, path_id, rvar, env=env)
        qry.path_scope.add(path_id)

        set_ops.append(('union', qry))

        for op, cte in overlays:
            rvar = pgast.RangeVar(
                relation=cte,
                alias=pgast.Alias(
                    aliasname=env.aliases.get(hint=cte.name)
                )
            )

            qry = pgast.SelectStmt(
                from_clause=[rvar],
            )

            pathctx.put_path_value_rvar(qry, path_id, rvar, env=env)
            qry.path_scope.add(path_id)

            if op == 'replace':
                op = 'union'
                set_ops = []
            set_ops.append((op, qry))

        rvar = range_from_queryset(set_ops, objtype, env=env)

    return rvar

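# For illustration only: with a single 'union' overlay CTE (e.g. one recorded
# by an INSERT earlier in the same statement), the range produced above is
# roughly equivalent to the following SQL.  The table, CTE and alias names
# here are invented:
#
#     (SELECT ... FROM <SomeType table> AS "SomeType"
#      UNION ALL
#      SELECT ... FROM "ins_cte") AS "SomeType~1"
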
def rvar_for_rel(
        rel: pgast.BaseRelation, *,
        lateral: bool = False,
        colnames: typing.List[str] = [],
        env: context.Environment) -> pgast.BaseRangeVar:

    if isinstance(rel, pgast.Query):
        alias = env.aliases.get(rel.name or 'q')

        rvar = pgast.RangeSubselect(
            subquery=rel,
            alias=pgast.Alias(aliasname=alias, colnames=colnames),
            lateral=lateral,
            nullable=rel.nullable)
    else:
        alias = env.aliases.get(rel.name)

        rvar = pgast.RangeVar(
            relation=rel,
            nullable=rel.nullable,
            alias=pgast.Alias(aliasname=alias, colnames=colnames))

    return rvar

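# A minimal usage sketch (hypothetical; `some_select`, `stmt` and `env` are
# assumed to exist in the calling compiler context): wrapping a compiled
# subquery as a LATERAL subselect joined into an outer rel.
#
#     rvar = rvar_for_rel(some_select, lateral=True, env=env)
#     stmt.from_clause.append(rvar)
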
def table_from_ptrcls(
        ptrcls: s_links.Link, *,
        env: context.Environment) -> pgast.RangeVar:
    """Return a Table corresponding to a given Link."""
    table_schema_name, table_name = common.get_table_name(
        ptrcls, catenate=False)

    pname = ptrcls.shortname

    if pname.module == 'schema':
        # Redirect all queries to schema tables to edgedbss
        table_schema_name = 'edgedbss'

    relation = pgast.Relation(
        schemaname=table_schema_name, name=table_name)

    rvar = pgast.RangeVar(
        relation=relation,
        alias=pgast.Alias(
            aliasname=env.aliases.get(pname.name)))

    return rvar

def range_from_queryset(
        set_ops: typing.Sequence[typing.Tuple[str, pgast.BaseRelation]],
        scls: s_obj.Object, *,
        env: context.Environment) -> pgast.BaseRangeVar:

    if len(set_ops) > 1:
        # More than one class table, generate a UNION/EXCEPT clause.
        qry = pgast.SelectStmt(
            all=True,
            larg=set_ops[0][1])

        for op, rarg in set_ops[1:]:
            qry.op, qry.rarg = op, rarg
            qry = pgast.SelectStmt(
                all=True,
                larg=qry)
        qry = qry.larg

        rvar = pgast.RangeSubselect(
            subquery=qry,
            alias=pgast.Alias(
                aliasname=env.aliases.get(scls.shortname.name)
            )
        )

    else:
        # Just one class table, so return it directly
        rvar = set_ops[0][1].from_clause[0]

    return rvar

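# Sketch of the shape produced for
# set_ops == [('union', q1), ('union', q2), ('except', q3)]
# (names invented; each qN is a SelectStmt over one table):
#
#     (q1 UNION ALL q2 EXCEPT ALL q3) AS "Alias"
#
# i.e. the set operations are chained left-to-right and the whole chain is
# wrapped in a RangeSubselect aliased after the class short name.
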
def process_link_values(
        ir_stmt, ir_expr, target_tab, tab_cols, col_data,
        dml_rvar, sources, props_only, target_is_scalar, iterator_cte, *,
        ctx: context.CompilerContextLevel) -> pgast.CommonTableExpr:
    """Unpack data from an update expression into a series of selects.

    :param ir_expr:
        IR of the INSERT/UPDATE body element.
    :param target_tab:
        The link table being updated.
    :param tab_cols:
        A sequence of columns in the table being updated.
    :param col_data:
        Expressions used to populate well-known columns of the link
        table such as std::source and std::__type__.
    :param sources:
        A list of relations which must be joined into the data query
        to resolve expressions in *col_data*.
    :param props_only:
        Whether this link update only touches link properties.
    :param target_is_scalar:
        Whether the link target is a ScalarType.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    """
    with ctx.newscope() as newscope, newscope.newrel() as subrelctx:
        row_query = subrelctx.rel

        relctx.include_rvar(row_query, dml_rvar, ctx=subrelctx)
        subrelctx.path_scope[ir_stmt.subject.path_id] = row_query

        if iterator_cte is not None:
            iterator_rvar = dbobj.rvar_for_rel(
                iterator_cte, lateral=True, env=subrelctx.env)
            relctx.include_rvar(row_query, iterator_rvar,
                                iterator_cte.query.path_id,
                                aspect='value', ctx=subrelctx)

        with subrelctx.newscope() as sctx, sctx.subrel() as input_rel_ctx:
            input_rel = input_rel_ctx.rel
            if iterator_cte is not None:
                input_rel_ctx.path_scope[iterator_cte.query.path_id] = \
                    row_query
            input_rel_ctx.expr_exposed = False
            input_rel_ctx.shape_format = context.ShapeFormat.FLAT
            input_rel_ctx.volatility_ref = pathctx.get_path_identity_var(
                row_query, ir_stmt.subject.path_id, env=input_rel_ctx.env)
            dispatch.compile(ir_expr, ctx=input_rel_ctx)

    input_stmt = input_rel

    input_rvar = pgast.RangeSubselect(
        subquery=input_rel,
        lateral=True,
        alias=pgast.Alias(
            aliasname=ctx.env.aliases.get('val')
        )
    )

    row = pgast.ImplicitRowExpr()

    source_data = {}

    if input_stmt.op is not None:
        # UNION
        input_stmt = input_stmt.rarg

    path_id = ir_expr.path_id

    output = pathctx.get_path_value_output(
        input_stmt, path_id, env=ctx.env)

    if isinstance(output, pgast.TupleVar):
        for element in output.elements:
            name = element.path_id.rptr_name()
            if name is None:
                name = element.path_id[-1].name
            colname = common.edgedb_name_to_pg_name(name)
            source_data.setdefault(
                colname, dbobj.get_column(input_rvar, element.name))
    else:
        if target_is_scalar:
            target_ref = pathctx.get_rvar_path_value_var(
                input_rvar, path_id, env=ctx.env)
        else:
            target_ref = pathctx.get_rvar_path_identity_var(
                input_rvar, path_id, env=ctx.env)

        source_data['std::target'] = target_ref

    if not target_is_scalar and 'std::target' not in source_data:
        target_ref = pathctx.get_rvar_path_identity_var(
            input_rvar, path_id, env=ctx.env)
        source_data['std::target'] = target_ref

    for col in tab_cols:
        expr = col_data.get(col)
        if expr is None:
            expr = source_data.get(col)

        if expr is None:
            if tab_cols[col]['column_default'] is not None:
                expr = pgast.LiteralExpr(
                    expr=tab_cols[col]['column_default'])
            else:
                expr = pgast.Constant(val=None)

        row.args.append(expr)

    row_query.target_list = [
        pgast.ResTarget(
            val=pgast.Indirection(
                arg=pgast.TypeCast(
                    arg=row,
                    type_name=pgast.TypeName(
                        name=target_tab
                    )
                ),
                indirection=[pgast.Star()]
            )
        )
    ]

    row_query.from_clause += list(sources) + [input_rvar]

    link_rows = pgast.CommonTableExpr(
        query=row_query,
        name=ctx.env.aliases.get(hint='r')
    )

    return link_rows

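# The data query assembled above yields one row per link record, shaped as
# the link table's row type.  Roughly (illustrative names only; "m" is the
# DML CTE range var, "lid" the ptr-id lookup CTE, "val" the compiled rhs):
#
#     SELECT (ROW(<ptr_item_id>, <std::source>, <std::target>, ...)
#             ::<link table>).*
#     FROM "m", "lid", LATERAL (<compiled rhs>) AS "val"
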
def process_linkprop_update(
        ir_stmt: irast.MutatingStmt, ir_expr: irast.Base,
        wrapper: pgast.Query, dml_cte: pgast.CommonTableExpr, *,
        ctx: context.CompilerContextLevel) -> None:
    """Perform link property updates to a link relation.

    :param ir_stmt:
        IR of the statement.
    :param ir_expr:
        IR of the UPDATE body element.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL UPDATE to the main relation of the Object.
    """
    toplevel = ctx.toplevel_stmt

    rptr = ir_expr.rptr
    ptrcls = rptr.ptrcls
    target_is_scalar = isinstance(rptr.target, s_scalars.ScalarType)

    target_tab = dbobj.range_for_ptrcls(
        ptrcls, '>', include_overlays=False, env=ctx.env)

    if target_is_scalar:
        target_tab_name = (target_tab.schema, target_tab.name)
    else:
        target_tab_name = common.link_name_to_table_name(
            ptrcls.shortname, catenate=False)

    tab_cols = \
        ctx.env.backend._type_mech.get_cached_table_columns(target_tab_name)

    assert tab_cols, "could not get cols for {!r}".format(target_tab_name)

    dml_cte_rvar = pgast.RangeVar(
        relation=dml_cte,
        alias=pgast.Alias(
            aliasname=ctx.env.aliases.get('m')
        )
    )

    cond = astutils.new_binop(
        pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env),
        dbobj.get_column(target_tab, 'std::source'),
        ast.ops.EQ
    )

    targets = []
    for prop_el in ir_expr.shape:
        ptrname = prop_el.rptr.ptrcls.shortname
        with ctx.new() as input_rel_ctx:
            input_rel_ctx.expr_exposed = False
            input_rel = dispatch.compile(prop_el.expr, ctx=input_rel_ctx)
            targets.append(
                pgast.UpdateTarget(
                    name=common.edgedb_name_to_pg_name(ptrname),
                    val=input_rel
                )
            )

    updstmt = pgast.UpdateStmt(
        relation=target_tab,
        where_clause=cond,
        targets=targets,
        from_clause=[dml_cte_rvar]
    )

    updcte = pgast.CommonTableExpr(
        query=updstmt,
        name=ctx.env.aliases.get(ptrcls.shortname.name)
    )

    toplevel.ctes.append(updcte)

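# The resulting CTE is roughly (names invented):
#
#     "ptrname~1" AS (
#         UPDATE <link table> AS "l"
#         SET <prop column> = <compiled value>, ...
#         FROM "m"
#         WHERE "m".<identity> = "l"."std::source"
#     )
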
def process_link_update(
        ir_stmt: irast.MutatingStmt, ir_expr: irast.Base, props_only: bool,
        wrapper: pgast.Query, dml_cte: pgast.CommonTableExpr,
        iterator_cte: pgast.CommonTableExpr, *,
        ctx: context.CompilerContextLevel) -> typing.Optional[pgast.Query]:
    """Perform updates to a link relation as part of a DML statement.

    :param ir_stmt:
        IR of the statement.
    :param ir_expr:
        IR of the INSERT/UPDATE body element.
    :param props_only:
        Whether this link update only touches link properties.
    :param wrapper:
        Top-level SQL query.
    :param dml_cte:
        CTE representing the SQL INSERT or UPDATE to the main
        relation of the Object.
    :param iterator_cte:
        CTE representing the iterator range in the FOR clause of the
        EdgeQL DML statement.
    """
    toplevel = ctx.toplevel_stmt

    edgedb_ptr_tab = pgast.RangeVar(
        relation=pgast.Relation(
            schemaname='edgedb', name='pointer'
        ),
        alias=pgast.Alias(
            aliasname=ctx.env.aliases.get(hint='ptr')
        )
    )

    ltab_alias = edgedb_ptr_tab.alias.aliasname

    rptr = ir_expr.rptr
    ptrcls = rptr.ptrcls
    target_is_scalar = isinstance(ptrcls.target, s_scalars.ScalarType)
    path_id = rptr.source.path_id.extend(
        ptrcls, rptr.direction, rptr.target.scls)

    # The links in the dml class shape have been derived,
    # but we must use the correct specialized link class for the
    # base material type.
    mptrcls = ptrcls.material_type()

    # Lookup link class id by link name.
    lname_to_id = pgast.CommonTableExpr(
        query=pgast.SelectStmt(
            from_clause=[edgedb_ptr_tab],
            target_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[ltab_alias, 'id'])
                )
            ],
            where_clause=astutils.new_binop(
                lexpr=pgast.ColumnRef(name=[ltab_alias, 'name']),
                rexpr=pgast.Constant(val=mptrcls.name),
                op=ast.ops.EQ
            )
        ),
        name=ctx.env.aliases.get(hint='lid')
    )

    lname_to_id_rvar = pgast.RangeVar(relation=lname_to_id)
    toplevel.ctes.append(lname_to_id)

    target_rvar = dbobj.range_for_ptrcls(
        mptrcls, '>', include_overlays=False, env=ctx.env)
    target_alias = target_rvar.alias.aliasname

    if target_is_scalar:
        target_tab_name = (target_rvar.relation.schemaname,
                           target_rvar.relation.name)
    else:
        target_tab_name = common.link_name_to_table_name(
            mptrcls.shortname, catenate=False)

    tab_cols = \
        ctx.env.backend._type_mech.get_cached_table_columns(target_tab_name)

    assert tab_cols, "could not get cols for {!r}".format(target_tab_name)

    dml_cte_rvar = pgast.RangeVar(
        relation=dml_cte,
        alias=pgast.Alias(
            aliasname=ctx.env.aliases.get('m')
        )
    )

    col_data = {
        'ptr_item_id': pgast.ColumnRef(name=[lname_to_id.name, 'id']),
        'std::source': pathctx.get_rvar_path_identity_var(
            dml_cte_rvar, ir_stmt.subject.path_id, env=ctx.env)
    }

    # Drop all previous link records for this source.
    delcte = pgast.CommonTableExpr(
        query=pgast.DeleteStmt(
            relation=target_rvar,
            where_clause=astutils.new_binop(
                lexpr=col_data['std::source'],
                op=ast.ops.EQ,
                rexpr=pgast.ColumnRef(
                    name=[target_alias, 'std::source'])
            ),
            using_clause=[dml_cte_rvar],
            returning_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(
                        name=[target_alias, pgast.Star()])
                )
            ]
        ),
        name=ctx.env.aliases.get(hint='d')
    )

    pathctx.put_path_value_rvar(
        delcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this removal in the relation overlay
    # context to ensure that the RETURNING clause potentially
    # referencing this link yields the expected results.
    overlays = ctx.env.rel_overlays[ptrcls.shortname]
    overlays.append(('except', delcte))
    toplevel.ctes.append(delcte)

    # Turn the IR of the expression on the right side of :=
    # into a subquery returning records for the link table.
    data_cte = process_link_values(
        ir_stmt, ir_expr, target_tab_name, tab_cols, col_data,
        dml_cte_rvar, [lname_to_id_rvar], props_only, target_is_scalar,
        iterator_cte, ctx=ctx)

    toplevel.ctes.append(data_cte)

    data_select = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(name=[data_cte.name, pgast.Star()])
            )
        ],
        from_clause=[
            pgast.RangeVar(relation=data_cte)
        ]
    )

    # Inserting rows into the link table may produce cardinality
    # constraint violations, since the INSERT into the link table
    # is executed in the snapshot where the above DELETE from
    # the link table is not visible.  Hence, we need to use
    # the ON CONFLICT clause to resolve this.
    conflict_cols = ['std::source', 'std::target', 'ptr_item_id']
    conflict_inference = []
    conflict_exc_row = []

    for col in conflict_cols:
        conflict_inference.append(
            pgast.ColumnRef(name=[col])
        )
        conflict_exc_row.append(
            pgast.ColumnRef(name=['excluded', col])
        )

    conflict_data = pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(
                val=pgast.ColumnRef(
                    name=[data_cte.name, pgast.Star()]))
        ],
        from_clause=[
            pgast.RangeVar(relation=data_cte)
        ],
        where_clause=astutils.new_binop(
            lexpr=pgast.ImplicitRowExpr(args=conflict_inference),
            rexpr=pgast.ImplicitRowExpr(args=conflict_exc_row),
            op='='
        )
    )

    cols = [pgast.ColumnRef(name=[col]) for col in tab_cols]
    updcte = pgast.CommonTableExpr(
        name=ctx.env.aliases.get(hint='i'),
        query=pgast.InsertStmt(
            relation=target_rvar,
            select_stmt=data_select,
            cols=cols,
            on_conflict=pgast.OnConflictClause(
                action='update',
                infer=pgast.InferClause(
                    index_elems=conflict_inference
                ),
                target_list=[
                    pgast.MultiAssignRef(
                        columns=cols,
                        source=conflict_data
                    )
                ]
            ),
            returning_list=[
                pgast.ResTarget(
                    val=pgast.ColumnRef(name=[pgast.Star()])
                )
            ]
        )
    )

    pathctx.put_path_value_rvar(
        updcte.query, path_id.ptr_path(), target_rvar, env=ctx.env)

    # Record the effect of this insertion in the relation overlay
    # context to ensure that the RETURNING clause potentially
    # referencing this link yields the expected results.
    overlays = ctx.env.rel_overlays[ptrcls.shortname]
    overlays.append(('union', updcte))

    toplevel.ctes.append(updcte)

    return data_cte

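# Taken together, the CTEs appended above implement the link update roughly
# as follows (all names are illustrative):
#
#     "lid" AS (SELECT id FROM edgedb.pointer WHERE name = <link name>),
#     "d"   AS (DELETE FROM <link table> USING "m"
#               WHERE <link table>."std::source" = "m".<identity>
#               RETURNING <link table>.*),
#     "r"   AS (<data query produced by process_link_values>),
#     "i"   AS (INSERT INTO <link table> (...) SELECT * FROM "r"
#               ON CONFLICT ("std::source", "std::target", "ptr_item_id")
#               DO UPDATE SET (...) = (SELECT ... FROM "r"
#                                      WHERE (...) = (excluded. ...))
#               RETURNING *)
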
def init_dml_stmt(
        ir_stmt: irast.MutatingStmt, dml_stmt: pgast.DML, *,
        ctx: context.CompilerContextLevel,
        parent_ctx: context.CompilerContextLevel) \
        -> typing.Tuple[pgast.Query, pgast.CommonTableExpr,
                        pgast.BaseRangeVar, pgast.CommonTableExpr]:
    """Prepare the common structure of the query representing a DML stmt.

    :param ir_stmt:
        IR of the statement.
    :param dml_stmt:
        SQL DML node instance.

    :return:
        A (*wrapper*, *dml_cte*, *dml_rvar*, *range_cte*) tuple, where
        *wrapper* is the wrapping SQL statement, *dml_cte* is the CTE
        representing the SQL DML operation in the main relation of the
        Object, *dml_rvar* is the range var referring to *dml_cte*, and
        *range_cte* is the CTE for the subset affected by the statement.
        *range_cte* is None for INSERT statements.
    """
    wrapper = ctx.rel

    clauses.init_stmt(ir_stmt, ctx, parent_ctx)

    target_ir_set = ir_stmt.subject

    dml_stmt.relation = dbobj.range_for_set(
        ir_stmt.subject, include_overlays=False, env=ctx.env)
    pathctx.put_path_value_rvar(
        dml_stmt, target_ir_set.path_id, dml_stmt.relation, env=ctx.env)
    dml_stmt.path_scope.add(target_ir_set.path_id)

    dml_cte = pgast.CommonTableExpr(
        query=dml_stmt,
        name=ctx.env.aliases.get(hint='m')
    )

    if isinstance(ir_stmt, (irast.UpdateStmt, irast.DeleteStmt)):
        # UPDATE and DELETE operate over a range, so generate
        # the corresponding CTE and connect it to the DML query.
        range_cte = get_dml_range(ir_stmt, dml_stmt, ctx=ctx)

        range_rvar = pgast.RangeVar(
            relation=range_cte,
            alias=pgast.Alias(
                aliasname=ctx.env.aliases.get(hint='range')
            )
        )

        relctx.pull_path_namespace(
            target=dml_stmt, source=range_rvar, ctx=ctx)

        # Auxiliary relations are always joined via the WHERE
        # clause due to the structure of the UPDATE/DELETE SQL statements.
        id_col = common.edgedb_name_to_pg_name('std::id')
        dml_stmt.where_clause = astutils.new_binop(
            lexpr=pgast.ColumnRef(name=[
                dml_stmt.relation.alias.aliasname, id_col]),
            op=ast.ops.EQ,
            rexpr=pathctx.get_rvar_path_identity_var(
                range_rvar, target_ir_set.path_id, env=ctx.env)
        )

        # UPDATE has "FROM", while DELETE has "USING".
        if hasattr(dml_stmt, 'from_clause'):
            dml_stmt.from_clause.append(range_rvar)
        else:
            dml_stmt.using_clause.append(range_rvar)

    else:
        range_cte = None

    # Due to the fact that DML statements are structured
    # as a flat list of CTEs instead of nested range vars,
    # the top level path scope must be empty.  The necessary
    # range vars will be injected explicitly in all rels that
    # need them.
    ctx.path_scope.clear()

    pathctx.put_path_value_rvar(
        dml_stmt, ir_stmt.subject.path_id, dml_stmt.relation, env=ctx.env)

    dml_rvar = pgast.RangeVar(
        relation=dml_cte,
        alias=pgast.Alias(aliasname=parent_ctx.env.aliases.get('d'))
    )

    relctx.include_rvar(wrapper, dml_rvar, ir_stmt.subject.path_id,
                        aspect='value', ctx=ctx)
    pathctx.put_path_bond(wrapper, ir_stmt.subject.path_id)

    return wrapper, dml_cte, dml_rvar, range_cte

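# For an UPDATE or DELETE, the statement skeleton prepared here looks roughly
# like this (CTE names come from the alias generator and are illustrative):
#
#     WITH "range" AS (<set of objects affected by the statement>),
#          "m"     AS (UPDATE/DELETE ...
#                      WHERE <subject>."std::id" = "range".<identity>
#                      ... RETURNING ...)
#     SELECT ... FROM "m" AS "d" ...
#
# For INSERT there is no "range" CTE and *range_cte* is None.
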
def cte_for_query(
        rel: pgast.Query, *,
        env: context.Environment) -> pgast.CommonTableExpr:
    return pgast.CommonTableExpr(
        query=rel,
        alias=pgast.Alias(
            aliasname=env.aliases.get(rel.name)
        )
    )

def range_for_ptrcls(
        ptrcls: s_links.Link, direction: s_pointers.PointerDirection, *,
        include_overlays: bool = True,
        env: context.Environment) -> pgast.BaseRangeVar:
    """Return a Range subclass corresponding to a given ptr step.

    If `ptrcls` is a generic link, then a simple RangeVar is returned,
    otherwise the return value may potentially be a UNION of all tables
    corresponding to a set of specialized links computed from the given
    `ptrcls` taking source inheritance into account.
    """
    linkname = ptrcls.shortname
    endpoint = ptrcls.source

    tgt_col = pgtypes.get_pointer_storage_info(
        ptrcls, resolve_type=False, link_bias=True).column_name

    cols = [
        'std::source',
        tgt_col
    ]

    set_ops = []

    ptrclses = set()

    for source in {endpoint} | set(endpoint.descendants(env.schema)):
        # Sift through the descendants to see who has this link
        try:
            src_ptrcls = source.pointers[linkname].material_type()
        except KeyError:
            # This source has no such link, skip it
            continue
        else:
            if src_ptrcls in ptrclses:
                # Seen this link already
                continue
            ptrclses.add(src_ptrcls)

        table = table_from_ptrcls(src_ptrcls, env=env)

        qry = pgast.SelectStmt()
        qry.from_clause.append(table)
        qry.rptr_rvar = table

        # Make sure all property references are pulled up properly
        for colname in cols:
            selexpr = pgast.ColumnRef(
                name=[table.alias.aliasname, colname])
            qry.target_list.append(
                pgast.ResTarget(val=selexpr, name=colname))

        set_ops.append(('union', qry))

        overlays = env.rel_overlays.get(src_ptrcls.shortname)
        if overlays and include_overlays:
            for op, cte in overlays:
                rvar = pgast.RangeVar(
                    relation=cte,
                    alias=pgast.Alias(
                        aliasname=env.aliases.get(cte.name)
                    )
                )

                qry = pgast.SelectStmt(
                    target_list=[
                        pgast.ResTarget(
                            val=pgast.ColumnRef(name=[col]))
                        for col in cols
                    ],
                    from_clause=[rvar],
                )
                set_ops.append((op, qry))

    rvar = range_from_queryset(set_ops, ptrcls, env=env)
    return rvar

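# Illustrative result (invented names): for a link defined on Base and
# inherited by Child, with one overlay CTE recorded for it, the returned
# range is roughly
#
#     (SELECT "std::source", <target col> FROM <Base link table>
#      UNION ALL
#      SELECT "std::source", <target col> FROM <Child link table>
#      UNION ALL
#      SELECT "std::source", <target col> FROM <overlay CTE>) AS "linkname"
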
def cast(
        node: pgast.Base, *,
        source_type: s_obj.Object, target_type: s_obj.Object,
        force: bool = False,
        env: context.Environment) -> pgast.Base:

    if source_type.name == target_type.name and not force:
        return node

    schema = env.schema
    real_t = schema.get('std::anyreal')
    int_t = schema.get('std::anyint')
    json_t = schema.get('std::json')
    str_t = schema.get('std::str')
    datetime_t = schema.get('std::datetime')
    bool_t = schema.get('std::bool')

    if isinstance(target_type, s_types.Collection):
        if target_type.schema_name == 'array':

            if source_type.issubclass(json_t):
                # If we are casting a jsonb array to array, we do the
                # following transformation:
                # EdgeQL: <array<T>>MAP_VALUE
                # SQL:
                #      SELECT array_agg(j::T)
                #      FROM jsonb_array_elements(MAP_VALUE) AS j

                inner_cast = cast(
                    pgast.ColumnRef(name=['j']),
                    source_type=source_type,
                    target_type=target_type.element_type,
                    env=env
                )

                return pgast.SelectStmt(
                    target_list=[
                        pgast.ResTarget(
                            val=pgast.FuncCall(
                                name=('array_agg',),
                                args=[inner_cast])
                        )
                    ],
                    from_clause=[
                        pgast.RangeFunction(
                            functions=[pgast.FuncCall(
                                name=('jsonb_array_elements',),
                                args=[node]
                            )],
                            alias=pgast.Alias(
                                aliasname='j'
                            )
                        )
                    ])
            else:
                # EdgeQL: <array<int64>>['1', '2']
                # to SQL: ARRAY['1', '2']::int[]
                elem_pgtype = pg_types.pg_type_from_object(
                    schema, target_type.element_type, topbase=True)

                return pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(
                        name=elem_pgtype,
                        array_bounds=[-1]))

        elif target_type.schema_name == 'map':
            if source_type.issubclass(json_t):
                # If the source type is json do nothing, since
                # maps are already encoded in json.
                return node

            # EdgeQL: <map<Tkey,Tval>>MAP<Vkey,Vval>
            # to SQL: SELECT jsonb_object_agg(
            #                    key::Vkey::Tkey::text,
            #                    value::Vval::Tval)
            #         FROM jsonb_each_text(MAP)

            key_cast = cast(
                cast(
                    cast(
                        pgast.ColumnRef(name=['key']),
                        source_type=str_t,
                        target_type=source_type.key_type,
                        env=env),
                    source_type=source_type.key_type,
                    target_type=target_type.key_type,
                    env=env,
                ),
                source_type=target_type.key_type,
                target_type=str_t,
                env=env,
            )

            target_v_type = target_type.element_type

            val_cast = cast(
                cast(
                    pgast.ColumnRef(name=['value']),
                    source_type=str_t,
                    target_type=source_type.element_type,
                    env=env),
                source_type=source_type.element_type,
                target_type=target_v_type,
                env=env
            )

            map_cast = pgast.SelectStmt(
                target_list=[
                    pgast.ResTarget(
                        val=pgast.FuncCall(
                            name=('jsonb_object_agg',),
                            args=[key_cast, val_cast])
                    )
                ],
                from_clause=[
                    pgast.RangeFunction(
                        functions=[pgast.FuncCall(
                            name=('jsonb_each_text',),
                            args=[node]
                        )]
                    )
                ])

            return pgast.FuncCall(
                name=('coalesce',),
                args=[
                    map_cast,
                    pgast.TypeCast(
                        arg=pgast.Constant(val='{}'),
                        type_name=pgast.TypeName(
                            name=('jsonb',)
                        )
                    )
                ])

    else:
        # `target_type` is not a collection.
        if (source_type.issubclass(datetime_t) and
                target_type.issubclass(str_t)):
            # Normalize datetime to text conversion to have the same
            # format as one would get by serializing to JSON.
            #
            # EdgeQL: <text><datetime>'2010-10-10';
            # To SQL: trim(to_json('2010-01-01'::timestamptz)::text, '"')
            return pgast.FuncCall(
                name=('trim',),
                args=[
                    pgast.TypeCast(
                        arg=pgast.FuncCall(
                            name=('to_json',),
                            args=[node]),
                        type_name=pgast.TypeName(name=('text',))),
                    pgast.Constant(val='"')
                ])

        elif source_type.issubclass(bool_t) and target_type.issubclass(int_t):
            # PostgreSQL 9.6 doesn't allow casting 'boolean' to any integer
            # type other than int32:
            #   SELECT 'true'::boolean::bigint;
            #   ERROR:  cannot cast type boolean to bigint
            # So we transform EdgeQL: <int64>BOOL
            # to SQL: BOOL::int::<targetint>
            return pgast.TypeCast(
                arg=pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(name=('int',))),
                type_name=pgast.TypeName(
                    name=pg_types.pg_type_from_scalar(schema, target_type))
            )

        elif source_type.issubclass(int_t) and target_type.issubclass(bool_t):
            # PostgreSQL 9.6 doesn't allow casting any integer type other
            # than int32 to 'boolean':
            #   SELECT 1::bigint::boolean;
            #   ERROR:  cannot cast type bigint to boolean
            # So we transform EdgeQL: <boolean>INT
            # to SQL: (INT != 0)
            return astutils.new_binop(
                node,
                pgast.Constant(val=0),
                op=ast.ops.NE)

        elif source_type.issubclass(json_t):
            if (target_type.issubclass(real_t) or
                    target_type.issubclass(bool_t)):
                # Simply cast to text and then to the target type.
                return cast(
                    cast(
                        node,
                        source_type=source_type,
                        target_type=str_t,
                        env=env),
                    source_type=str_t,
                    target_type=target_type,
                    env=env)

            elif target_type.issubclass(str_t):
                # It's not possible to cast jsonb string to text directly,
                # so we do a trick:
                # EdgeQL: <str>JSONB_VAL
                # SQL: array_to_json(ARRAY[JSONB_VAL])->>0

                return astutils.new_binop(
                    pgast.FuncCall(
                        name=('array_to_json',),
                        args=[pgast.ArrayExpr(elements=[node])]),
                    pgast.Constant(val=0),
                    op='->>'
                )

            elif target_type.issubclass(json_t):
                return pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(name=('jsonb',))
                )

        else:
            const_type = pg_types.pg_type_from_object(
                schema, target_type, topbase=True)

            return pgast.TypeCast(
                arg=node,
                type_name=pgast.TypeName(
                    name=const_type
                )
            )

    raise RuntimeError(
        f'could not cast {source_type.name} to {target_type.name}')