def _infer_shape(
    ir: irast.Set,
    *,
    is_mutation: bool=False,
    scope_tree: irast.ScopeTreeNode,
    ctx: inference_context.InfCtx,
) -> None:
    for shape_set, _ in ir.shape:
        new_scope = cardinality._get_set_scope(shape_set, scope_tree, ctx=ctx)

        if shape_set.expr and shape_set.rptr:
            expr_mult = infer_multiplicity(
                shape_set.expr, scope_tree=new_scope, ctx=ctx)

            ptrref = shape_set.rptr.ptrref
            if expr_mult is MANY and irtyputils.is_object(ptrref.out_target):
                raise errors.QueryError(
                    f'possibly not a strict set returned by an '
                    f'expression for a computed '
                    f'{ptrref.shortname.name}.',
                    hint=(
                        f'Use DISTINCT for the entire computed expression '
                        f'to resolve this.'
                    ),
                    context=shape_set.context
                )

        _infer_shape(
            shape_set,
            is_mutation=is_mutation,
            scope_tree=scope_tree,
            ctx=ctx)

def _new_mapped_pointer_rvar(
        ir_ptr: irast.Pointer, *,
        ctx: context.CompilerContextLevel) -> pgast.PathRangeVar:
    ptrref = ir_ptr.ptrref
    dml_source = irutils.get_nearest_dml_stmt(ir_ptr.source)
    ptr_rvar = range_for_pointer(ir_ptr, dml_source=dml_source, ctx=ctx)

    src_col = 'source'
    source_ref = pgast.ColumnRef(name=[src_col], nullable=False)

    if (irtyputils.is_object(ptrref.out_target)
            and not irtyputils.is_computable_ptrref(ptrref)):
        tgt_ptr_info = pg_types.get_ptrref_storage_info(
            ptrref, link_bias=True, resolve_type=False)
        tgt_col = tgt_ptr_info.column_name
    else:
        tgt_col = 'target'

    target_ref = pgast.ColumnRef(
        name=[tgt_col],
        nullable=not ptrref.required)

    # Set up references according to the link direction.
    if ir_ptr.direction == s_pointers.PointerDirection.Inbound:
        near_ref = target_ref
        far_ref = source_ref
    else:
        near_ref = source_ref
        far_ref = target_ref

    src_pid = ir_ptr.source.path_id
    tgt_pid = ir_ptr.target.path_id
    ptr_pid = tgt_pid.ptr_path()

    ptr_rvar.query.path_id = ptr_pid

    pathctx.put_rvar_path_bond(ptr_rvar, src_pid)
    pathctx.put_rvar_path_output(ptr_rvar, src_pid, aspect='identity',
                                 var=near_ref, env=ctx.env)
    pathctx.put_rvar_path_output(ptr_rvar, src_pid, aspect='value',
                                 var=near_ref, env=ctx.env)
    pathctx.put_rvar_path_output(ptr_rvar, tgt_pid, aspect='value',
                                 var=far_ref, env=ctx.env)

    if tgt_pid.is_objtype_path():
        pathctx.put_rvar_path_bond(ptr_rvar, tgt_pid)
        pathctx.put_rvar_path_output(ptr_rvar, tgt_pid, aspect='identity',
                                     var=far_ref, env=ctx.env)

    return ptr_rvar

def compile_Introspect(
        expr: qlast.Introspect, *, ctx: context.ContextLevel) -> irast.Set:

    typeref = typegen.ql_typeexpr_to_ir_typeref(expr.type, ctx=ctx)
    if typeref.material_type and not irtyputils.is_object(typeref):
        typeref = typeref.material_type
    if typeref.is_opaque_union:
        typeref = typegen.type_to_typeref(
            typing.cast(
                s_objtypes.ObjectType,
                ctx.env.schema.get('std::BaseObject'),
            ),
            env=ctx.env,
        )

    if irtyputils.is_view(typeref):
        raise errors.QueryError(
            f'cannot introspect transient type variant',
            context=expr.type.context)
    if irtyputils.is_collection(typeref):
        raise errors.QueryError(
            f'cannot introspect collection types',
            context=expr.type.context)
    if irtyputils.is_generic(typeref):
        raise errors.QueryError(
            f'cannot introspect generic types',
            context=expr.type.context)

    return setgen.ensure_set(irast.TypeIntrospection(typeref=typeref), ctx=ctx)

def pg_type_from_ir_typeref(
        ir_typeref: irast.TypeRef, *,
        serialized: bool = False,
        persistent_tuples: bool = False) -> Tuple[str, ...]:

    if irtyputils.is_array(ir_typeref):
        if (irtyputils.is_generic(ir_typeref)
                or (irtyputils.is_abstract(ir_typeref.subtypes[0])
                    and irtyputils.is_scalar(ir_typeref.subtypes[0]))):
            return ('anyarray', )
        else:
            tp = pg_type_from_ir_typeref(
                ir_typeref.subtypes[0],
                serialized=serialized,
                persistent_tuples=persistent_tuples)
            if len(tp) == 1:
                return (tp[0] + '[]', )
            else:
                return (tp[0], tp[1] + '[]')

    elif irtyputils.is_anytuple(ir_typeref):
        return ('record', )

    elif irtyputils.is_tuple(ir_typeref):
        if ir_typeref.material_type:
            material = ir_typeref.material_type
        else:
            material = ir_typeref

        if persistent_tuples or material.in_schema:
            return common.get_tuple_backend_name(material.id, catenate=False)
        else:
            return ('record', )

    elif irtyputils.is_any(ir_typeref):
        return ('anyelement', )

    else:
        if ir_typeref.material_type:
            material = ir_typeref.material_type
        else:
            material = ir_typeref

        if irtyputils.is_object(material):
            if serialized:
                return ('record', )
            else:
                return ('uuid', )
        elif irtyputils.is_abstract(material):
            return ('anynonarray', )
        else:
            pg_type = base_type_name_map.get(material.id)
            if pg_type is None:
                # User-defined scalar type
                pg_type = common.get_scalar_backend_name(
                    material.id, material.name_hint.module, catenate=False)
            return pg_type

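# Illustrative sketch (not part of the module above; names are hypothetical):
# the array branch of pg_type_from_ir_typeref recurses into the element type
# and appends '[]' to the last component of the backend name it gets back.
# The toy mapper below mirrors only that suffixing rule; the real function
# also handles tuples, objects, and schema-defined scalars.
def _toy_pg_array_type(element_pg_name: tuple) -> tuple:
    # ('text',)             -> ('text[]',)
    # ('edgedb', 'tuple_x') -> ('edgedb', 'tuple_x[]')
    if len(element_pg_name) == 1:
        return (element_pg_name[0] + '[]', )
    return (element_pg_name[0], element_pg_name[1] + '[]')

assert _toy_pg_array_type(('text', )) == ('text[]', )
assert _toy_pg_array_type(('edgedb', 'tuple_x')) == ('edgedb', 'tuple_x[]')
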
def _validate_config_object(
        expr: irast.Set, *,
        level: str,
        ctx: context.ContextLevel) -> None:

    for element, _ in expr.shape:
        if element.rptr.ptrref.shortname.name == 'id':
            continue

        if (irtyputils.is_object(element.typeref)
                and isinstance(element.expr, irast.InsertStmt)):
            _validate_config_object(element, level=level, ctx=ctx)

def __infer_type_introspection(ir, env):
    if irtyputils.is_scalar(ir.typeref):
        return env.schema.get('schema::ScalarType')
    elif irtyputils.is_object(ir.typeref):
        return env.schema.get('schema::ObjectType')
    elif irtyputils.is_array(ir.typeref):
        return env.schema.get('schema::Array')
    elif irtyputils.is_tuple(ir.typeref):
        return env.schema.get('schema::Tuple')
    else:
        raise errors.QueryError(
            'unexpected type in INTROSPECT', context=ir.context)

def _infer_set_inner(
    ir: irast.Set,
    *,
    is_mutation: bool=False,
    scope_tree: irast.ScopeTreeNode,
    ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
    rptr = ir.rptr
    new_scope = cardinality._get_set_scope(ir, scope_tree, ctx=ctx)

    if rptr is not None:
        # Validate the source
        infer_multiplicity(rptr.source, scope_tree=new_scope, ctx=ctx)

    if ir.expr:
        expr_mult = infer_multiplicity(ir.expr, scope_tree=new_scope, ctx=ctx)

    if rptr is not None:
        rptrref = rptr.ptrref
        if isinstance(rptr.ptrref, irast.TupleIndirectionPointerRef):
            # All bets are off for tuple elements.
            return MANY
        elif not irtyputils.is_object(ir.typeref):
            # This is not an expression and is some kind of scalar, so
            # multiplicity cannot be guaranteed to be ONE (most scalar
            # expressions don't have an implicit requirement to be sets)
            # unless we also have an exclusive constraint.
            if rptr is not None:
                schema = ctx.env.schema
                # We should only have some kind of path terminating in a
                # property here.
                assert isinstance(rptrref, irast.PointerRef)
                ptr = schema.get_by_id(rptrref.id, type=s_pointers.Pointer)
                if ptr.is_exclusive(schema):
                    # Got an exclusive constraint
                    return ONE

            return MANY
        else:
            # This is some kind of a link at the end of a path.
            # Therefore the target is a proper set.
            return ONE
    elif ir.expr is not None:
        return expr_mult
    else:
        # Evidently this is not a pointer, expression, or a scalar.
        # This is an object type and therefore a proper set.
        return ONE

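# Illustrative sketch (toy names, not the module's API): the scalar branch
# above boils down to "an exclusive property has at most one source per
# value, so the path is ONE; otherwise assume MANY", while a link target is
# always a proper set.  A minimal restatement over plain booleans:
def _toy_path_multiplicity(is_object: bool, is_exclusive: bool) -> str:
    if is_object:
        # A link at the end of a path targets a proper (distinct) set.
        return 'ONE'
    return 'ONE' if is_exclusive else 'MANY'

assert _toy_path_multiplicity(is_object=False, is_exclusive=True) == 'ONE'
assert _toy_path_multiplicity(is_object=False, is_exclusive=False) == 'MANY'
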
def get_pg_type(
        typeref: irast.TypeRef, *,
        ctx: context.CompilerContextLevel) -> typing.Tuple[str, ...]:

    if in_serialization_ctx(ctx):
        if ctx.env.output_format is context.OutputFormat.JSONB:
            return ('jsonb', )
        elif ctx.env.output_format is context.OutputFormat.JSON:
            return ('json', )
        elif irtyputils.is_object(typeref):
            return ('record', )
        else:
            return pgtypes.pg_type_from_ir_typeref(typeref)
    else:
        return pgtypes.pg_type_from_ir_typeref(typeref)

def __infer_type_introspection(
    ir: irast.TypeIntrospection,
    env: context.Environment,
) -> s_types.Type:
    if irtyputils.is_scalar(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::ScalarType'))
    elif irtyputils.is_object(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::ObjectType'))
    elif irtyputils.is_array(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::Array'))
    elif irtyputils.is_tuple(ir.typeref):
        return cast(s_objtypes.ObjectType,
                    env.schema.get('schema::Tuple'))
    else:
        raise errors.QueryError(
            'unexpected type in INTROSPECT', context=ir.context)

def compile_Introspect(
        expr: qlast.Introspect, *, ctx: context.ContextLevel) -> irast.Base:

    typeref = typegen.ql_typeref_to_ir_typeref(expr.type, ctx=ctx)
    if typeref.material_type and not irtyputils.is_object(typeref):
        typeref = typeref.material_type

    if irtyputils.is_view(typeref):
        raise errors.QueryError(
            f'cannot introspect views',
            context=expr.type.context)
    if irtyputils.is_collection(typeref):
        raise errors.QueryError(
            f'cannot introspect collection types',
            context=expr.type.context)
    if irtyputils.is_generic(typeref):
        raise errors.QueryError(
            f'cannot introspect generic types',
            context=expr.type.context)

    return irast.TypeIntrospection(typeref=typeref)

def __infer_set(
    ir: irast.Set,
    env: context.Environment,
) -> InferredVolatility:
    vol: InferredVolatility

    if ir.path_id in env.singletons:
        vol = IMMUTABLE
    elif ir.rptr is not None:
        src_vol = _infer_volatility(ir.rptr.source, env)

        # If source is an object, then a pointer reference implies
        # a table scan, and so we can assume STABLE at the minimum.
        #
        # A single dereference of a singleton path can be IMMUTABLE,
        # though, which we need in order to enforce that indexes
        # don't call STABLE functions.
        if (
            irtyputils.is_object(ir.rptr.source.typeref)
            and ir.rptr.source.path_id not in env.singletons
        ):
            vol = _max_volatility((src_vol, STABLE))
        else:
            vol = src_vol
    elif ir.expr is not None:
        vol = _infer_volatility(ir.expr, env)
    else:
        vol = STABLE

    # Cache the best-known volatility up to this point, to prevent
    # infinite recursion.
    env.inferred_volatility[ir] = vol

    if ir.shape:
        vol = _max_volatility([
            _common_volatility(
                (el.expr for el, _ in ir.shape if el.expr), env
            ),
            vol,
        ])

    if ir.is_binding:
        vol = IMMUTABLE

    return vol

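# Illustrative sketch (hypothetical names, not the module's API): volatility
# inference combines the volatilities of sub-expressions by taking the most
# volatile one, assuming the usual Postgres ordering
# IMMUTABLE < STABLE < VOLATILE.  A minimal version of that max operation:
from enum import IntEnum

class _ToyVolatility(IntEnum):
    IMMUTABLE = 1
    STABLE = 2
    VOLATILE = 3

def _toy_max_volatility(vols):
    return max(vols, default=_ToyVolatility.IMMUTABLE)

assert _toy_max_volatility(
    [_ToyVolatility.IMMUTABLE, _ToyVolatility.STABLE]) is _ToyVolatility.STABLE
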
def _infer_shape(
    ir: irast.Set,
    *,
    is_mutation: bool=False,
    scope_tree: irast.ScopeTreeNode,
    ctx: inf_ctx.InfCtx,
) -> None:
    for shape_set, shape_op in ir.shape:
        new_scope = inf_utils.get_set_scope(shape_set, scope_tree, ctx=ctx)

        if shape_set.expr and shape_set.rptr:
            expr_mult = infer_multiplicity(
                shape_set.expr, scope_tree=new_scope, ctx=ctx)

            ptrref = shape_set.rptr.ptrref
            if (
                expr_mult.is_many()
                and shape_op is not qlast.ShapeOp.APPEND
                and shape_op is not qlast.ShapeOp.SUBTRACT
                and irtyputils.is_object(ptrref.out_target)
            ):
                ctx.env.schema, ptrcls = irtyputils.ptrcls_from_ptrref(
                    ptrref, schema=ctx.env.schema)
                assert isinstance(ptrcls, s_pointers.Pointer)
                desc = ptrcls.get_verbosename(ctx.env.schema)
                if not is_mutation:
                    desc = f"computed {desc}"

                raise errors.QueryError(
                    f'possibly not a distinct set returned by an '
                    f'expression for a {desc}',
                    hint=(
                        f'You can use assert_distinct() around the expression '
                        f'to turn this into a runtime assertion, or the '
                        f'DISTINCT operator to silently discard duplicate '
                        f'elements.'
                    ),
                    context=shape_set.context
                )

        _infer_shape(
            shape_set,
            is_mutation=is_mutation,
            scope_tree=scope_tree,
            ctx=ctx)

def _infer_set_inner(
    ir: irast.Set,
    *,
    is_mutation: bool=False,
    scope_tree: irast.ScopeTreeNode,
    ctx: inf_ctx.InfCtx,
) -> inf_ctx.MultiplicityInfo:
    rptr = ir.rptr
    new_scope = cardinality.inf_utils.get_set_scope(ir, scope_tree, ctx=ctx)

    if ir.expr is None:
        expr_mult = None
    else:
        expr_mult = infer_multiplicity(ir.expr, scope_tree=new_scope, ctx=ctx)

    if rptr is not None:
        rptrref = rptr.ptrref
        src_mult = infer_multiplicity(
            rptr.source, scope_tree=new_scope, ctx=ctx)

        if isinstance(rptrref, irast.TupleIndirectionPointerRef):
            if isinstance(src_mult, ContainerMultiplicityInfo):
                idx = irtyputils.get_tuple_element_index(rptrref)
                path_mult = src_mult.elements[idx]
            else:
                # All bets are off for tuple elements coming from
                # opaque tuples.
                path_mult = MANY
        elif not irtyputils.is_object(ir.typeref):
            # This is not an expression and is some kind of scalar, so
            # multiplicity cannot be guaranteed to be ONE (most scalar
            # expressions don't have an implicit requirement to be sets)
            # unless we also have an exclusive constraint.
            if (
                expr_mult is not None
                and inf_utils.find_visible(rptr.source, new_scope) is not None
            ):
                path_mult = expr_mult
            else:
                schema = ctx.env.schema
                # We should only have some kind of path terminating in a
                # property here.
                assert isinstance(rptrref, irast.PointerRef)
                ptr = schema.get_by_id(rptrref.id, type=s_pointers.Pointer)
                if ptr.is_exclusive(schema):
                    # Got an exclusive constraint
                    path_mult = ONE
                else:
                    path_mult = MANY
        else:
            # This is some kind of a link at the end of a path.
            # Therefore the target is a proper set.
            path_mult = ONE
    elif expr_mult is not None:
        path_mult = expr_mult
    else:
        # Evidently this is not a pointer, expression, or a scalar.
        # This is an object type and therefore a proper set.
        path_mult = ONE

    if (
        not path_mult.is_many()
        and irutils.get_path_root(ir).path_id == ctx.distinct_iterator
    ):
        path_mult = dataclasses.replace(path_mult, disjoint_union=True)

    # Mark free object roots
    if irtyputils.is_free_object(ir.typeref) and not ir.expr:
        path_mult = dataclasses.replace(path_mult, fresh_free_object=True)

    # Remove free object freshness when we see them through a binding
    if (ir.is_binding == irast.BindingKind.With
            and path_mult.fresh_free_object):
        path_mult = dataclasses.replace(path_mult, fresh_free_object=False)

    return path_mult

def _get_ptrref_storage_info(
        ptrref: irast.BasePointerRef, *,
        resolve_type=True, link_bias=False,
        allow_missing=False) -> Optional[PointerStorageInfo]:

    if ptrref.material_ptr:
        ptrref = ptrref.material_ptr

    if ptrref.out_cardinality is None:
        # Guard against the IR generator failing to populate the PointerRef
        # cardinality correctly.
        raise RuntimeError(
            f'cannot determine backend storage parameters for the '
            f'{ptrref.name!r} pointer: the cardinality is not known')

    is_lprop = ptrref.source_ptr is not None

    if is_lprop:
        source = ptrref.source_ptr
    else:
        source = ptrref.out_source

    target = ptrref.out_target

    if is_lprop and str(ptrref.std_parent_name) == 'std::target':
        # Normalize link@target to link
        ptrref = source
        is_lprop = False

    if isinstance(ptrref, irast.TupleIndirectionPointerRef):
        table = None
        table_type = 'ObjectType'
        col_name = ptrref.shortname.name

    elif is_lprop:
        table = common.get_pointer_backend_name(
            source.id, source.name.module, catenate=False)
        table_type = 'link'
        if ptrref.shortname.name == 'source':
            col_name = 'source'
        else:
            col_name = str(ptrref.id)
    else:
        if irtyputils.is_scalar(source):
            # This is a pseudo-link on a scalar (__type__)
            table = None
            table_type = 'ObjectType'
            col_name = None

        elif _storable_in_source(ptrref) and not link_bias:
            table = common.get_objtype_backend_name(
                source.id, source.name_hint.module, catenate=False)
            ptrname = ptrref.shortname.name
            if ptrname.startswith('__') or ptrname == 'id':
                col_name = ptrname
            else:
                col_name = str(ptrref.id)
            table_type = 'ObjectType'

        elif _storable_in_pointer(ptrref):
            table = common.get_pointer_backend_name(
                ptrref.id, ptrref.name.module, catenate=False)
            col_name = 'target'
            table_type = 'link'

        elif not link_bias and not allow_missing:
            raise RuntimeError(
                f'cannot determine backend storage parameters for the '
                f'{ptrref.name} pointer: unexpected characteristics')
        else:
            return None

    if resolve_type:
        if irtyputils.is_object(target):
            column_type = ('uuid', )
        else:
            column_type = pg_type_from_ir_typeref(
                target, persistent_tuples=True)
    else:
        column_type = None

    return PointerStorageInfo(
        table_name=table, table_type=table_type,
        column_name=col_name, column_type=column_type)

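# Illustrative sketch (toy restatement, not the module's API): for pointers
# stored inline in the object table above, 'id' and dunder-prefixed pointers
# such as '__type__' keep their own names as column names, while ordinary
# pointers get a column named after the pointer's id.
def _toy_inline_column_name(ptr_name: str, ptr_id: str) -> str:
    if ptr_name.startswith('__') or ptr_name == 'id':
        return ptr_name
    return ptr_id

assert _toy_inline_column_name('id', '1f2e...') == 'id'
assert _toy_inline_column_name('title', '1f2e...') == '1f2e...'
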
def _get_shape_configuration(
        ir_set: irast.Set, *,
        rptr: typing.Optional[irast.Pointer]=None,
        parent_view_type: typing.Optional[s_types.ViewType]=None,
        ctx: context.ContextLevel) \
        -> typing.List[typing.Tuple[irast.Set, s_pointers.Pointer]]:
    """Return a list of (source_set, ptrcls) pairs as a shape for a given set.
    """

    stype = setgen.get_set_type(ir_set, ctx=ctx)

    sources = []
    link_view = False
    is_objtype = ir_set.path_id.is_objtype_path()

    if rptr is None:
        rptr = ir_set.rptr

    if rptr is not None:
        rptrcls = irtyputils.ptrcls_from_ptrref(
            rptr.ptrref, schema=ctx.env.schema)
    else:
        rptrcls = None

    link_view = (
        rptrcls is not None and
        not rptrcls.is_link_property(ctx.env.schema) and
        _link_has_shape(rptrcls, ctx=ctx)
    )

    if is_objtype or not link_view:
        sources.append(stype)

    if link_view:
        sources.append(rptrcls)

    shape_ptrs = []

    id_present_in_shape = False

    for source in sources:
        for ptr in ctx.env.view_shapes[source]:
            if (ptr.is_link_property(ctx.env.schema) and
                    ir_set.path_id != rptr.target.path_id):
                path_tip = rptr.target
            else:
                path_tip = ir_set

            shape_ptrs.append((path_tip, ptr))

            if source is stype and ptr.is_id_pointer(ctx.env.schema):
                id_present_in_shape = True

    if is_objtype and not id_present_in_shape:
        view_type = stype.get_view_type(ctx.env.schema)
        is_mutation = view_type in (s_types.ViewType.Insert,
                                    s_types.ViewType.Update)
        is_parent_update = parent_view_type is s_types.ViewType.Update

        implicit_id = (
            # shape is not specified at all
            not shape_ptrs
            # implicit ids are always wanted
            or (ctx.implicit_id_in_shapes and not is_mutation)
            # we are inside an UPDATE shape and this is
            # an explicit expression (link target update)
            or (is_parent_update and ir_set.expr is not None)
        )

        if implicit_id:
            # We want the id in this shape and it's not already there,
            # so insert it in the first position.
            pointers = stype.get_pointers(ctx.env.schema).objects(
                ctx.env.schema)
            for ptr in pointers:
                if ptr.is_id_pointer(ctx.env.schema):
                    view_shape = ctx.env.view_shapes[stype]
                    if ptr not in view_shape:
                        shape_metadata = ctx.env.view_shapes_metadata[stype]
                        view_shape.insert(0, ptr)
                        shape_metadata.has_implicit_id = True
                        shape_ptrs.insert(0, (ir_set, ptr))
                    break

    if (ir_set.typeref is not None
            and irtyputils.is_object(ir_set.typeref)
            and parent_view_type is not s_types.ViewType.Insert
            and parent_view_type is not s_types.ViewType.Update
            and ctx.implicit_tid_in_shapes):
        ql = qlast.ShapeElement(
            expr=qlast.Path(
                steps=[
                    qlast.Ptr(
                        ptr=qlast.ObjectRef(name='__tid__'),
                        direction=s_pointers.PointerDirection.Outbound,
                    )
                ],
            ),
            compexpr=qlast.Path(
                steps=[
                    qlast.Source(),
                    qlast.Ptr(
                        ptr=qlast.ObjectRef(name='__type__'),
                        direction=s_pointers.PointerDirection.Outbound,
                    ),
                    qlast.Ptr(
                        ptr=qlast.ObjectRef(name='id'),
                        direction=s_pointers.PointerDirection.Outbound,
                    )
                ]
            )
        )

        with ctx.newscope(fenced=True) as scopectx:
            scopectx.anchors = scopectx.anchors.copy()
            scopectx.anchors[qlast.Source] = ir_set
            ptr = _normalize_view_ptr_expr(
                ql, stype, path_id=ir_set.path_id, ctx=scopectx)
            view_shape = ctx.env.view_shapes[stype]
            if ptr not in view_shape:
                view_shape.insert(0, ptr)
                shape_ptrs.insert(0, (ir_set, ptr))

    return shape_ptrs

def compile_operator(
        expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
            or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        assert rexpr
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_operator[1:])

    elif expr.sql_function:
        sql_func = expr.sql_function[0]
        func_name = tuple(sql_func.split('.', 1))
        if len(expr.sql_function) > 1:
            # Explicit operand types given in FROM SQL FUNCTION
            lexpr, rexpr = _cast_operands(lexpr, rexpr, expr.sql_function[1:])

        args = []
        if lexpr is not None:
            args.append(lexpr)
        if rexpr is not None:
            args.append(rexpr)

        result = pgast.FuncCall(name=func_name, args=args)

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result

def compile_operator(
        expr: irast.OperatorCall, args: Sequence[pgast.BaseExpr], *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    lexpr = rexpr = None
    result: Optional[pgast.BaseExpr] = None

    if expr.operator_kind is ql_ft.OperatorKind.Infix:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.Prefix:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.Postfix:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    str_func_name = str(expr.func_shortname)
    if ((str_func_name in {'std::=', 'std::!='}
            or str(expr.origin_name) in {'std::=', 'std::!='})
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        if str_func_name == 'std::=' or str(expr.origin_name) == 'std::=':
            sql_oper = '='
        else:
            sql_oper = '!='

    elif str_func_name == 'std::EXISTS':
        result = pgast.NullTest(arg=rexpr, negated=True)

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            if lexpr is not None:
                lexpr = pgast.TypeCast(
                    arg=lexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[1], )))

            if rexpr is not None:
                rexpr_qry = None

                if (isinstance(rexpr, pgast.SubLink)
                        and isinstance(rexpr.expr, pgast.SelectStmt)):
                    rexpr_qry = rexpr.expr
                elif isinstance(rexpr, pgast.SelectStmt):
                    rexpr_qry = rexpr

                if rexpr_qry is not None:
                    # Handle cases like foo <op> ANY (SELECT) and
                    # foo <OP> (SELECT).
                    rexpr_qry.target_list[0] = pgast.ResTarget(
                        name=rexpr_qry.target_list[0].name,
                        val=pgast.TypeCast(
                            arg=rexpr_qry.target_list[0].val,
                            type_name=pgast.TypeName(
                                name=(expr.sql_operator[2], ))))
                else:
                    rexpr = pgast.TypeCast(
                        arg=rexpr,
                        type_name=pgast.TypeName(
                            name=(expr.sql_operator[2], )))

    elif expr.origin_name is not None:
        sql_oper = common.get_operator_backend_name(expr.origin_name)[1]

    else:
        sql_oper = common.get_operator_backend_name(expr.func_shortname)[1]

    # If result was not already computed, it's going to be a generic Expr.
    if result is None:
        result = pgast.Expr(
            kind=pgast.ExprKind.OP,
            name=sql_oper,
            lexpr=lexpr,
            rexpr=rexpr,
        )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result

def compile_OperatorCall(
        expr: irast.OperatorCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:

    if (expr.func_shortname == 'std::IF'
            and expr.args[0].cardinality is ql_ft.Cardinality.ONE
            and expr.args[2].cardinality is ql_ft.Cardinality.ONE):
        if_expr, condition, else_expr = (a.expr for a in expr.args)

        return pgast.CaseExpr(
            args=[
                pgast.CaseWhen(
                    expr=dispatch.compile(condition, ctx=ctx),
                    result=dispatch.compile(if_expr, ctx=ctx))
            ],
            defresult=dispatch.compile(else_expr, ctx=ctx))

    if expr.typemod is ql_ft.TypeModifier.SET_OF:
        raise RuntimeError(
            f'set returning operator {expr.func_shortname!r} is not supported '
            f'in simple expressions')

    args = [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]

    lexpr = rexpr = None
    if expr.operator_kind is ql_ft.OperatorKind.INFIX:
        lexpr, rexpr = args
    elif expr.operator_kind is ql_ft.OperatorKind.PREFIX:
        rexpr = args[0]
    elif expr.operator_kind is ql_ft.OperatorKind.POSTFIX:
        lexpr = args[0]
    else:
        raise RuntimeError(f'unexpected operator kind: {expr.operator_kind!r}')

    if (expr.func_shortname == 'std::='
            and expr.args[0].expr.typeref is not None
            and irtyputils.is_object(expr.args[0].expr.typeref)
            and expr.args[1].expr.typeref is not None
            and irtyputils.is_object(expr.args[1].expr.typeref)):
        sql_oper = '='

    elif expr.sql_operator:
        sql_oper = expr.sql_operator[0]
        if len(expr.sql_operator) > 1:
            # Explicit operand types given in FROM SQL OPERATOR
            if lexpr is not None:
                lexpr = pgast.TypeCast(
                    arg=lexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[1], )))
            if rexpr is not None:
                rexpr = pgast.TypeCast(
                    arg=rexpr,
                    type_name=pgast.TypeName(name=(expr.sql_operator[2], )))

    else:
        sql_oper = common.get_operator_backend_name(
            expr.func_shortname, expr.func_module_id)[1]

    result: pgast.BaseExpr = pgast.Expr(
        kind=pgast.ExprKind.OP,
        name=sql_oper,
        lexpr=lexpr,
        rexpr=rexpr,
    )

    if expr.force_return_cast:
        # The underlying operator has a return value type
        # different from that of the EdgeQL operator declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result

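# Illustrative sketch (toy types, not pgast): the operator compilers above
# place the compiled arguments on either side of the SQL operator depending
# on the operator kind: INFIX fills both sides, PREFIX only the right side,
# POSTFIX only the left.
def _toy_place_operands(kind: str, args: list):
    if kind == 'INFIX':
        lexpr, rexpr = args
    elif kind == 'PREFIX':
        lexpr, rexpr = None, args[0]
    elif kind == 'POSTFIX':
        lexpr, rexpr = args[0], None
    else:
        raise RuntimeError(f'unexpected operator kind: {kind!r}')
    return lexpr, rexpr

assert _toy_place_operands('INFIX', ['a', 'b']) == ('a', 'b')
assert _toy_place_operands('PREFIX', ['x']) == (None, 'x')
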
def get_ref_storage_info(schema, refs):
    link_biased = {}
    objtype_biased = {}

    ref_ptrs = {}
    refs = list(refs)
    for ref in refs:
        rptr = ref.rptr
        if rptr is None:
            source_typeref = ref.typeref
            if not irtyputils.is_object(source_typeref):
                continue
            schema, t = irtyputils.ir_typeref_to_type(schema, ref.typeref)
            ptr = t.getptr(schema, s_name.UnqualName('id'))
        else:
            ptrref = ref.rptr.ptrref
            schema, ptr = irtyputils.ptrcls_from_ptrref(ptrref, schema=schema)
            source_typeref = ref.rptr.source.typeref

        if ptr.is_link_property(schema):
            srcref = ref.rptr.source.rptr.ptrref
            schema, src = irtyputils.ptrcls_from_ptrref(srcref, schema=schema)
            if src.get_is_derived(schema):
                # This specialized pointer was derived specifically
                # for the purposes of constraint expr compilation.
                src = src.get_bases(schema).first(schema)
        elif ptr.is_tuple_indirection():
            refs.append(ref.rptr.source)
            continue
        elif ptr.is_type_intersection():
            refs.append(ref.rptr.source)
            continue
        else:
            schema, src = irtyputils.ir_typeref_to_type(
                schema, source_typeref)
        ref_ptrs[ref] = (ptr, src)

    for ref, (ptr, src) in ref_ptrs.items():
        ptr_info = types.get_pointer_storage_info(
            ptr, source=src, resolve_type=False, schema=schema)

        # See if any of the refs are hosted in pointer tables and others
        # are not...
        if ptr_info.table_type == 'link':
            link_biased[ref] = ptr_info
        else:
            objtype_biased[ref] = ptr_info

        if link_biased and objtype_biased:
            break

    if link_biased and objtype_biased:
        for ref in objtype_biased.copy():
            ptr, src = ref_ptrs[ref]
            ptr_info = types.get_pointer_storage_info(
                ptr, source=src, resolve_type=False, link_bias=True,
                schema=schema)

            if ptr_info is not None and ptr_info.table_type == 'link':
                link_biased[ref] = ptr_info
                objtype_biased.pop(ref)

    ref_tables = {}

    for ref, ptr_info in itertools.chain(objtype_biased.items(),
                                         link_biased.items()):
        ptr, src = ref_ptrs[ref]

        try:
            ref_tables[ptr_info.table_name].append((ref, ptr, src, ptr_info))
        except KeyError:
            ref_tables[ptr_info.table_name] = [(ref, ptr, src, ptr_info)]

    return ref_tables
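
# Illustrative sketch (hypothetical inputs, not the module's API): the final
# loop above groups per-reference storage info by backend table name;
# dict.setdefault expresses the same grouping without the try/except.
def _toy_group_by_table(infos):
    # infos: iterable of (ref, table_name) pairs
    tables = {}
    for ref, table_name in infos:
        tables.setdefault(table_name, []).append(ref)
    return tables

assert _toy_group_by_table([('a', 't1'), ('b', 't1'), ('c', 't2')]) == {
    't1': ['a', 'b'], 't2': ['c']}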