def get_param_anchors_for_callable(
    params: s_func.ParameterLikeList,
    schema: s_schema.Schema,
    *,
    inlined_defaults: bool,
) -> Tuple[
    Dict[str, irast.Parameter],
    List[qlast.AliasedExpr],
]:
    """Build expression anchors for the parameters of a callable.

    Returns an ``(anchors, aliases)`` pair.  *anchors* maps parameter
    short names to ``irast.Parameter`` nodes; when *inlined_defaults*
    is set it also contains the synthetic ``__defaults_mask__`` bytes
    parameter.  *aliases* (only populated when *inlined_defaults* is
    set) holds one ``AliasedExpr`` per defaulted parameter that
    substitutes the parameter's default expression whenever the
    corresponding bit of ``__defaults_mask__`` is non-zero.
    """
    anchors = {}
    aliases = []

    if inlined_defaults:
        anchors['__defaults_mask__'] = irast.Parameter(
            name='__defaults_mask__',
            typeref=irtyputils.type_to_typeref(  # note: no cache
                schema,
                cast(s_scalars.ScalarType, schema.get('std::bytes')),
            ),
        )

    pg_params = s_func.PgParams.from_params(schema, params)
    for pi, p in enumerate(pg_params.params):
        p_shortname = p.get_shortname(schema)
        anchors[p_shortname] = irast.Parameter(
            name=p_shortname,
            typeref=irtyputils.type_to_typeref(schema, p.get_type(schema)))

        if p.get_default(schema) is None:
            continue

        if not inlined_defaults:
            continue

        # Equivalent of:
        #   <p> IF std::bytes_get_bit(__defaults_mask__, pi) = 0
        #   ELSE <default>
        aliases.append(
            qlast.AliasedExpr(
                alias=p_shortname,
                expr=qlast.IfElse(
                    condition=qlast.BinOp(
                        left=qlast.FunctionCall(
                            func=('std', 'bytes_get_bit'),
                            args=[
                                qlast.Path(steps=[
                                    qlast.ObjectRef(
                                        name='__defaults_mask__')
                                ]),
                                qlast.IntegerConstant(value=str(pi)),
                            ]),
                        right=qlast.IntegerConstant(value='0'),
                        op='='),
                    if_expr=qlast.Path(
                        steps=[qlast.ObjectRef(name=p_shortname)]),
                    else_expr=qlast._Optional(
                        expr=p.get_ql_default(schema)))))

    return anchors, aliases
def _cast_array_literal(ir_set: irast.Set,
                        orig_stype: s_types.Type,
                        new_stype: s_types.Type, *,
                        srcctx: typing.Optional[parsing.ParserContext],
                        ctx: context.ContextLevel) -> irast.Set:
    """Cast an array *literal* by casting each element individually.

    If a direct cast from the source array type to *new_stype* exists,
    each element is cast to *new_stype* and the direct cast is applied
    on top; otherwise *new_stype* must itself be an array type, each
    element is cast to its element type, and a plain SQL cast is
    emitted.

    :raises errors.QueryError: if there is no direct cast and the
        target type is not an array.
    """
    assert isinstance(ir_set.expr, irast.Array)

    orig_typeref = irtyputils.type_to_typeref(ctx.env.schema, orig_stype)
    new_typeref = irtyputils.type_to_typeref(ctx.env.schema, new_stype)
    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    if direct_cast is None:
        if not new_stype.is_array():
            raise errors.QueryError(
                f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
                f'to {new_stype.get_displayname(ctx.env.schema)!r}',
                context=srcctx) from None
        assert isinstance(new_stype, s_types.Array)
        # No direct cast: cast element-wise to the target element type.
        el_type = new_stype.get_subtypes(ctx.env.schema)[0]
    else:
        el_type = new_stype

    casted_els = []
    for el in ir_set.expr.elements:
        el = compile_cast(el, el_type, ctx=ctx, srcctx=srcctx)
        casted_els.append(el)

    # NOTE(review): the rebuilt array is tagged with the *original*
    # typeref; the cast node below carries the actual conversion --
    # confirm this is intentional.
    new_array = setgen.ensure_set(
        irast.Array(elements=casted_els, typeref=orig_typeref),
        ctx=ctx)

    if direct_cast is not None:
        return _cast_to_ir(
            new_array, direct_cast, orig_stype, new_stype, ctx=ctx)
    else:
        cast_ir = irast.TypeCast(
            expr=new_array,
            from_type=orig_typeref,
            to_type=new_typeref,
            sql_cast=True,
        )
        return setgen.ensure_set(cast_ir, ctx=ctx)
def ql_typeexpr_to_ir_typeref(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> irast.TypeRef:
    """Resolve a QL type expression and convert it to an IR type ref.

    The conversion goes through the environment's type-ref cache.
    """
    resolved = ql_typeexpr_to_type(ql_t, ctx=ctx)
    env = ctx.env
    return irtyputils.type_to_typeref(
        env.schema, resolved, cache=env.type_ref_cache)
def new_tuple_set(elements: typing.List[irast.TupleElement], *,
                  named: bool,
                  ctx: context.ContextLevel) -> irast.Set:
    """Build a Set wrapping a (possibly named) tuple of *elements*.

    A throwaway Tuple node is constructed first purely to infer the
    tuple's schema type; each element is then rebuilt with a
    tuple-indirection path id derived from the resulting expression
    path id.
    """
    tup = irast.Tuple(elements=elements, named=named)
    stype = inference.infer_type(tup, env=ctx.env)
    result_path_id = pathctx.get_expression_path_id(stype, ctx=ctx)

    final_elems = []
    for elem in elements:
        elem_path_id = pathctx.get_tuple_indirection_path_id(
            result_path_id, elem.name,
            get_set_type(elem.val, ctx=ctx),
            ctx=ctx).strip_weak_namespaces()
        final_elems.append(
            irast.TupleElement(
                name=elem.name,
                val=elem.val,
                path_id=elem_path_id,
            ))

    typeref = irtyputils.type_to_typeref(ctx.env.schema, stype)
    final_tup = irast.Tuple(elements=final_elems, named=named,
                            typeref=typeref)

    return ensure_set(final_tup, path_id=result_path_id,
                      type_override=stype, ctx=ctx)
def _inheritance_cast_to_ir(ir_set: irast.Set,
                            orig_stype: s_types.Type,
                            new_stype: s_types.Type, *,
                            ctx: context.ContextLevel) -> irast.Set:
    """Cast between related types via a bare SQL cast.

    No cast function or expression is attached -- only the type refs
    change.
    """
    schema = ctx.env.schema
    node = irast.TypeCast(
        expr=ir_set,
        from_type=irtyputils.type_to_typeref(schema, orig_stype),
        to_type=irtyputils.type_to_typeref(schema, new_stype),
        cast_name=None,
        sql_function=None,
        sql_cast=True,
        sql_expr=False,
    )
    return setgen.ensure_set(node, ctx=ctx)
def new_set(*, stype: s_types.Type, ctx: context.ContextLevel,
            **kwargs) -> irast.Set:
    """Create a new ir.Set instance with given attributes.

    Absolutely all ir.Set instances must be created using this
    constructor, so that the set-to-type mapping in the environment
    stays complete.
    """
    result = irast.Set(
        typeref=irtyputils.type_to_typeref(ctx.env.schema, stype),
        **kwargs,
    )
    ctx.env.set_types[result] = stype
    return result
def _cast_to_ir(ir_set: irast.Set, cast: s_casts.Cast,
                orig_stype: s_types.Type,
                new_stype: s_types.Type, *,
                ctx: context.ContextLevel) -> irast.Set:
    """Wrap *ir_set* in a TypeCast driven by a schema cast object.

    The SQL-level details (function, cast flag, code expression) are
    copied from the cast's schema definition.
    """
    schema = ctx.env.schema
    name = cast.get_name(schema)
    module_id = schema.get_global(s_mod.Module, name.module).id

    node = irast.TypeCast(
        expr=ir_set,
        from_type=irtyputils.type_to_typeref(schema, orig_stype),
        to_type=irtyputils.type_to_typeref(schema, new_stype),
        cast_name=name,
        cast_module_id=module_id,
        sql_function=cast.get_from_function(schema),
        sql_cast=cast.get_from_cast(schema),
        sql_expr=bool(cast.get_code(schema)),
    )
    return setgen.ensure_set(node, ctx=ctx)
def new_array_set(elements: typing.List[irast.Base], *,
                  ctx: context.ContextLevel) -> irast.Set:
    """Build a Set wrapping an array literal of *elements*.

    The array type is inferred from the elements.
    """
    arr = irast.Array(elements=elements)
    if elements:
        stype = inference.infer_type(arr, env=ctx.env)
        typeref = irtyputils.type_to_typeref(ctx.env.schema, stype)
    else:
        # NOTE(review): an empty literal yields stype=None and
        # typeref=None here -- confirm downstream code tolerates an
        # untyped empty array.
        stype = typeref = None

    arr = irast.Array(elements=elements, typeref=typeref)
    return ensure_set(arr, type_override=stype, ctx=ctx)
def new_empty_set(*, stype: typing.Optional[s_types.Type]=None, alias: str,
                  ctx: context.ContextLevel,
                  srcctx: typing.Optional[
                      parsing.ParserContext]=None) -> irast.Set:
    """Create an EmptySet under a fresh expression path id.

    If *stype* is not given, the ``anytype`` pseudo-type is used and
    *srcctx* is recorded as that type's origin for later error
    reporting.
    """
    if stype is None:
        stype = s_pseudo.Any.create()
        ctx.env.type_origins[stype] = srcctx

    typeref = irtyputils.type_to_typeref(ctx.env.schema, stype)
    path_id = pathctx.get_expression_path_id(stype, alias, ctx=ctx)
    ir_set = irast.EmptySet(path_id=path_id, typeref=typeref)
    ctx.env.set_types[ir_set] = stype
    return ir_set
def get_param_anchors_for_callable(params, schema):
    """Build expression anchors for the parameters of a callable.

    Returns an ``(anchors, aliases)`` pair.  *anchors* maps parameter
    short names (plus the synthetic ``__defaults_mask__`` bytes
    parameter) to ``irast.Parameter`` nodes; *aliases* holds one
    ``AliasedExpr`` per defaulted parameter that substitutes the
    parameter's default whenever the corresponding bit of
    ``__defaults_mask__`` is set.
    """
    anchors = {}
    aliases = []

    anchors['__defaults_mask__'] = irast.Parameter(
        name='__defaults_mask__',
        typeref=irtyputils.type_to_typeref(schema, schema.get('std::bytes')))

    pg_params = s_func.PgParams.from_params(schema, params)
    for pi, p in enumerate(pg_params.params):
        p_shortname = p.get_shortname(schema)
        anchors[p_shortname] = irast.Parameter(
            name=p_shortname,
            typeref=irtyputils.type_to_typeref(schema, p.get_type(schema)))

        if p.get_default(schema) is None:
            continue

        # Equivalent of:
        #   <p> IF std::bytes_get_bit(__defaults_mask__, pi) = 0
        #   ELSE <default>
        aliases.append(
            qlast.AliasedExpr(
                alias=p_shortname,
                expr=qlast.IfElse(
                    condition=qlast.BinOp(
                        left=qlast.FunctionCall(
                            func=('std', 'bytes_get_bit'),
                            args=[
                                qlast.Path(steps=[
                                    qlast.ObjectRef(
                                        name='__defaults_mask__')
                                ]),
                                qlast.IntegerConstant(value=str(pi)),
                            ]),
                        right=qlast.IntegerConstant(value='0'),
                        op='='),
                    if_expr=qlast.Path(
                        steps=[qlast.ObjectRef(name=p_shortname)]),
                    else_expr=qlast._Optional(
                        expr=p.get_ql_default(schema)))))

    return anchors, aliases
def type_to_typeref(
    t: s_types.Type,
    env: context.Environment,
) -> irast.TypeRef:
    """Convert a schema type to an IR TypeRef using the env's cache.

    Types whose expression kind is Update or Delete are converted with
    ``include_descendants=True``.
    """
    expr_type = t.get_expr_type(env.schema)
    with_descendants = (
        expr_type is s_types.ExprType.Update
        or expr_type is s_types.ExprType.Delete
    )
    return irtyputils.type_to_typeref(
        env.schema,
        t,
        include_descendants=with_descendants,
        cache=env.type_ref_cache,
    )
def compile_BaseConstant(
        expr: qlast.BaseConstant, *, ctx: context.ContextLevel) -> irast.Base:
    """Compile a literal constant into a typed IR constant set.

    Dispatches on the concrete qlast constant class to pick the std
    scalar type name and the matching IR node class, then wraps the
    node in a Set.

    :raises RuntimeError: for an unrecognized constant AST class.
    """
    value = expr.value

    node_cls: typing.Type[irast.BaseConstant]

    if isinstance(expr, qlast.StringConstant):
        std_type = 'std::str'
        node_cls = irast.StringConstant
    elif isinstance(expr, qlast.RawStringConstant):
        std_type = 'std::str'
        node_cls = irast.RawStringConstant
    elif isinstance(expr, qlast.IntegerConstant):
        int_value = int(expr.value)
        if expr.is_negative:
            int_value = -int_value
            value = f'-{value}'
        # If integer value is out of int64 bounds, use decimal
        # NOTE(review): no bounds check on int_value is actually
        # performed here -- TODO confirm whether the decimal fallback
        # described above was meant to be implemented.
        std_type = 'std::int64'
        node_cls = irast.IntegerConstant
    elif isinstance(expr, qlast.FloatConstant):
        if expr.is_negative:
            value = f'-{value}'
        std_type = 'std::float64'
        node_cls = irast.FloatConstant
    elif isinstance(expr, qlast.DecimalConstant):
        # Decimal literals carry a trailing 'n' suffix that is not part
        # of the numeric value.
        assert value[-1] == 'n'
        value = value[:-1]
        if expr.is_negative:
            value = f'-{value}'
        std_type = 'std::decimal'
        node_cls = irast.DecimalConstant
    elif isinstance(expr, qlast.BooleanConstant):
        std_type = 'std::bool'
        node_cls = irast.BooleanConstant
    elif isinstance(expr, qlast.BytesConstant):
        std_type = 'std::bytes'
        node_cls = irast.BytesConstant
        # Re-parse the quoted literal to obtain the raw bytes value.
        value = ast.literal_eval(f'b{expr.quote}{expr.value}{expr.quote}')
    else:
        raise RuntimeError(f'unexpected constant type: {type(expr)}')

    ct = irtyputils.type_to_typeref(
        ctx.env.schema,
        ctx.env.get_track_schema_type(std_type),
    )
    return setgen.ensure_set(node_cls(value=value, typeref=ct), ctx=ctx)
def new_array_set(
        elements: typing.List[irast.Base], *,
        ctx: context.ContextLevel,
        srcctx: typing.Optional[parsing.ParserContext]=None) -> irast.Set:
    """Build a Set wrapping an array literal of *elements*.

    An empty literal is typed as ``array<anytype>``; *srcctx*, if
    given, is recorded as the origin of the ``anytype`` placeholder for
    error reporting.
    """
    arr = irast.Array(elements=elements)
    if elements:
        stype = inference.infer_type(arr, env=ctx.env)
    else:
        anytype = s_pseudo.Any.create()
        stype = s_types.Array.from_subtypes(ctx.env.schema, [anytype])
        if srcctx is not None:
            ctx.env.type_origins[anytype] = srcctx

    typeref = irtyputils.type_to_typeref(ctx.env.schema, stype)
    arr = irast.Array(elements=elements, typeref=typeref)
    return ensure_set(arr, type_override=stype, ctx=ctx)
def compile_Introspect(
        expr: qlast.Introspect, *,
        ctx: context.ContextLevel) -> irast.Base:
    """Compile an INTROSPECT expression into a TypeIntrospection node.

    Views, collection types and generic types cannot be introspected.
    """
    tref = typegen.ql_typeexpr_to_ir_typeref(expr.type, ctx=ctx)

    # Normalize to the material type, except for object types.
    if tref.material_type and not irtyputils.is_object(tref):
        tref = tref.material_type
    # Opaque unions are introspected as the std::Object base.
    if tref.is_opaque_union:
        schema = ctx.env.schema
        tref = irtyputils.type_to_typeref(schema, schema.get('std::Object'))

    if irtyputils.is_view(tref):
        raise errors.QueryError(f'cannot introspect views',
                                context=expr.type.context)
    if irtyputils.is_collection(tref):
        raise errors.QueryError(f'cannot introspect collection types',
                                context=expr.type.context)
    if irtyputils.is_generic(tref):
        raise errors.QueryError(f'cannot introspect generic types',
                                context=expr.type.context)

    return irast.TypeIntrospection(typeref=tref)
def _ql_typeexpr_to_ir_typeref(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> typing.List[irast.TypeRef]:
    """Lower a QL type expression to a flat list of IR type refs.

    A ``typeof`` node is compiled in a temporary fenced scope to infer
    its type; a ``|`` union contributes the refs of both operands; any
    other node is treated as a plain type name.
    """
    if isinstance(ql_t, qlast.TypeOf):
        # TYPEOF: compile the inner expression only to infer its type.
        with ctx.newscope(fenced=True, temporary=True) as subctx:
            ir_set = setgen.ensure_set(
                dispatch.compile(ql_t.expr, ctx=subctx), ctx=subctx)
            stype = setgen.get_set_type(ir_set, ctx=subctx)
            return [irtyputils.type_to_typeref(subctx.env.schema, stype)]

    if isinstance(ql_t, qlast.TypeOp):
        if ql_t.op != '|':
            raise errors.UnsupportedFeatureError(
                f'type operator {ql_t.op!r} is not implemented',
                context=ql_t.context)
        left = _ql_typeexpr_to_ir_typeref(ql_t.left, ctx=ctx)
        right = _ql_typeexpr_to_ir_typeref(ql_t.right, ctx=ctx)
        return left + right

    return [_ql_typeref_to_ir_typeref(ql_t, ctx=ctx)]
def _ql_typeref_to_ir_typeref(ql_t: qlast.TypeName, *,
                              ctx: context.ContextLevel) -> irast.TypeRef:
    """Resolve a simple QL type name to an IR type ref."""
    resolved = ql_typeref_to_type(ql_t, ctx=ctx)
    return irtyputils.type_to_typeref(ctx.env.schema, resolved)
def compile_FunctionCall(
        expr: qlast.FunctionCall, *, ctx: context.ContextLevel) -> irast.Base:
    """Compile a function-call expression into an IR FunctionCall set.

    Resolves the function name (rejecting attempts to call a function
    parameter), matches a concrete overload, finalizes arguments, and
    pre-computes tuple indirection path ids when the return type is a
    tuple.

    :raises errors.QueryError: on unresolved names, no matching
        variant, ambiguous overloads, or session-only functions called
        outside a session context.
    """
    env = ctx.env

    if isinstance(expr.func, str):
        # A bare name: make sure it does not shadow a function
        # parameter of the enclosing function body.
        if (ctx.env.func_params is not None
                and ctx.env.func_params.get_by_name(env.schema, expr.func)):
            raise errors.QueryError(
                f'parameter `{expr.func}` is not callable',
                context=expr.context)

        funcname = expr.func
    else:
        funcname = sn.Name(expr.func[1], expr.func[0])

    funcs = env.schema.get_functions(funcname, module_aliases=ctx.modaliases)

    if funcs is None:
        raise errors.QueryError(
            f'could not resolve function name {funcname}',
            context=expr.context)

    in_polymorphic_func = (
        ctx.env.func_params is not None and
        ctx.env.func_params.has_polymorphic(env.schema))

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.parent_object_type is s_constr.Constraint)

    args, kwargs = compile_call_args(expr, funcname, ctx=ctx)
    matched = polyres.find_callable(funcs, args=args, kwargs=kwargs, ctx=ctx)
    if not matched:
        raise errors.QueryError(
            f'could not find a function variant {funcname}',
            context=expr.context)
    elif len(matched) > 1:
        # Ambiguity is tolerated only inside abstract constraints,
        # where any variant will do.
        if in_abstract_constraint:
            matched_call = matched[0]
        else:
            raise errors.QueryError(
                f'function {funcname} is not unique',
                context=expr.context)
    else:
        matched_call = matched[0]

    func = matched_call.func
    assert isinstance(func, s_func.Function)
    func_name = func.get_shortname(env.schema)

    if not ctx.env.session_mode and func.get_session_only(env.schema):
        raise errors.QueryError(
            f'{func_name}() cannot be called in a non-session context',
            context=expr.context)

    final_args, params_typemods = finalize_args(matched_call, ctx=ctx)

    matched_func_params = func.get_params(env.schema)
    variadic_param = matched_func_params.find_variadic(env.schema)
    variadic_param_type = None
    if variadic_param is not None:
        variadic_param_type = irtyputils.type_to_typeref(
            env.schema, variadic_param.get_type(env.schema))

    matched_func_ret_type = func.get_return_type(env.schema)
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_func_params.objects(env.schema))
        and matched_func_ret_type.is_polymorphic(env.schema))

    matched_func_initial_value = func.get_initial_value(env.schema)

    if not in_abstract_constraint:
        # We cannot add strong references to functions from
        # abstract constraints, since we cannot know which
        # form of the function is actually used.
        env.schema_refs.add(func)

    func_initial_value: typing.Optional[irast.Set]

    if matched_func_initial_value is not None:
        # Compile the initial value expression, cast to the matched
        # call's return type.
        iv_ql = qlast.TypeCast(
            expr=qlparser.parse_fragment(matched_func_initial_value.text),
            type=typegen.type_to_ql_typeref(
                matched_call.return_type, ctx=ctx),
        )
        func_initial_value = setgen.ensure_set(
            dispatch.compile(iv_ql, ctx=ctx),
            ctx=ctx,
        )
    else:
        func_initial_value = None

    rtype = matched_call.return_type
    path_id = pathctx.get_expression_path_id(rtype, ctx=ctx)

    if rtype.is_tuple():
        # Pre-compute indirection path ids for the tuple's elements
        # (and one extra level for nested tuples).
        rtype = typing.cast(s_types.Tuple, rtype)
        tuple_path_ids = []
        nested_path_ids = []
        for n, st in rtype.iter_subtypes(ctx.env.schema):
            elem_path_id = pathctx.get_tuple_indirection_path_id(
                path_id, n, st, ctx=ctx).strip_weak_namespaces()

            if st.is_tuple():
                nested_path_ids.append([
                    pathctx.get_tuple_indirection_path_id(
                        elem_path_id, nn, sst,
                        ctx=ctx).strip_weak_namespaces()
                    for nn, sst in st.iter_subtypes(ctx.env.schema)
                ])

            tuple_path_ids.append(elem_path_id)
        for nested in nested_path_ids:
            tuple_path_ids.extend(nested)
    else:
        tuple_path_ids = []

    fcall = irast.FunctionCall(
        args=final_args,
        func_module_id=env.schema.get_global(
            s_mod.Module, func_name.module).id,
        func_shortname=func_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=func.get_from_function(env.schema),
        force_return_cast=func.get_force_return_cast(env.schema),
        session_only=func.get_session_only(env.schema),
        volatility=func.get_volatility(env.schema),
        sql_func_has_out_params=func.get_sql_func_has_out_params(env.schema),
        error_on_null_result=func.get_error_on_null_result(env.schema),
        params_typemods=params_typemods,
        context=expr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=matched_call.func.get_return_typemod(env.schema),
        has_empty_variadic=matched_call.has_empty_variadic,
        variadic_param_type=variadic_param_type,
        func_initial_value=func_initial_value,
        tuple_path_ids=tuple_path_ids,
    )

    return setgen.ensure_set(fcall, typehint=rtype, path_id=path_id, ctx=ctx)
def compile_operator(
        qlexpr: qlast.Base, op_name: str, qlargs: typing.List[qlast.Base], *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile an operator application into an IR OperatorCall set.

    Compiles each operand in its own fenced scope, resolves the
    matching operator overload (with special handling for binary
    operators over tuples/arrays and for the UNION/IF union-type
    result), finalizes the arguments and emits the OperatorCall node.

    :raises errors.QueryError: when no operator matches, an operand
        type cannot be inferred, or the match is ambiguous.
    """
    env = ctx.env
    schema = env.schema
    opers = schema.get_operators(op_name, module_aliases=ctx.modaliases)
    if opers is None:
        raise errors.QueryError(
            f'no operator matches the given name and argument types',
            context=qlexpr.context)

    args = []
    for ai, qlarg in enumerate(qlargs):
        with ctx.newscope(fenced=True) as fencectx:
            # We put on a SET OF fence preemptively in case this is
            # a SET OF arg, which we don't know yet due to polymorphic
            # matching.  We will remove it if necessary in
            # `finalize_args()`.
            arg_ir = setgen.ensure_set(
                dispatch.compile(qlarg, ctx=fencectx),
                ctx=fencectx)

            arg_ir = setgen.scoped_set(
                setgen.ensure_stmt(arg_ir, ctx=fencectx),
                ctx=fencectx)

        arg_type = inference.infer_type(arg_ir, ctx.env)
        if arg_type is None:
            raise errors.QueryError(
                f'could not resolve the type of operand '
                f'#{ai} of {op_name}',
                context=qlarg.context)

        args.append((arg_type, arg_ir))

    matched = None
    # Some 2-operand operators are special when their operands are
    # arrays or tuples.
    if len(args) == 2:
        coll_opers = None
        # If both of the args are arrays or tuples, potentially
        # compile the operator for them differently than for other
        # combinations.
        if args[0][0].is_tuple() and args[1][0].is_tuple():
            # Out of the candidate operators, find the ones that
            # correspond to tuples.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_tuple()
                    for param in op.get_params(schema).objects(schema))
            ]
        elif args[0][0].is_array() and args[1][0].is_array():
            # Out of the candidate operators, find the ones that
            # correspond to arrays.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_array()
                    for param in op.get_params(schema).objects(schema))
            ]

        # Proceed only if we have a special case of collection
        # operators.
        if coll_opers:
            # Then check if they are recursive (i.e. validation must be
            # done recursively for the subtypes).  We rely on the fact
            # that it is forbidden to define an operator that has both
            # recursive and non-recursive versions.
            if not coll_opers[0].get_recursive(schema):
                # The operator is non-recursive, so regular processing
                # is needed.
                matched = polyres.find_callable(
                    coll_opers, args=args, kwargs={}, ctx=ctx)
            else:
                # Ultimately the operator will be the same, regardless
                # of the specific operand types, as long as it passed
                # validation, so we just use the first operand type for
                # the purpose of finding the callable.
                matched = polyres.find_callable(
                    coll_opers,
                    args=[(args[0][0], args[0][1]),
                          (args[0][0], args[1][1])],
                    kwargs={},
                    ctx=ctx)

                # Now that we have an operator, we need to validate
                # that it can be applied to the tuple or array
                # elements.
                submatched = validate_recursive_operator(
                    opers, args[0], args[1], ctx=ctx)

                if len(submatched) != 1:
                    # This is an error. We want the error message to
                    # reflect whether no matches were found or too
                    # many, so we preserve the submatches found for
                    # this purpose.
                    matched = submatched

    # No special handling match was necessary, find a normal match.
    if matched is None:
        matched = polyres.find_callable(opers, args=args, kwargs={}, ctx=ctx)

    in_polymorphic_func = (
        ctx.env.func_params is not None and
        ctx.env.func_params.has_polymorphic(env.schema))

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.parent_object_type is s_constr.Constraint)

    if not in_polymorphic_func:
        # Abstract operators are only callable while compiling a
        # polymorphic function body.
        matched = [
            call for call in matched
            if not call.func.get_is_abstract(env.schema)
        ]

    if len(matched) == 1:
        matched_call = matched[0]
    else:
        # Build a human-readable description of the operand types for
        # the error messages below.
        if len(args) == 2:
            ltype = args[0][0].material_type(env.schema)
            rtype = args[1][0].material_type(env.schema)

            types = (
                f'{ltype.get_displayname(env.schema)!r} and '
                f'{rtype.get_displayname(env.schema)!r}')
        else:
            types = ', '.join(
                repr(
                    a[0].material_type(env.schema).get_displayname(
                        env.schema)
                ) for a in args)

        if not matched:
            hint = ('Consider using an explicit type cast or a conversion '
                    'function.')

            if op_name == 'std::IF':
                hint = (f"The IF and ELSE result clauses must be of "
                        f"compatible types, while the condition clause must "
                        f"be 'std::bool'. {hint}")

            raise errors.QueryError(
                f'operator {str(op_name)!r} cannot be applied to '
                f'operands of type {types}',
                hint=hint,
                context=qlexpr.context)
        elif len(matched) > 1:
            if in_abstract_constraint:
                matched_call = matched[0]
            else:
                detail = ', '.join(
                    f'`{m.func.get_verbosename(ctx.env.schema)}`'
                    for m in matched)
                raise errors.QueryError(
                    f'operator {str(op_name)!r} is ambiguous for '
                    f'operands of type {types}',
                    hint=f'Possible variants: {detail}.',
                    context=qlexpr.context)

    final_args, params_typemods = finalize_args(matched_call, ctx=ctx)

    oper = matched_call.func
    assert isinstance(oper, s_oper.Operator)
    env.schema_refs.add(oper)
    oper_name = oper.get_shortname(env.schema)

    matched_params = oper.get_params(env.schema)
    rtype = matched_call.return_type

    if oper_name in {'std::UNION', 'std::IF'} and rtype.is_object_type():
        # Special case for the UNION and IF operators, instead of
        # common parent type, we return a union type.
        if oper_name == 'std::UNION':
            larg, rarg = (a.expr for a in final_args)
        else:
            # std::IF has the condition as the middle argument.
            larg, _, rarg = (a.expr for a in final_args)

        left_type = setgen.get_set_type(
            larg, ctx=ctx).material_type(ctx.env.schema)
        right_type = setgen.get_set_type(
            rarg, ctx=ctx).material_type(ctx.env.schema)

        if left_type.issubclass(env.schema, right_type):
            rtype = right_type
        elif right_type.issubclass(env.schema, left_type):
            rtype = left_type
        else:
            env.schema, rtype = s_utils.get_union_type(
                env.schema, [left_type, right_type])

    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_params.objects(env.schema)) and
        oper.get_return_type(env.schema).is_polymorphic(env.schema))

    from_op = oper.get_from_operator(env.schema)
    sql_operator = None
    if (from_op is not None and oper.get_code(env.schema) is None and
            oper.get_from_function(env.schema) is None and
            not in_polymorphic_func):
        sql_operator = tuple(from_op)

    node = irast.OperatorCall(
        args=final_args,
        func_module_id=env.schema.get_global(
            s_mod.Module, oper_name.module).id,
        func_shortname=oper_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=oper.get_from_function(env.schema),
        sql_operator=sql_operator,
        force_return_cast=oper.get_force_return_cast(env.schema),
        volatility=oper.get_volatility(env.schema),
        operator_kind=oper.get_operator_kind(env.schema),
        params_typemods=params_typemods,
        context=qlexpr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=oper.get_return_typemod(env.schema),
    )

    return setgen.ensure_set(node, typehint=rtype, ctx=ctx)
def compile_func_to_ir(func, schema, *,
                       anchors=None,
                       security_context=None,
                       modaliases=None,
                       implicit_id_in_shapes=False,
                       implicit_tid_in_shapes=False):
    """Compile an EdgeQL function into EdgeDB IR.

    Parses the function's code (which must be a single statement),
    registers parameter anchors (including the synthetic
    ``__defaults_mask__`` bytes parameter), injects per-parameter
    default-substituting aliases, and compiles the resulting tree.

    :raises errors.InvalidFunctionDefinitionError: if the function body
        contains more than one statement.
    """

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL Function')
        debug.print(func.get_code(schema))

    trees = ql_parser.parse_block(func.get_code(schema) + ';')
    if len(trees) != 1:
        raise errors.InvalidFunctionDefinitionError(
            'functions can only contain one statement')

    tree = trees[0]
    if modaliases:
        ql_parser.append_module_aliases(tree, modaliases)

    if anchors is None:
        anchors = {}

    anchors['__defaults_mask__'] = irast.Parameter(
        name='__defaults_mask__',
        typeref=irtyputils.type_to_typeref(schema, schema.get('std::bytes')))

    func_params = func.get_params(schema)
    pg_params = s_func.PgParams.from_params(schema, func_params)
    for pi, p in enumerate(pg_params.params):
        p_shortname = p.get_shortname(schema)
        anchors[p_shortname] = irast.Parameter(
            name=p_shortname,
            typeref=irtyputils.type_to_typeref(schema, p.get_type(schema)))

        if p.get_default(schema) is None:
            continue

        # Equivalent of:
        #   <p> IF std::bytes_get_bit(__defaults_mask__, pi) = 0
        #   ELSE <default>
        tree.aliases.append(
            qlast.AliasedExpr(
                alias=p_shortname,
                expr=qlast.IfElse(
                    condition=qlast.BinOp(
                        left=qlast.FunctionCall(
                            func=('std', 'bytes_get_bit'),
                            args=[
                                qlast.FuncArg(
                                    arg=qlast.Path(steps=[
                                        qlast.ObjectRef(
                                            name='__defaults_mask__')
                                    ])),
                                qlast.FuncArg(
                                    arg=qlast.IntegerConstant(value=str(pi)))
                            ]),
                        right=qlast.IntegerConstant(value='0'),
                        op='='),
                    if_expr=qlast.Path(
                        steps=[qlast.ObjectRef(name=p_shortname)]),
                    else_expr=qlast._Optional(
                        expr=p.get_ql_default(schema)))))

    ir = compile_ast_to_ir(
        tree, schema, anchors=anchors, func=func,
        security_context=security_context, modaliases=modaliases,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes)

    return ir
def try_bind_call_args(
        args: Sequence[Tuple[s_types.Type, irast.Set]],
        kwargs: Mapping[str, Tuple[s_types.Type, irast.Set]],
        func: s_func.CallableLike, *,
        ctx: context.ContextLevel) -> Optional[BoundCall]:
    """Attempt to bind *args*/*kwargs* to the parameters of *func*.

    Returns a ``BoundCall`` describing the match (including implicit
    cast distances, substituted defaults, the resolved polymorphic
    base type, and -- for functions with inlined defaults -- an
    injected defaults bit-mask argument), or ``None`` if the call
    cannot be bound to this callable.
    """

    return_type = func.get_return_type(ctx.env.schema)
    is_abstract = func.get_is_abstract(ctx.env.schema)
    resolved_poly_base_type: Optional[s_types.Type] = None

    def _get_cast_distance(arg, arg_type, param_type) -> int:
        # Return the implicit-cast distance from arg_type to
        # param_type, or -1 if the argument is not acceptable.
        # Also resolves/refines resolved_poly_base_type when a
        # polymorphic parameter is matched.
        nonlocal resolved_poly_base_type

        if in_polymorphic_func:
            # Compiling a body of a polymorphic function.

            if arg_type.is_polymorphic(schema):
                if param_type.is_polymorphic(schema):
                    if arg_type.test_polymorphic(schema, param_type):
                        return 0
                    else:
                        return -1
                else:
                    if arg_type.resolve_polymorphic(schema, param_type):
                        return 0
                    else:
                        return -1

        if param_type.is_polymorphic(schema):
            if not arg_type.test_polymorphic(schema, param_type):
                return -1

            resolved = param_type.resolve_polymorphic(schema, arg_type)
            if resolved is None:
                return -1

            if resolved_poly_base_type is None:
                resolved_poly_base_type = resolved

            if resolved_poly_base_type == resolved:
                return s_types.MAX_TYPE_DISTANCE if is_abstract else 0

            ct = resolved_poly_base_type.find_common_implicitly_castable_type(
                resolved, ctx.env.schema)

            if ct is not None:
                # If we found a common implicitly castable type, we
                # refine our resolved_poly_base_type to be that as the
                # more general case.
                resolved_poly_base_type = ct
                return s_types.MAX_TYPE_DISTANCE if is_abstract else 0
            else:
                return -1

        if arg_type.issubclass(schema, param_type):
            return 0

        return arg_type.get_implicit_cast_distance(param_type, schema)

    schema = ctx.env.schema

    in_polymorphic_func = (
        ctx.env.func_params is not None and
        ctx.env.func_params.has_polymorphic(schema))

    has_empty_variadic = False
    no_args_call = not args and not kwargs
    has_inlined_defaults = func.has_inlined_defaults(schema)

    func_params = func.get_params(schema)

    if not func_params:
        if no_args_call:
            # Match: `func` is a function without parameters
            # being called with no arguments.
            bargs: List[BoundArg] = []
            if has_inlined_defaults:
                # Still inject the (empty) defaults bit-mask.
                bytes_t = ctx.env.get_track_schema_type('std::bytes')
                argval = setgen.ensure_set(
                    irast.BytesConstant(
                        value=b'\x00',
                        typeref=irtyputils.type_to_typeref(schema, bytes_t)),
                    typehint=bytes_t,
                    ctx=ctx)
                bargs = [BoundArg(None, bytes_t, argval, bytes_t, 0)]
            return BoundCall(
                func, bargs, set(), return_type, False)
        else:
            # No match: `func` is a function without parameters
            # being called with some arguments.
            return None

    pg_params = s_func.PgParams.from_params(schema, func_params)
    named_only = func_params.find_named_only(schema)

    if no_args_call and pg_params.has_param_wo_default:
        # A call without arguments and there is at least
        # one parameter without default.
        return None

    bound_args_prep: List[Union[MissingArg, BoundArg]] = []

    params = pg_params.params
    nparams = len(params)
    nargs = len(args)
    has_missing_args = False

    ai = 0
    pi = 0
    matched_kwargs = 0

    # Bind NAMED ONLY arguments (they are compiled as first set of
    # arguments).
    while True:
        if pi >= nparams:
            break

        param = params[pi]
        if param.get_kind(schema) is not _NAMED_ONLY:
            break

        pi += 1

        param_shortname = param.get_shortname(schema)
        param_type = param.get_type(schema)
        if param_shortname in kwargs:
            matched_kwargs += 1

            arg_type, arg_val = kwargs[param_shortname]
            cd = _get_cast_distance(arg_val, arg_type, param_type)
            if cd < 0:
                return None

            bound_args_prep.append(
                BoundArg(param, param_type, arg_val, arg_type, cd))
        else:
            if param.get_default(schema) is None:
                # required named parameter without default and
                # without a matching argument
                return None

            has_missing_args = True
            bound_args_prep.append(MissingArg(param, param_type))

    if matched_kwargs != len(kwargs):
        # extra kwargs?
        return None

    # Bind POSITIONAL arguments (compiled to go after NAMED ONLY
    # arguments).
    while True:
        if ai < nargs:
            arg_type, arg_val = args[ai]
            ai += 1

            if pi >= nparams:
                # too many positional arguments
                return None
            param = params[pi]
            param_type = param.get_type(schema)
            param_kind = param.get_kind(schema)
            pi += 1

            if param_kind is _NAMED_ONLY:
                # impossible condition
                raise RuntimeError('unprocessed NAMED ONLY parameter')

            if param_kind is _VARIADIC:
                # The variadic parameter absorbs this and all
                # remaining positional arguments, checked against
                # its element type.
                var_type = param.get_type(schema).get_subtypes(schema)[0]
                cd = _get_cast_distance(arg_val, arg_type, var_type)
                if cd < 0:
                    return None

                bound_args_prep.append(
                    BoundArg(param, param_type, arg_val, arg_type, cd))

                for arg_type, arg_val in args[ai:]:
                    cd = _get_cast_distance(arg_val, arg_type, var_type)
                    if cd < 0:
                        return None

                    bound_args_prep.append(
                        BoundArg(param, param_type, arg_val, arg_type, cd))

                break

            cd = _get_cast_distance(arg_val, arg_type, param.get_type(schema))
            if cd < 0:
                return None

            bound_args_prep.append(
                BoundArg(param, param_type, arg_val, arg_type, cd))
        else:
            break

    # Handle yet unprocessed POSITIONAL & VARIADIC arguments.
    for pi in range(pi, nparams):
        param = params[pi]
        param_kind = param.get_kind(schema)

        if param_kind is _POSITIONAL:
            if param.get_default(schema) is None:
                # required positional parameter that we don't have a
                # positional argument for.
                return None

            has_missing_args = True
            param_type = param.get_type(schema)
            bound_args_prep.append(MissingArg(param, param_type))

        elif param_kind is _VARIADIC:
            has_empty_variadic = True

        elif param_kind is _NAMED_ONLY:
            # impossible condition
            raise RuntimeError('unprocessed NAMED ONLY parameter')

    # Populate defaults.
    defaults_mask = 0
    null_args: Set[str] = set()
    bound_param_args: List[BoundArg] = []
    if has_missing_args:
        if has_inlined_defaults or named_only:
            for i, barg in enumerate(bound_args_prep):
                if isinstance(barg, BoundArg):
                    bound_param_args.append(barg)
                    continue

                param = barg.param
                param_shortname = param.get_shortname(schema)
                null_args.add(param_shortname)

                # Record that the default must be substituted for
                # this parameter position.
                defaults_mask |= 1 << i

                if not has_inlined_defaults:
                    ql_default = param.get_ql_default(schema)
                    default = dispatch.compile(ql_default, ctx=ctx)

                empty_default = (
                    has_inlined_defaults or
                    irutils.is_empty(default))

                param_type = param.get_type(schema)

                if empty_default:
                    default_type = None

                    if param_type.is_any():
                        if resolved_poly_base_type is None:
                            raise errors.QueryError(
                                f'could not resolve "anytype" type for the '
                                f'${param_shortname} parameter')
                        else:
                            default_type = resolved_poly_base_type
                    else:
                        default_type = param_type
                else:
                    default_type = param_type

                if has_inlined_defaults:
                    # The actual default is inlined in the function
                    # body; pass an empty set as a placeholder.
                    default = setgen.new_empty_set(
                        stype=default_type,
                        alias=param_shortname,
                        ctx=ctx)

                default = setgen.ensure_set(
                    default,
                    typehint=default_type,
                    ctx=ctx)

                bound_param_args.append(
                    BoundArg(
                        param,
                        param_type,
                        default,
                        param_type,
                        0,
                    ))
        else:
            bound_param_args = [
                barg for barg in bound_args_prep
                if isinstance(barg, BoundArg)
            ]
    else:
        bound_param_args = cast(List[BoundArg], bound_args_prep)

    if has_inlined_defaults:
        # If we are compiling an EdgeQL function, inject the defaults
        # bit-mask as a first argument.
        bytes_t = ctx.env.get_track_schema_type('std::bytes')
        bm = defaults_mask.to_bytes(nparams // 8 + 1, 'little')
        bm_set = setgen.ensure_set(
            irast.BytesConstant(
                value=bm,
                typeref=irtyputils.type_to_typeref(ctx.env.schema, bytes_t)),
            typehint=bytes_t, ctx=ctx)
        bound_param_args.insert(
            0, BoundArg(None, bytes_t, bm_set, bytes_t, 0))

    if return_type.is_polymorphic(schema):
        if resolved_poly_base_type is not None:
            return_type = return_type.to_nonpolymorphic(
                schema, resolved_poly_base_type)
        elif not in_polymorphic_func:
            return None

    # resolved_poly_base_type may be legitimately None within
    # bodies of polymorphic functions
    if resolved_poly_base_type is not None:
        for i, barg in enumerate(bound_param_args):
            if barg.param_type.is_polymorphic(schema):
                bound_param_args[i] = BoundArg(
                    barg.param,
                    barg.param_type.to_nonpolymorphic(
                        schema, resolved_poly_base_type),
                    barg.val,
                    barg.valtype,
                    barg.cast_distance,
                )

    return BoundCall(
        func, bound_param_args, null_args,
        return_type, has_empty_variadic)
def compile_TypeCast(
        expr: qlast.TypeCast, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a ``<type>expr`` cast expression into an IR set.

    Three shapes of *expr.expr* are handled specially before the generic
    path:

    * an empty array literal cast to an array type, which becomes an
      empty ``irast.Array`` directly (no element compilation needed);
    * a query parameter (``$name`` / ``$0``), whose type is recorded in
      ``ctx.env.query_parameters`` and which is materialized either as a
      JSON-decoded parameter or as a plainly typed one;
    * everything else, which is compiled normally and handed to
      ``cast.compile_cast``.
    """
    target_typeref = typegen.ql_typeexpr_to_ir_typeref(expr.type, ctx=ctx)
    ir_expr: irast.Base

    if (isinstance(expr.expr, qlast.Array) and not expr.expr.elements
            and irtyputils.is_array(target_typeref)):
        # <array<T>>[] -- an empty array literal cast to an array type.
        # Skip compiling the (nonexistent) elements and emit an empty
        # Array node; the generic cast below will assign the type.
        ir_expr = irast.Array()

    elif isinstance(expr.expr, qlast.Parameter):
        # A cast applied directly to a query parameter declares that
        # parameter's type.
        pt = typegen.ql_typeexpr_to_type(expr.type, ctx=ctx)

        param_name = expr.expr.name
        if param_name not in ctx.env.query_parameters:
            if ctx.env.query_parameters:
                # Enforce that a single query uses either all-positional
                # (decimal names) or all-named parameters, by comparing
                # against the first parameter already registered.
                first_key: str = next(iter(ctx.env.query_parameters))
                if first_key.isdecimal():
                    if not param_name.isdecimal():
                        raise errors.QueryError(
                            f'cannot combine positional and named parameters '
                            f'in the same query',
                            context=expr.expr.context)
                else:
                    if param_name.isdecimal():
                        raise errors.QueryError(
                            f'expected a named argument',
                            context=expr.expr.context)
            # First sighting of this parameter: record its declared type.
            ctx.env.query_parameters[param_name] = pt
        else:
            # The parameter was seen before; a later cast must be
            # compatible (explicitly castable) with the first one.
            param_first_type = ctx.env.query_parameters[param_name]
            if not param_first_type.explicitly_castable_to(pt, ctx.env.schema):
                raise errors.QueryError(
                    f'cannot cast '
                    f'{param_first_type.get_displayname(ctx.env.schema)} to '
                    f'{pt.get_displayname(ctx.env.schema)}',
                    context=expr.expr.context)

        if ctx.env.json_parameters:
            # JSON-parameter mode: every argument arrives as std::json
            # and is cast to the declared type here.
            if param_name.isdecimal():
                raise errors.QueryError(
                    'queries compiled to accept JSON parameters do not '
                    'accept positional parameters',
                    context=expr.expr.context)

            json_typeref = irtyputils.type_to_typeref(
                ctx.env.schema, ctx.env.get_track_schema_type('std::json'))

            param = cast.compile_cast(
                irast.Parameter(
                    typeref=json_typeref,
                    name=param_name,
                    context=expr.expr.context,
                ),
                pt,
                srcctx=expr.expr.context,
                ctx=ctx,
            )
        else:
            # Regular mode: the parameter is already of the declared
            # type; just wrap it in a set.
            param = setgen.ensure_set(
                irast.Parameter(
                    typeref=irtyputils.type_to_typeref(ctx.env.schema, pt),
                    name=param_name,
                    context=expr.expr.context,
                ),
                ctx=ctx,
            )

        # Parameters bypass the generic compile_cast call below.
        return param

    else:
        with ctx.new() as subctx:
            # We use "exposed" mode in case this is a type of a cast
            # that wants view shapes, e.g. a std::json cast.  We do
            # this wholesale to support tuple and array casts without
            # having to analyze the target type (which is cumbersome
            # in QL AST).
            subctx.expr_exposed = True
            ir_expr = dispatch.compile(expr.expr, ctx=subctx)

    new_stype = typegen.ql_typeexpr_to_type(expr.type, ctx=ctx)
    return cast.compile_cast(
        ir_expr, new_stype, ctx=ctx, srcctx=expr.expr.context)
def compile_FunctionCall(
        expr: qlast.Base, *,
        ctx: context.ContextLevel) -> irast.Base:
    """Compile an EdgeQL function-call AST node into an IR set.

    Steps: resolve the function name (rejecting calls to in-scope
    function parameters), find a unique matching overload via
    ``polyres.find_callable``, finalize the bound arguments, collect
    call metadata (variadic type, polymorphism, initial value), build
    the ``irast.FunctionCall`` node and wrap it in a set.

    Raises errors.QueryError when the name cannot be resolved, no
    overload matches, or more than one overload matches.
    """
    env = ctx.env

    if isinstance(expr.func, str):
        # Unqualified name.  Inside a function body, a bare name that
        # matches one of the enclosing function's parameters is a
        # user error -- parameters are not callable.
        if ctx.func is not None:
            ctx_func_params = ctx.func.get_params(env.schema)
            if ctx_func_params.get_by_name(env.schema, expr.func):
                raise errors.QueryError(
                    f'parameter `{expr.func}` is not callable',
                    context=expr.context)

        funcname = expr.func
    else:
        # Qualified name arrives as a (module, name) pair.
        funcname = sn.Name(expr.func[1], expr.func[0])

    funcs = env.schema.get_functions(funcname, module_aliases=ctx.modaliases)

    if funcs is None:
        raise errors.QueryError(
            f'could not resolve function name {funcname}',
            context=expr.context)

    # Compile arguments and run overload resolution; exactly one
    # candidate must survive.
    args, kwargs = compile_call_args(expr, funcname, ctx=ctx)
    matched = polyres.find_callable(funcs, args=args, kwargs=kwargs, ctx=ctx)
    if not matched:
        raise errors.QueryError(
            f'could not find a function variant {funcname}',
            context=expr.context)
    elif len(matched) > 1:
        raise errors.QueryError(
            f'function {funcname} is not unique',
            context=expr.context)
    else:
        matched_call = matched[0]

    # Rebind args in final call order and collect per-param typemods.
    args, params_typemods = finalize_args(matched_call, ctx=ctx)

    matched_func_params = matched_call.func.get_params(env.schema)
    variadic_param = matched_func_params.find_variadic(env.schema)
    variadic_param_type = None
    if variadic_param is not None:
        variadic_param_type = irtyputils.type_to_typeref(
            env.schema, variadic_param.get_type(env.schema))

    matched_func_ret_type = matched_call.func.get_return_type(env.schema)
    # The call is considered polymorphic only when polymorphism flows
    # through: some parameter is polymorphic AND so is the return type.
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_func_params.objects(env.schema))
        and matched_func_ret_type.is_polymorphic(env.schema)
    )

    matched_func_initial_value = matched_call.func.get_initial_value(
        env.schema)

    func = matched_call.func
    func_name = func.get_shortname(env.schema)

    if matched_func_initial_value is not None:
        # The initial value is stored as EdgeQL text; parse it and cast
        # it to the resolved return type before compiling.
        iv_ql = qlast.TypeCast(
            expr=qlparser.parse_fragment(matched_func_initial_value.text),
            type=typegen.type_to_ql_typeref(matched_call.return_type,
                                            ctx=ctx),
        )
        func_initial_value = dispatch.compile(iv_ql, ctx=ctx)
    else:
        func_initial_value = None

    rtype = matched_call.return_type
    path_id = pathctx.get_expression_path_id(rtype, ctx=ctx)

    if rtype.is_tuple():
        # Pre-compute indirection path ids for tuple elements (one
        # level of nested tuples included), flattened into a single
        # list with nested ids appended after the top-level ones.
        tuple_path_ids = []
        nested_path_ids = []
        for n, st in rtype.iter_subtypes(ctx.env.schema):
            elem_path_id = pathctx.get_tuple_indirection_path_id(
                path_id, n, st, ctx=ctx).strip_weak_namespaces()

            if st.is_tuple():
                nested_path_ids.append([
                    pathctx.get_tuple_indirection_path_id(
                        elem_path_id, nn, sst,
                        ctx=ctx).strip_weak_namespaces()
                    for nn, sst in st.iter_subtypes(ctx.env.schema)
                ])

            tuple_path_ids.append(elem_path_id)
        for nested in nested_path_ids:
            tuple_path_ids.extend(nested)
    else:
        tuple_path_ids = None

    fcall = irast.FunctionCall(
        args=args,
        func_module_id=env.schema.get_global(
            s_mod.Module, func_name.module).id,
        func_shortname=func_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=func.get_from_function(env.schema),
        force_return_cast=func.get_force_return_cast(env.schema),
        sql_func_has_out_params=func.get_sql_func_has_out_params(env.schema),
        error_on_null_result=func.get_error_on_null_result(env.schema),
        params_typemods=params_typemods,
        context=expr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=matched_call.func.get_return_typemod(env.schema),
        has_empty_variadic=matched_call.has_empty_variadic,
        variadic_param_type=variadic_param_type,
        func_initial_value=func_initial_value,
        tuple_path_ids=tuple_path_ids,
    )

    return setgen.ensure_set(fcall, typehint=rtype, path_id=path_id, ctx=ctx)
def type_to_typeref(t: s_types.Type,
                    env: context.Environment) -> irast.TypeRef:
    """Convert a schema type to an IR TypeRef using the environment.

    Thin convenience wrapper around ``irtyputils.type_to_typeref``
    that supplies the schema and the per-environment type-ref cache.
    """
    return irtyputils.type_to_typeref(
        env.schema, t, cache=env.type_ref_cache)
def compile_cast(
        ir_expr: irast.Base, new_stype: s_types.Type, *,
        srcctx: parsing.ParserContext,
        ctx: context.ContextLevel) -> irast.OperatorCall:
    """Compile a cast of *ir_expr* to *new_stype*.

    Dispatches on the combination of source and target types: empty
    sets and untyped empty arrays are simply retyped; identical types
    are a no-op; object-to-object casts are rejected; array literals,
    tuples, sub/supertype relationships and general arrays each take a
    dedicated path; everything else falls through to ``_compile_cast``.

    NOTE(review): the declared return type is ``irast.OperatorCall``,
    but several branches return the results of ``setgen`` helpers
    (sets) -- presumably ``irast.Set`` was intended; confirm before
    tightening.

    Raises errors.QueryError for object-to-object casts.
    """
    if isinstance(ir_expr, irast.EmptySet):
        # For the common case of casting an empty set, we simply
        # generate a new EmptySet node of the requested type.
        return setgen.new_empty_set(
            stype=new_stype,
            alias=ir_expr.path_id.target_name_hint.name,
            ctx=ctx,
            srcctx=ir_expr.context)

    elif irutils.is_untyped_empty_array_expr(ir_expr):
        # Ditto for empty arrays.
        new_typeref = irtyputils.type_to_typeref(ctx.env.schema, new_stype)
        return setgen.ensure_set(
            irast.Array(elements=[], typeref=new_typeref), ctx=ctx)

    ir_set = setgen.ensure_set(ir_expr, ctx=ctx)
    orig_stype = setgen.get_set_type(ir_set, ctx=ctx)

    if orig_stype == new_stype:
        # Identity cast: nothing to do.
        return ir_set
    elif orig_stype.is_object_type() and new_stype.is_object_type():
        # Object types cannot be cast between themselves,
        # as cast is a _constructor_ operation, and the only
        # valid way to construct an object is to INSERT it.
        raise errors.QueryError(
            f'cannot cast object type '
            f'{orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}, use '
            f'`...[IS {new_stype.get_displayname(ctx.env.schema)}]` instead',
            context=srcctx)

    if isinstance(ir_set.expr, irast.Array):
        # Array literals are cast element-by-element.
        return _cast_array_literal(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.is_tuple():
        return _cast_tuple(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.issubclass(ctx.env.schema, new_stype):
        # The new type is a supertype of the old type,
        # and is always a wider domain, so we simply reassign
        # the stype.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype, ctx=ctx)

    elif new_stype.issubclass(ctx.env.schema, orig_stype):
        # The new type is a subtype, so may potentially have
        # a more restrictive domain, generate a cast call.
        # NOTE(review): the comment above says "generate a cast call"
        # but the code performs an inheritance cast, same as the
        # supertype branch -- confirm which is intended.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype, ctx=ctx)

    elif orig_stype.is_array():
        return _cast_array(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    else:
        json_t = ctx.env.get_track_schema_object('std::json')
        if (new_stype.issubclass(ctx.env.schema, json_t)
                and ir_set.path_id.is_objtype_path()):
            # JSON casts of objects are special: we want the full shape
            # and not just an identity.
            with ctx.new() as subctx:
                subctx.implicit_id_in_shapes = False
                subctx.implicit_tid_in_shapes = False
                viewgen.compile_view_shapes(ir_set, ctx=subctx)

        return _compile_cast(
            ir_expr, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)