def compile_FunctionCall(
        expr: qlast.FunctionCall, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a QL function-call AST node into an ``irast.FunctionCall`` set.

    Resolves the function name (rejecting attempts to call a query
    parameter), matches a callable overload via ``polyres``, compiles the
    arguments, and assembles the IR node together with its tuple path ids
    when the return type is a tuple.
    """
    env = ctx.env

    if isinstance(expr.func, str):
        # A bare name can shadow a function parameter in the current
        # compilation options; parameters are not callable.
        if (
            ctx.env.options.func_params is not None
            and ctx.env.options.func_params.get_by_name(
                env.schema, expr.func
            )
        ):
            raise errors.QueryError(
                f'parameter `{expr.func}` is not callable',
                context=expr.context)

        funcname = expr.func
    else:
        # A (module, name) pair.
        funcname = sn.Name(expr.func[1], expr.func[0])

    funcs = env.schema.get_functions(funcname, module_aliases=ctx.modaliases)

    if funcs is None:
        raise errors.QueryError(
            f'could not resolve function name {funcname}',
            context=expr.context)

    in_polymorphic_func = (
        ctx.env.options.func_params is not None and
        ctx.env.options.func_params.has_polymorphic(env.schema)
    )

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.options.schema_object_context is s_constr.Constraint
    )

    args, kwargs = compile_call_args(expr, funcname, ctx=ctx)
    matched = polyres.find_callable(funcs, args=args, kwargs=kwargs, ctx=ctx)
    if not matched:
        raise errors.QueryError(
            f'could not find a function variant {funcname}',
            context=expr.context)
    elif len(matched) > 1:
        if in_abstract_constraint:
            # Ambiguity is tolerated inside abstract constraints:
            # pick the first match.
            matched_call = matched[0]
        else:
            raise errors.QueryError(
                f'function {funcname} is not unique',
                context=expr.context)
    else:
        matched_call = matched[0]

    func = matched_call.func
    assert isinstance(func, s_func.Function)
    func_name = func.get_shortname(env.schema)

    # Session-only functions may only be called when compiling in
    # session mode.
    if not ctx.env.options.session_mode and func.get_session_only(env.schema):
        raise errors.QueryError(
            f'{func_name}() cannot be called in a non-session context',
            context=expr.context)

    matched_func_params = func.get_params(env.schema)
    variadic_param = matched_func_params.find_variadic(env.schema)
    variadic_param_type = None
    if variadic_param is not None:
        variadic_param_type = typegen.type_to_typeref(
            variadic_param.get_type(env.schema),
            env=env,
        )

    matched_func_ret_type = func.get_return_type(env.schema)
    # The call is polymorphic only when both some parameter AND the
    # return type are polymorphic.
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_func_params.objects(env.schema))
        and matched_func_ret_type.is_polymorphic(env.schema)
    )

    matched_func_initial_value = func.get_initial_value(env.schema)

    final_args, params_typemods = finalize_args(
        matched_call,
        is_polymorphic=is_polymorphic,
        ctx=ctx,
    )

    if not in_abstract_constraint:
        # We cannot add strong references to functions from
        # abstract constraints, since we cannot know which
        # form of the function is actually used.
        env.schema_refs.add(func)

    func_initial_value: Optional[irast.Set]

    if matched_func_initial_value is not None:
        # The initial value is stored as EdgeQL text; parse and compile
        # it with an explicit cast to the matched return type.
        iv_ql = qlast.TypeCast(
            expr=qlparser.parse_fragment(matched_func_initial_value.text),
            type=typegen.type_to_ql_typeref(matched_call.return_type, ctx=ctx),
        )
        func_initial_value = setgen.ensure_set(
            dispatch.compile(iv_ql, ctx=ctx),
            ctx=ctx,
        )
    else:
        func_initial_value = None

    rtype = matched_call.return_type
    path_id = pathctx.get_expression_path_id(rtype, ctx=ctx)

    if rtype.is_tuple(env.schema):
        # Precompute path ids for each tuple element (and one level of
        # nested tuple elements) so downstream code can address them.
        rtype = cast(s_types.Tuple, rtype)
        tuple_path_ids = []
        nested_path_ids = []
        for n, st in rtype.iter_subtypes(ctx.env.schema):
            elem_path_id = pathctx.get_tuple_indirection_path_id(
                path_id, n, st, ctx=ctx).strip_weak_namespaces()

            if isinstance(st, s_types.Tuple):
                nested_path_ids.append([
                    pathctx.get_tuple_indirection_path_id(
                        elem_path_id, nn, sst, ctx=ctx).strip_weak_namespaces()
                    for nn, sst in st.iter_subtypes(ctx.env.schema)
                ])

            tuple_path_ids.append(elem_path_id)
        for nested in nested_path_ids:
            tuple_path_ids.extend(nested)
    else:
        tuple_path_ids = []

    fcall = irast.FunctionCall(
        args=final_args,
        func_module_id=env.schema.get_global(
            s_mod.Module, func_name.module).id,
        func_shortname=func_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=func.get_from_function(env.schema),
        force_return_cast=func.get_force_return_cast(env.schema),
        session_only=func.get_session_only(env.schema),
        volatility=func.get_volatility(env.schema),
        sql_func_has_out_params=func.get_sql_func_has_out_params(env.schema),
        error_on_null_result=func.get_error_on_null_result(env.schema),
        params_typemods=params_typemods,
        context=expr.context,
        typeref=typegen.type_to_typeref(
            rtype, env=env,
        ),
        typemod=matched_call.func.get_return_typemod(env.schema),
        has_empty_variadic=matched_call.has_empty_variadic,
        variadic_param_type=variadic_param_type,
        func_initial_value=func_initial_value,
        tuple_path_ids=tuple_path_ids,
    )

    return setgen.ensure_set(fcall, typehint=rtype, path_id=path_id, ctx=ctx)
def compile_operator(
        qlexpr: qlast.Base, op_name: str, qlargs: typing.List[qlast.Base], *,
        ctx: context.ContextLevel) -> irast.OperatorCall:
    """Compile an operator application into an ``irast.OperatorCall`` set.

    Compiles each operand in its own fenced scope, resolves the operator
    overload (with special handling for 2-operand operators over arrays
    and tuples, and for ``std::UNION``/``std::IF`` over object types),
    and builds the resulting IR node.
    """
    env = ctx.env
    schema = env.schema
    opers = schema.get_operators(op_name, module_aliases=ctx.modaliases)

    if opers is None:
        raise errors.QueryError(
            f'no operator matches the given name and argument types',
            context=qlexpr.context)

    args = []
    for ai, qlarg in enumerate(qlargs):
        with ctx.newscope(fenced=True) as fencectx:
            # We put on a SET OF fence preemptively in case this is
            # a SET OF arg, which we don't know yet due to polymorphic
            # matching.  We will remove it if necessary in `finalize_args()`.
            arg_ir = setgen.ensure_set(
                dispatch.compile(qlarg, ctx=fencectx),
                ctx=fencectx)

            arg_ir = setgen.scoped_set(
                setgen.ensure_stmt(arg_ir, ctx=fencectx),
                ctx=fencectx)

        arg_type = inference.infer_type(arg_ir, ctx.env)
        if arg_type is None:
            raise errors.QueryError(
                f'could not resolve the type of operand '
                f'#{ai} of {op_name}',
                context=qlarg.context)

        args.append((arg_type, arg_ir))

    matched = None
    # Some 2-operand operators are special when their operands are
    # arrays or tuples.
    if len(args) == 2:
        coll_opers = None
        # If both of the args are arrays or tuples, potentially
        # compile the operator for them differently than for other
        # combinations.
        if args[0][0].is_tuple() and args[1][0].is_tuple():
            # Out of the candidate operators, find the ones that
            # correspond to tuples.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_tuple()
                    for param in op.get_params(schema).objects(schema)
                )
            ]

        elif args[0][0].is_array() and args[1][0].is_array():
            # Out of the candidate operators, find the ones that
            # correspond to arrays.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_array()
                    for param in op.get_params(schema).objects(schema)
                )
            ]

        # Proceed only if we have a special case of collection operators.
        if coll_opers:
            # Then check if they are recursive (i.e. validation must be
            # done recursively for the subtypes).  We rely on the fact that
            # it is forbidden to define an operator that has both
            # recursive and non-recursive versions.
            if not coll_opers[0].get_recursive(schema):
                # The operator is non-recursive, so regular processing
                # is needed.
                matched = polyres.find_callable(
                    coll_opers, args=args, kwargs={}, ctx=ctx)

            else:
                # Ultimately the operator will be the same, regardless of the
                # specific operand types, as long as it passed validation, so
                # we just use the first operand type for the purpose of
                # finding the callable.
                matched = polyres.find_callable(
                    coll_opers,
                    args=[(args[0][0], args[0][1]), (args[0][0], args[1][1])],
                    kwargs={},
                    ctx=ctx)

                # Now that we have an operator, we need to validate that it
                # can be applied to the tuple or array elements.
                submatched = validate_recursive_operator(
                    opers, args[0], args[1], ctx=ctx)

                if len(submatched) != 1:
                    # This is an error. We want the error message to
                    # reflect whether no matches were found or too
                    # many, so we preserve the submatches found for
                    # this purpose.
                    matched = submatched

    # No special handling match was necessary, find a normal match.
    if matched is None:
        matched = polyres.find_callable(opers, args=args, kwargs={}, ctx=ctx)

    if len(matched) == 1:
        matched_call = matched[0]
    else:
        # Build a human-readable description of the operand types for
        # the error message.
        if len(args) == 2:
            ltype = args[0][0].material_type(env.schema)
            rtype = args[1][0].material_type(env.schema)

            types = (
                f'{ltype.get_displayname(env.schema)!r} and '
                f'{rtype.get_displayname(env.schema)!r}')
        else:
            types = ', '.join(
                repr(
                    a[0].material_type(env.schema).get_displayname(env.schema)
                ) for a in args
            )

        if not matched:
            raise errors.QueryError(
                f'operator {str(op_name)!r} cannot be applied to '
                f'operands of type {types}',
                context=qlexpr.context)
        elif len(matched) > 1:
            detail = ', '.join(
                f'`{m.func.get_display_signature(ctx.env.schema)}`'
                for m in matched
            )
            raise errors.QueryError(
                f'operator {str(op_name)!r} is ambiguous for '
                f'operands of type {types}',
                hint=f'Possible variants: {detail}.',
                context=qlexpr.context)

    args, params_typemods = finalize_args(matched_call, ctx=ctx)

    oper = matched_call.func
    oper_name = oper.get_shortname(env.schema)
    matched_params = oper.get_params(env.schema)
    rtype = matched_call.return_type

    if oper_name in {'std::UNION', 'std::IF'} and rtype.is_object_type():
        # Special case for the UNION and IF operators, instead of common
        # parent type, we return a union type.
        if oper_name == 'std::UNION':
            larg, rarg = (a.expr for a in args)
        else:
            # For IF, the first argument is the condition; skip it.
            larg, rarg = (a.expr for a in args[1:])

        left_type = setgen.get_set_type(larg, ctx=ctx).material_type(
            ctx.env.schema)
        right_type = setgen.get_set_type(rarg, ctx=ctx).material_type(
            ctx.env.schema)

        if left_type.issubclass(env.schema, right_type):
            rtype = right_type
        elif right_type.issubclass(env.schema, left_type):
            rtype = left_type
        else:
            env.schema, rtype = s_inh.create_virtual_parent(
                env.schema, [left_type, right_type])

    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_params.objects(env.schema))
        and oper.get_return_type(env.schema).is_polymorphic(env.schema)
    )

    in_polymorphic_func = (
        ctx.func is not None and
        ctx.func.get_params(env.schema).has_polymorphic(env.schema)
    )

    from_op = oper.get_from_operator(env.schema)
    # Use the backend operator directly only when there is no explicit
    # code/function body and we are not inside a polymorphic function.
    if (from_op is not None and oper.get_code(env.schema) is None and
            oper.get_from_function(env.schema) is None and
            not in_polymorphic_func):
        sql_operator = tuple(from_op)
    else:
        sql_operator = None

    node = irast.OperatorCall(
        args=args,
        func_module_id=env.schema.get_global(
            s_mod.Module, oper_name.module).id,
        func_shortname=oper_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=oper.get_from_function(env.schema),
        sql_operator=sql_operator,
        force_return_cast=oper.get_force_return_cast(env.schema),
        operator_kind=oper.get_operator_kind(env.schema),
        params_typemods=params_typemods,
        context=qlexpr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=oper.get_return_typemod(env.schema),
    )

    return setgen.ensure_set(node, typehint=rtype, ctx=ctx)
def compile_ForQuery(
        qlstmt: qlast.ForQuery, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a FOR statement into an IR SelectStmt set.

    The iterator expression is compiled inside a fenced scope as a view,
    registered in the appropriate parent scope, and the result clause is
    compiled with the iterator symbol visible.
    """
    with ctx.subquery() as sctx:
        stmt = irast.SelectStmt()
        init_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

        with sctx.newscope(fenced=True) as scopectx:
            iterator_ctx = None

            # If an outer context designated a dedicated iterator
            # context, hoist the iterator scope there.
            if (ctx.expr_exposed and ctx.iterator_ctx is not None
                    and ctx.iterator_ctx is not sctx):
                iterator_ctx = ctx.iterator_ctx

            if iterator_ctx is not None:
                iterator_scope_parent = iterator_ctx.path_scope
                path_id_ns = iterator_ctx.path_id_namespace
            else:
                iterator_scope_parent = sctx.path_scope
                path_id_ns = sctx.path_id_namespace

            iterator = qlstmt.iterator
            # Unwrap a single-element set literal used as the iterator.
            if isinstance(iterator, qlast.Set) and len(iterator.elements) == 1:
                iterator = iterator.elements[0]

            iterator_view = stmtctx.declare_view(
                iterator, qlstmt.iterator_alias,
                path_id_namespace=path_id_ns, ctx=scopectx)

            iterator_stmt = setgen.new_set_from_set(
                iterator_view, preserve_scope_ns=True, ctx=scopectx)

            iterator_type = setgen.get_set_type(iterator_stmt, ctx=ctx)
            anytype = iterator_type.find_any(ctx.env.schema)
            if anytype is not None:
                raise errors.QueryError(
                    'FOR statement has iterator of indeterminate type',
                    context=ctx.env.type_origins.get(anytype),
                )

            if iterator_ctx is not None and iterator_ctx.stmt is not None:
                iterator_ctx.stmt.hoisted_iterators.append(iterator_stmt)

            stmt.iterator_stmt = iterator_stmt

            # Flush any deferred work recorded for the iterator view so
            # it runs at statement finalization.
            view_scope_info = scopectx.path_scope_map[iterator_view]
            iterator_scope = view_scope_info.path_scope
            for cb in view_scope_info.tentative_work:
                stmtctx.at_stmt_fini(cb, ctx=ctx)
            view_scope_info.tentative_work[:] = []

        pathctx.register_set_in_scope(
            iterator_stmt,
            path_scope=iterator_scope_parent,
            ctx=sctx,
        )
        # Iterator symbol is, by construction, outside of the scope
        # of the UNION argument, but is perfectly legal to be referenced
        # inside a factoring fence that is an immediate child of this
        # scope.
        iterator_scope_parent.factoring_allowlist.add(
            stmt.iterator_stmt.path_id)

        node = iterator_scope_parent.find_descendant(iterator_stmt.path_id)
        if node is not None:
            node.attach_subtree(iterator_scope)

        stmt.result = compile_result_clause(
            qlstmt.result,
            view_scls=ctx.view_scls,
            view_rptr=ctx.view_rptr,
            result_alias=qlstmt.result_alias,
            view_name=ctx.toplevel_result_view_name,
            forward_rptr=True,
            ctx=sctx)

        # Apply the implicit LIMIT when compiling an exposed or
        # top-level expression.
        if ((ctx.expr_exposed or sctx.stmt is ctx.toplevel_stmt)
                and ctx.implicit_limit):
            stmt.limit = setgen.ensure_set(
                dispatch.compile(
                    qlast.IntegerConstant(value=str(ctx.implicit_limit)),
                    ctx=sctx,
                ),
                ctx=sctx,
            )

        result = fini_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

    return result
def compile_path(expr: qlast.Path, *, ctx: context.ContextLevel) -> irast.Set:
    """Create an ir.Set representing the given EdgeQL path expression."""
    anchors = ctx.anchors

    path_tip = None

    if expr.partial:
        # A partial path (`.foo`) needs an established prefix.
        if ctx.partial_path_prefix is not None:
            path_tip = ctx.partial_path_prefix
        else:
            raise errors.QueryError(
                'could not resolve partial path ',
                context=expr.context)

    extra_scopes = {}      # sets that carry their own sub-scopes
    computables = []       # computable pointers to recompile afterwards
    path_sets = []         # the tip set after each step

    for i, step in enumerate(expr.steps):
        if isinstance(step, qlast.Source):
            # 'self' can only appear as the starting path label
            # syntactically and is a known anchor
            try:
                path_tip = anchors[step.__class__]
            except KeyError:
                path_tip = anchors['__source__']

        elif isinstance(step, qlast.Subject):
            # '__subject__' can only appear as the starting path label
            # syntactically and is a known anchor
            try:
                path_tip = anchors[step.__class__]
            except KeyError:
                path_tip = anchors['__subject__']

        elif isinstance(step, qlast.ObjectRef):
            if i > 0:  # pragma: no cover
                raise RuntimeError(
                    'unexpected ObjectRef as a non-first path item')

            refnode = None

            if not step.module and step.name not in ctx.aliased_views:
                # Check if the starting path label is a known anchor
                refnode = anchors.get(step.name)

            if refnode is not None:
                path_tip = new_set_from_set(
                    refnode, preserve_scope_ns=True, ctx=ctx)
            else:
                stype = schemactx.get_schema_type(
                    step,
                    item_types=(s_objtypes.ObjectType, ),
                    ctx=ctx)

                if (stype.get_view_type(ctx.env.schema) is not None and
                        stype.get_name(ctx.env.schema) not in ctx.view_nodes):
                    # This is a schema-level view, as opposed to
                    # a WITH-block or inline alias view.
                    stype = stmtctx.declare_view_from_schema(stype, ctx=ctx)

                view_set = ctx.view_sets.get(stype)
                if view_set is not None:
                    path_tip = new_set_from_set(view_set, ctx=ctx)
                    path_scope = ctx.path_scope_map.get(view_set)
                    extra_scopes[path_tip] = path_scope.copy()
                else:
                    path_tip = class_set(stype, ctx=ctx)

                view_scls = ctx.class_view_overrides.get(stype.id)
                if (view_scls is not None
                        and view_scls != get_set_type(path_tip, ctx=ctx)):
                    path_tip = ensure_set(
                        path_tip, type_override=view_scls, ctx=ctx)

        elif isinstance(step, qlast.Ptr):
            # Pointer traversal step
            ptr_expr = step
            direction = (ptr_expr.direction or
                         s_pointers.PointerDirection.Outbound)
            ptr_name = ptr_expr.ptr.name

            if ptr_expr.type == 'property':
                # Link property reference; the source is the
                # link immediately preceding this step in the path.
                source = irtyputils.ptrcls_from_ptrref(
                    path_tip.rptr.ptrref, schema=ctx.env.schema)
            else:
                source = get_set_type(path_tip, ctx=ctx)

            with ctx.newscope(fenced=True, temporary=True) as subctx:
                if isinstance(source, s_abc.Tuple):
                    path_tip = tuple_indirection_set(
                        path_tip, source=source, ptr_name=ptr_name,
                        source_context=step.context, ctx=subctx)

                else:
                    path_tip = ptr_step_set(
                        path_tip, source=source, ptr_name=ptr_name,
                        direction=direction,
                        ignore_computable=True,
                        source_context=step.context, ctx=subctx)

                    ptrcls = irtyputils.ptrcls_from_ptrref(
                        path_tip.rptr.ptrref, schema=ctx.env.schema)
                    if _is_computable_ptr(ptrcls, ctx=ctx):
                        computables.append(path_tip)

        elif isinstance(step, qlast.TypeIndirection):
            # A type filter step: `[IS SomeType]`.
            arg_type = inference.infer_type(path_tip, ctx.env)
            if not isinstance(arg_type, s_objtypes.ObjectType):
                raise errors.QueryError(
                    f'invalid type filter operand: '
                    f'{arg_type.get_displayname(ctx.env.schema)} '
                    f'is not an object type',
                    context=step.context)

            typ = schemactx.get_schema_type(step.type.maintype, ctx=ctx)
            if not isinstance(typ, s_objtypes.ObjectType):
                raise errors.QueryError(
                    f'invalid type filter operand: '
                    f'{typ.get_displayname(ctx.env.schema)} is not '
                    f'an object type',
                    context=step.type.context)

            # The expression already of the desired type, elide
            # the indirection.
            if arg_type != typ:
                path_tip = class_indirection_set(
                    path_tip, typ, optional=False, ctx=ctx)

        else:
            # Arbitrary expression
            if i > 0:  # pragma: no cover
                raise RuntimeError(
                    'unexpected expression as a non-first path item')

            with ctx.newscope(fenced=True, temporary=True) as subctx:
                path_tip = ensure_set(
                    dispatch.compile(step, ctx=subctx), ctx=subctx)

                if path_tip.path_id.is_type_indirection_path():
                    scope_set = path_tip.rptr.source
                else:
                    scope_set = path_tip

                extra_scopes[scope_set] = subctx.path_scope

        # Check whether the current tip maps onto a previously-compiled
        # view set; if so, reuse its expression and rptr.
        for key_path_id in path_tip.path_id.iter_weak_namespace_prefixes():
            mapped = ctx.view_map.get(key_path_id)
            if mapped is not None:
                path_tip = new_set(
                    path_id=mapped.path_id,
                    stype=get_set_type(path_tip, ctx=ctx),
                    expr=mapped.expr,
                    rptr=mapped.rptr,
                    ctx=ctx)
                break

        if pathctx.path_is_banned(path_tip.path_id, ctx=ctx):
            dname = stype.get_displayname(ctx.env.schema)
            raise errors.QueryError(
                f'invalid reference to {dname}: '
                f'self-referencing INSERTs are not allowed',
                hint=(f'Use DETACHED if you meant to refer to an '
                      f'uncorrelated {dname} set'),
                context=step.context,
            )

        path_sets.append(path_tip)

    path_tip.context = expr.context
    pathctx.register_set_in_scope(path_tip, ctx=ctx)

    # Recompile any computable pointers whose scope is still visible.
    for ir_set in computables:
        scope = ctx.path_scope.find_descendant(ir_set.path_id)
        if scope is None:
            # The path is already in the scope, no point
            # in recompiling the computable expression.
            continue

        with ctx.new() as subctx:
            subctx.path_scope = scope
            comp_ir_set = computable_ptr_set(ir_set.rptr, ctx=subctx)
            i = path_sets.index(ir_set)
            if i != len(path_sets) - 1:
                # Re-link the next step's source to the recompiled set.
                path_sets[i + 1].rptr.source = comp_ir_set
            else:
                path_tip = comp_ir_set
            path_sets[i] = comp_ir_set

    # Merge any sub-scopes collected along the way into the main tree.
    for ir_set, scope in extra_scopes.items():
        node = ctx.path_scope.find_descendant(ir_set.path_id)
        if node is None:
            # The path portion not being a descendant means
            # that it is already present in the scope above us,
            # along with the view scope.
            continue

        fuse_scope_branch(ir_set, node, scope, ctx=ctx)
        if ir_set.path_scope_id is None:
            pathctx.assign_set_scope(ir_set, node, ctx=ctx)

    return path_tip
def compile_TypeCast(
        expr: qlast.TypeCast, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a QL type-cast expression.

    Handles three cases: an empty array literal cast to an array type,
    a cast applied to a query parameter (which registers the parameter
    and its type in ``ctx.env.query_parameters``), and the general case
    of casting an arbitrary compiled expression.
    """
    target_typeref = typegen.ql_typeexpr_to_ir_typeref(expr.type, ctx=ctx)
    ir_expr: irast.Base

    if (isinstance(expr.expr, qlast.Array) and not expr.expr.elements
            and irtyputils.is_array(target_typeref)):
        # Special case: empty array literal cast directly to an array
        # type needs no inner compilation.
        ir_expr = irast.Array()

    elif isinstance(expr.expr, qlast.Parameter):
        pt = typegen.ql_typeexpr_to_type(expr.type, ctx=ctx)
        param_name = expr.expr.name
        if param_name not in ctx.env.query_parameters:
            if ctx.env.query_parameters:
                # Positional (all-decimal names) and named parameters
                # must not be mixed within one query.
                first_key: str = next(iter(ctx.env.query_parameters))
                if first_key.isdecimal():
                    if not param_name.isdecimal():
                        raise errors.QueryError(
                            f'cannot combine positional and named parameters '
                            f'in the same query',
                            context=expr.expr.context)
                else:
                    if param_name.isdecimal():
                        raise errors.QueryError(
                            f'expected a named argument',
                            context=expr.expr.context)
            ctx.env.query_parameters[param_name] = pt
        else:
            # Repeated use of the same parameter: the new cast target
            # must be castable from the first-seen type.
            param_first_type = ctx.env.query_parameters[param_name]
            if not param_first_type.explicitly_castable_to(pt, ctx.env.schema):
                raise errors.QueryError(
                    f'cannot cast '
                    f'{param_first_type.get_displayname(ctx.env.schema)} to '
                    f'{pt.get_displayname(ctx.env.schema)}',
                    context=expr.expr.context)

        if ctx.env.json_parameters:
            if param_name.isdecimal():
                raise errors.QueryError(
                    'queries compiled to accept JSON parameters do not '
                    'accept positional parameters',
                    context=expr.expr.context)

            # In JSON-parameters mode every parameter arrives as
            # std::json and is cast to the requested type.
            json_typeref = irtyputils.type_to_typeref(
                ctx.env.schema,
                ctx.env.get_track_schema_type('std::json'))

            param = cast.compile_cast(
                irast.Parameter(
                    typeref=json_typeref,
                    name=param_name,
                    context=expr.expr.context,
                ),
                pt,
                srcctx=expr.expr.context,
                ctx=ctx,
            )
        else:
            param = setgen.ensure_set(
                irast.Parameter(
                    typeref=irtyputils.type_to_typeref(ctx.env.schema, pt),
                    name=param_name,
                    context=expr.expr.context,
                ),
                ctx=ctx,
            )

        return param

    else:
        with ctx.new() as subctx:
            # We use "exposed" mode in case this is a type of a cast
            # that wants view shapes, e.g. a std::json cast.  We do
            # this wholesale to support tuple and array casts without
            # having to analyze the target type (which is cumbersome
            # in QL AST).
            subctx.expr_exposed = True
            ir_expr = dispatch.compile(expr.expr, ctx=subctx)

    new_stype = typegen.ql_typeexpr_to_type(expr.type, ctx=ctx)
    return cast.compile_cast(
        ir_expr, new_stype, ctx=ctx, srcctx=expr.expr.context)
def _compile_ql_config_op(self, ctx: CompileContext, ql: qlast.Base):
    """Compile a CONFIGURE statement into a ``dbstate.SessionStateQuery``.

    Produces either a backend ``SET``/``ALTER SYSTEM`` statement (for
    backend-level settings) or a regular compiled SQL query, and applies
    session-level config ops to the current transaction state.
    """
    current_tx = ctx.state.current_tx()
    schema = current_tx.get_schema()

    modaliases = ctx.state.current_tx().get_modaliases()
    session_config = ctx.state.current_tx().get_session_config()

    # System-level configuration is only allowed outside of an explicit
    # transaction block.
    if ql.system and not current_tx.is_implicit():
        raise errors.QueryError('CONFIGURE SYSTEM cannot be executed in a '
                                'transaction block')

    ir = ql_compiler.compile_ast_to_ir(
        ql,
        schema=schema,
        modaliases=modaliases,
    )

    is_backend_setting = bool(getattr(ir, 'backend_setting', None))
    requires_restart = bool(getattr(ir, 'requires_restart', False))

    if is_backend_setting:
        if isinstance(ql, qlast.ConfigReset):
            # RESET maps to setting the value to NULL/default.
            val = None
        else:
            # Postgres is fine with all setting types to be passed
            # as strings.
            value = ireval.evaluate_to_python_val(ir.expr, schema=schema)
            val = pg_ast.StringConstant(val=str(value))

        if ir.system:
            sql_ast = pg_ast.AlterSystem(
                name=ir.backend_setting,
                value=val,
            )
        else:
            sql_ast = pg_ast.Set(
                name=ir.backend_setting,
                value=val,
            )

        sql_text = pg_codegen.generate_source(sql_ast) + ';'

        sql = (sql_text.encode(), )
    else:
        sql_text, _ = pg_compiler.compile_ir_to_sql(
            ir,
            pretty=debug.flags.edgeql_compile,
            output_format=pg_compiler.OutputFormat.JSONB)

        sql = (sql_text.encode(), )

    if not ql.system:
        # Session-level configuration: evaluate the config op and fold
        # it into the transaction's session config immediately.
        config_op = ireval.evaluate_to_config_op(ir, schema=schema)

        session_config = config_op.apply(
            config.get_settings(),
            session_config)
        ctx.state.current_tx().update_session_config(session_config)
    else:
        config_op = None

    return dbstate.SessionStateQuery(
        sql=sql,
        is_backend_setting=is_backend_setting,
        is_system_setting=ql.system,
        requires_restart=requires_restart,
        config_op=config_op,
    )
def compile_ast_to_ir(tree, schema, *,
                      parent_object_type=None,
                      anchors=None,
                      path_prefix_anchor=None,
                      singletons=None,
                      func_params=None,
                      security_context=None,
                      derived_target_module=None,
                      result_view_name=None,
                      modaliases=None,
                      implicit_id_in_shapes=False,
                      implicit_tid_in_shapes=False,
                      schema_view_mode=False,
                      disable_constant_folding=False,
                      json_parameters=False,
                      session_mode=False,
                      allow_abstract_operators=False,
                      allow_generic_type_output=False):
    """Compile given EdgeQL AST into EdgeDB IR."""

    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    # Build the top-level compilation context from the supplied options.
    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        security_context=security_context,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_abstract_operators=allow_abstract_operators,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type)

    if path_prefix_anchor is not None:
        # Establish the partial-path prefix (`.foo`) from the named anchor.
        path_prefix = anchors[path_prefix_anchor]
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters must form a contiguous $0..$N-1 range.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
def compile_cast(
        ir_expr: Union[irast.Set, irast.Expr],
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel,
        cardinality_mod: Optional[qlast.CardinalityModifier]=None
) -> irast.Set:
    """Compile a cast of *ir_expr* to *new_stype*.

    Dispatches to specialized helpers for empty sets, empty arrays,
    array literals, tuples, arrays, inheritance casts, and several
    std::json special cases; otherwise falls through to ``_compile_cast``.
    """
    if isinstance(ir_expr, irast.EmptySet):
        # For the common case of casting an empty set, we simply
        # generate a new EmptySet node of the requested type.
        return setgen.new_empty_set(
            stype=new_stype,
            alias=ir_expr.path_id.target_name_hint.name,
            ctx=ctx,
            srcctx=ir_expr.context)

    elif irutils.is_untyped_empty_array_expr(ir_expr):
        # Ditto for empty arrays.
        new_typeref = typegen.type_to_typeref(new_stype, ctx.env)
        return setgen.ensure_set(
            irast.Array(elements=[], typeref=new_typeref), ctx=ctx)

    ir_set = setgen.ensure_set(ir_expr, ctx=ctx)
    orig_stype = setgen.get_set_type(ir_set, ctx=ctx)

    if (orig_stype == new_stype and
            cardinality_mod is not qlast.CardinalityModifier.Required):
        # A no-op cast (unless a Required modifier forces a check).
        return ir_set
    elif orig_stype.is_object_type() and new_stype.is_object_type():
        # Object types cannot be cast between themselves,
        # as cast is a _constructor_ operation, and the only
        # valid way to construct an object is to INSERT it.
        raise errors.QueryError(
            f'cannot cast object type '
            f'{orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}, use '
            f'`...[IS {new_stype.get_displayname(ctx.env.schema)}]` instead',
            context=srcctx)

    if isinstance(ir_set.expr, irast.Array):
        return _cast_array_literal(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.is_tuple(ctx.env.schema):
        return _cast_tuple(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif (
        orig_stype.is_array()
        and not s_types.is_type_compatible(
            orig_stype, new_stype, schema=ctx.env.schema)
    ):
        return _cast_array(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.issubclass(ctx.env.schema, new_stype):
        # The new type is a supertype of the old type,
        # and is always a wider domain, so we simply reassign
        # the stype.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype,
            cardinality_mod=cardinality_mod, ctx=ctx)

    elif new_stype.issubclass(ctx.env.schema, orig_stype):
        # The new type is a subtype, so may potentially have
        # a more restrictive domain, generate a cast call.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype,
            cardinality_mod=cardinality_mod, ctx=ctx)

    else:
        json_t = ctx.env.get_track_schema_type(
            sn.QualName('std', 'json'))

        if (new_stype.issubclass(ctx.env.schema, json_t) and
                ir_set.path_id.is_objtype_path()):
            # JSON casts of objects are special: we want the full shape
            # and not just an identity.
            with ctx.new() as subctx:
                subctx.implicit_id_in_shapes = False
                subctx.implicit_tid_in_shapes = False
                subctx.implicit_tname_in_shapes = False
                viewgen.compile_view_shapes(ir_set, ctx=subctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and new_stype.is_enum(ctx.env.schema)):
            # Casts from json to enums need some special handling
            # here, where we have access to the enum type. Just turn
            # it into json->str and str->enum.
            str_typ = ctx.env.get_track_schema_type(
                sn.QualName('std', 'str'))
            str_ir = compile_cast(ir_expr, str_typ, srcctx=srcctx, ctx=ctx)
            return compile_cast(
                str_ir, new_stype,
                cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and isinstance(new_stype, s_types.Array)
                and not new_stype.get_subtypes(ctx.env.schema)[0].issubclass(
                    ctx.env.schema, json_t)):
            # Turn casts from json->array<T> into json->array<json>
            # and array<json>->array<T>.
            ctx.env.schema, json_array_typ = s_types.Array.from_subtypes(
                ctx.env.schema, [json_t])
            json_array_ir = compile_cast(
                ir_expr, json_array_typ, srcctx=srcctx, ctx=ctx)
            return compile_cast(
                json_array_ir, new_stype,
                cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and isinstance(new_stype, s_types.Tuple)):
            return _cast_json_to_tuple(
                ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    return _compile_cast(
        ir_expr, orig_stype, new_stype,
        cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)
ptr_anchors[name] = ( qlast.Path(steps=[qlast.ObjectRef(name=source_alias)])) # Fill in empty sets for pointers that are needed but not present present_ptrs = set(ptr_anchors) for p in (needed_ptrs - present_ptrs): ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p)) typ = ptr.get_target(ctx.env.schema) assert typ ptr_anchors[p] = qlast.TypeCast( expr=qlast.Set(elements=[]), type=typegen.type_to_ql_typeref(typ, ctx=ctx)) if not ptr_anchors: raise errors.QueryError( 'INSERT UNLESS CONFLICT property requires matching shape', context=parser_context, ) conds: List[qlast.Expr] = [] for ptrname, (ptr, ptr_cnstrs) in constrs.items(): if ptrname not in present_ptrs: continue anchor = qlutils.subject_paths_substitute( ptr_anchors[ptrname], ptr_anchors) ptr_val = qlast.Path(partial=True, steps=[ qlast.Ptr(ptr=qlast.ObjectRef(name=ptrname)) ]) ptr, ptr_cnstrs = constrs[ptrname] ptr_card = ptr.get_cardinality(ctx.env.schema) for cnstr in ptr_cnstrs:
def _cast_tuple(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a cast of a tuple expression.

    Two cases are handled:

    * A registered direct cast from the tuple type to *new_stype*
      (e.g. tuple -> json): each element is cast to *new_stype* and the
      direct cast is applied to the rebuilt tuple.
    * A tuple-to-tuple cast: element counts must match; each element is
      cast to the corresponding target element type and a new tuple set
      is returned.

    Raises ``errors.QueryError`` if no valid cast exists.
    """
    assert isinstance(orig_stype, s_types.Tuple)

    # Make sure the source tuple expression is pinned in the scope,
    # so that we don't generate a cross-product of it by evaluating
    # the tuple indirections.
    pathctx.register_set_in_scope(ir_set, ctx=ctx)

    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)
    orig_subtypes = dict(orig_stype.iter_subtypes(ctx.env.schema))

    if direct_cast is not None:
        # Direct casting to non-tuple involves casting each tuple
        # element and also keeping the cast around the whole tuple.
        # This is to trigger the downstream logic of casting
        # objects (in elements of the tuple).
        elements = []
        for n in orig_subtypes:
            val = setgen.tuple_indirection_set(
                ir_set,
                source=orig_stype,
                ptr_name=n,
                ctx=ctx,
            )
            # Element cast to the (non-tuple) target type.
            val = compile_cast(val, new_stype, ctx=ctx, srcctx=srcctx)
            elements.append(irast.TupleElement(name=n, val=val))

        new_tuple = setgen.new_tuple_set(
            elements,
            named=orig_stype.is_named(ctx.env.schema),
            ctx=ctx,
        )

        return _cast_to_ir(
            new_tuple, direct_cast, orig_stype, new_stype, ctx=ctx)

    if not new_stype.is_tuple(ctx.env.schema):
        raise errors.QueryError(
            f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}',
            context=srcctx)

    assert isinstance(new_stype, s_types.Tuple)
    new_subtypes = list(new_stype.iter_subtypes(ctx.env.schema))
    if len(orig_subtypes) != len(new_subtypes):
        raise errors.QueryError(
            f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}: '
            f'the number of elements is not the same',
            context=srcctx)

    # For tuple-to-tuple casts we generate a new tuple
    # to simplify things on sqlgen side.
    elements = []
    for i, n in enumerate(orig_subtypes):
        val = setgen.tuple_indirection_set(
            ir_set,
            source=orig_stype,
            ptr_name=n,
            ctx=ctx,
        )
        val_type = setgen.get_set_type(val, ctx=ctx)
        new_el_name, new_st = new_subtypes[i]
        if val_type != new_st:
            # Element cast
            val = compile_cast(val, new_st, ctx=ctx, srcctx=srcctx)

        elements.append(irast.TupleElement(name=new_el_name, val=val))

    return setgen.new_tuple_set(
        elements,
        named=new_stype.is_named(ctx.env.schema),
        ctx=ctx,
    )
def _cast_array(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type,
        *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a cast of an array-typed set to *new_stype*.

    If the element types are related by a "from cast" (a trivial SQL-level
    cast), the whole array is cast directly.  Otherwise the array is
    unpacked, each element is cast, and the result is re-aggregated
    (preserving element order via enumerate + ORDER BY).

    Raises:
        errors.QueryError: if no direct cast exists and *new_stype* is
            not an array type.
    """
    assert isinstance(orig_stype, s_types.Array)

    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    if direct_cast is None:
        # No whole-array cast: the target must itself be an array and
        # we will cast element-by-element to its element type.
        if not new_stype.is_array():
            raise errors.QueryError(
                f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
                f'to {new_stype.get_displayname(ctx.env.schema)!r}',
                context=srcctx)
        assert isinstance(new_stype, s_types.Array)
        el_type = new_stype.get_subtypes(ctx.env.schema)[0]
    else:
        # A direct cast exists; new_stype is treated as the element
        # target for the per-element check below.
        el_type = new_stype

    orig_el_type = orig_stype.get_subtypes(ctx.env.schema)[0]

    el_cast = _find_cast(orig_el_type, el_type, srcctx=srcctx, ctx=ctx)

    if el_cast is not None and el_cast.get_from_cast(ctx.env.schema):
        # Simple cast: element cast is trivial, so cast the array value
        # as a whole.
        return _cast_to_ir(
            ir_set, el_cast, orig_stype, new_stype, ctx=ctx)
    else:
        # General case: unpack, cast each element, re-aggregate.
        pathctx.register_set_in_scope(ir_set, ctx=ctx)

        with ctx.new() as subctx:
            # Anchor the source array so the synthesized EdgeQL below
            # can refer to it by alias.
            subctx.anchors = subctx.anchors.copy()
            source_alias = subctx.aliases.get('a')
            subctx.anchors[source_alias] = ir_set

            unpacked = qlast.FunctionCall(
                func=('__std__', 'array_unpack'),
                args=[
                    qlast.Path(
                        steps=[qlast.ObjectRef(name=source_alias)],
                    ),
                ],
            )

            # enumerate() tags each element with its index so the
            # original order can be restored after aggregation.
            enumerated = dispatch.compile(
                qlast.FunctionCall(
                    func=('__std__', 'enumerate'),
                    args=[unpacked],
                ),
                ctx=subctx,
            )

            enumerated_alias = subctx.aliases.get('e')
            subctx.anchors[enumerated_alias] = enumerated
            enumerated_ref = qlast.Path(
                steps=[qlast.ObjectRef(name=enumerated_alias)],
            )

            # array_agg(SELECT <el_type>(e.1) ORDER BY e.0):
            # .1 is the element, .0 is the enumerate index.
            elements = qlast.FunctionCall(
                func=('__std__', 'array_agg'),
                args=[
                    qlast.SelectQuery(
                        result=qlast.TypeCast(
                            expr=qlast.Path(
                                steps=[
                                    enumerated_ref,
                                    qlast.Ptr(
                                        ptr=qlast.ObjectRef(
                                            name='1',
                                        ),
                                    ),
                                ],
                            ),
                            type=typegen.type_to_ql_typeref(
                                el_type,
                                ctx=subctx,
                            ),
                            cardinality_mod=qlast.CardinalityModifier.Required,
                        ),
                        orderby=[
                            qlast.SortExpr(
                                path=qlast.Path(
                                    steps=[
                                        enumerated_ref,
                                        qlast.Ptr(
                                            ptr=qlast.ObjectRef(
                                                name='0',
                                            ),
                                        ),
                                    ],
                                ),
                                direction=qlast.SortOrder.Asc,
                            ),
                        ],
                    ),
                ],
            )

            if el_type.contains_json(subctx.env.schema):
                # presumably an implicit LIMIT would change JSON
                # aggregation semantics — TODO confirm intent.
                subctx.inhibit_implicit_limit = True

            array_ir = dispatch.compile(elements, ctx=subctx)
            assert isinstance(array_ir, irast.Set)

            if direct_cast is not None:
                # Re-apply the whole-array cast on top of the rebuilt
                # array of casted elements.
                ctx.env.schema, array_stype = s_types.Array.from_subtypes(
                    ctx.env.schema, [el_type])
                return _cast_to_ir(
                    array_ir, direct_cast, array_stype, new_stype, ctx=ctx
                )
            else:
                return array_ir
def _validate_op(
        expr: qlast.ConfigOp, *,
        ctx: context.ContextLevel) -> SettingInfo:
    """Validate a CONFIGURE operation and resolve its setting metadata.

    Resolves *expr.name* either as a direct property of ``cfg::Config``
    or as a configuration object type linked from it, and returns a
    SettingInfo describing the setting's type, cardinality and flags.

    Raises:
        errors.QueryError: if the module qualifier is not 'cfg'.
        errors.ConfigurationError: if the name does not resolve to a
            configurable setting, or a system setting is configured
            without CONFIGURE SYSTEM.
    """
    if expr.name.module and expr.name.module != 'cfg':
        raise errors.QueryError(
            'invalid configuration parameter name: module must be either '
            '\'cfg\' or empty',
            context=expr.name.context,
        )

    name = expr.name.name
    cfg_host_type = ctx.env.get_track_schema_type('cfg::Config')
    assert isinstance(cfg_host_type, s_objtypes.ObjectType)
    cfg_type = None

    if isinstance(expr, (qlast.ConfigSet, qlast.ConfigReset)):
        # expr.name is the actual name of the property.
        ptr = cfg_host_type.getptr(ctx.env.schema, name)
        if ptr is not None:
            cfg_type = ptr.get_target(ctx.env.schema)

    if cfg_type is None:
        # Not a direct cfg::Config property.  For SET that is an error;
        # otherwise try resolving the name as a configuration object type.
        if isinstance(expr, qlast.ConfigSet):
            raise errors.ConfigurationError(
                f'unrecognized configuration parameter {name!r}',
                context=expr.context)

        # expr.name is the name of the configuration type
        cfg_type = ctx.env.get_track_schema_type(f'cfg::{name}', default=None)
        if cfg_type is None:
            raise errors.ConfigurationError(
                f'unrecognized configuration object {name!r}',
                context=expr.context)

        assert isinstance(cfg_type, s_objtypes.ObjectType)
        ptr_candidate: Optional[s_pointers.Pointer] = None

        # Walk the type and its ancestors looking for a link that
        # targets it — that link is the config setting.
        mro = [cfg_type] + list(
            cfg_type.get_ancestors(ctx.env.schema).objects(ctx.env.schema))
        for ct in mro:
            ptrs = ctx.env.schema.get_referrers(
                ct, scls_type=s_links.Link, field_name='target')

            if ptrs:
                pointer_link = next(iter(ptrs))
                assert isinstance(pointer_link, s_links.Link)
                ptr_candidate = pointer_link
                break

        if (ptr_candidate is None
                or ptr_candidate.get_source(ctx.env.schema) != cfg_host_type):
            # The referring link must live on cfg::Config itself.
            raise errors.ConfigurationError(
                f'{name!r} cannot be configured directly')

        ptr = ptr_candidate
        # Rebind *name* to the link's short name for reporting below.
        name = ptr.get_shortname(ctx.env.schema).name

    assert isinstance(ptr, s_pointers.Pointer)

    # Annotation values are stored as strings; 'true' marks the flag set.
    sys_attr = ptr.get_annotations(ctx.env.schema).get(
        ctx.env.schema, 'cfg::system', None)
    system = (sys_attr is not None
              and sys_attr.get_value(ctx.env.schema) == 'true')

    cardinality = ptr.get_cardinality(ctx.env.schema)
    assert cardinality is not None

    restart_attr = ptr.get_annotations(ctx.env.schema).get(
        ctx.env.schema, 'cfg::requires_restart', None)
    requires_restart = (restart_attr is not None
                        and restart_attr.get_value(ctx.env.schema) == 'true')

    backend_attr = ptr.get_annotations(ctx.env.schema).get(
        ctx.env.schema, 'cfg::backend_setting', None)
    if backend_attr is not None:
        # The annotation holds a JSON-encoded backend setting name/value.
        backend_setting = json.loads(backend_attr.get_value(ctx.env.schema))
    else:
        backend_setting = None

    if not expr.system and system:
        raise errors.ConfigurationError(
            f'{name!r} is a system-level configuration parameter; '
            f'use "CONFIGURE SYSTEM"')

    return SettingInfo(param_name=name,
                       param_type=cfg_type,
                       cardinality=cardinality,
                       requires_restart=requires_restart,
                       backend_setting=backend_setting)
def _infer_common_type(
        irs: List[irast.Base],
        env: context.Environment) -> Optional[s_types.Type]:
    """Infer the common type of a list of IR expressions.

    Untyped empty sets in *irs* are excluded from inference and then
    amended in place with the inferred common type.  Returns None when
    no common implicitly-castable type or common ancestor exists.

    Raises:
        errors.QueryError: if *irs* is empty, mixes type kinds
            (object/scalar/collection), or contains only empty sets.
    """
    if not irs:
        # BUGFIX: previously this passed context=irs[0].context, which
        # raises IndexError on the empty list before the QueryError is
        # ever constructed.  There is no element to take a context from.
        raise errors.QueryError(
            'cannot determine common type of an empty set',
            context=None)

    types = []
    empties = []

    seen_object = False
    seen_scalar = False
    seen_coll = False

    for i, arg in enumerate(irs):
        if isinstance(arg, irast.EmptySet) and env.set_types[arg] is None:
            # Untyped empty set: defer; it will adopt the common type.
            empties.append(i)
            continue

        t = infer_type(arg, env)
        if isinstance(t, s_abc.Collection):
            seen_coll = True
        elif isinstance(t, s_scalars.ScalarType):
            seen_scalar = True
        else:
            seen_object = True
        types.append(t)

    # Booleans sum as ints; more than one kind present is an error.
    if seen_coll + seen_scalar + seen_object > 1:
        raise errors.QueryError(
            'cannot determine common type',
            context=irs[0].context)

    if not types:
        # All inputs were untyped empty sets.
        raise errors.QueryError(
            'cannot determine common type of an empty set',
            context=irs[0].context)

    common_type = None
    if seen_scalar or seen_coll:
        # Fold the implicit-castability lattice over all types.
        it = iter(types)
        common_type = next(it)
        while True:
            next_type = next(it, None)
            if next_type is None:
                break

            env.schema, common_type = (
                common_type.find_common_implicitly_castable_type(
                    next_type,
                    env.schema,
                )
            )

            if common_type is None:
                break
    else:
        # Object types: use the nearest common ancestor instead.
        common_type = s_utils.get_class_nearest_common_ancestor(
            env.schema,
            cast(Sequence[s_types.InheritingType], types),
        )

    if common_type is None:
        return None

    # Propagate the inferred type into the deferred empty sets.
    for i in empties:
        amend_empty_set_type(
            cast(irast.EmptySet, irs[i]), common_type, env)

    return common_type
def __infer_index(
    ir: irast.IndexIndirection,
    env: context.Environment,
) -> s_types.Type:
    """Infer the result type of an index indirection (``expr[index]``).

    str/bytes/array require an int64-castable index; json accepts an
    int64- or str-castable index.  The result type mirrors the subject
    (element type for arrays).

    Raises:
        errors.QueryError: on an unindexable subject or bad index type.
    """
    node_type = infer_type(ir.expr, env)
    index_type = infer_type(ir.index, env)

    str_t = cast(s_scalars.ScalarType, env.schema.get('std::str'))
    bytes_t = cast(s_scalars.ScalarType, env.schema.get('std::bytes'))
    int_t = cast(s_scalars.ScalarType,
                 env.schema.get('std::int64'))
    json_t = cast(s_scalars.ScalarType, env.schema.get('std::json'))

    result: s_types.Type

    if node_type.issubclass(env.schema, str_t):
        # Indexing a string yields a (one-character) string.
        if not index_type.implicitly_castable_to(int_t, env.schema):
            raise errors.QueryError(
                f'cannot index string by '
                f'{index_type.get_displayname(env.schema)}, '
                f'{int_t.get_displayname(env.schema)} was expected',
                context=ir.index.context)
        result = str_t

    elif node_type.issubclass(env.schema, bytes_t):
        # Indexing bytes yields bytes.
        if not index_type.implicitly_castable_to(int_t, env.schema):
            raise errors.QueryError(
                f'cannot index bytes by '
                f'{index_type.get_displayname(env.schema)}, '
                f'{int_t.get_displayname(env.schema)} was expected',
                context=ir.index.context)
        result = bytes_t

    elif node_type.issubclass(env.schema, json_t):
        # json can be indexed by either an integer (arrays) or a
        # string (objects); the result is json again.
        if not (index_type.implicitly_castable_to(int_t, env.schema) or
                index_type.implicitly_castable_to(str_t, env.schema)):
            raise errors.QueryError(
                f'cannot index json by '
                f'{index_type.get_displayname(env.schema)}, '
                f'{int_t.get_displayname(env.schema)} or '
                f'{str_t.get_displayname(env.schema)} was expected',
                context=ir.index.context)
        result = json_t

    elif isinstance(node_type, s_types.Array):
        # Indexing an array yields its element type.
        if not index_type.implicitly_castable_to(int_t, env.schema):
            raise errors.QueryError(
                f'cannot index array by '
                f'{index_type.get_displayname(env.schema)}, '
                f'{int_t.get_displayname(env.schema)} was expected',
                context=ir.index.context)
        result = node_type.get_subtypes(env.schema)[0]

    # NOTE(review): `and` binds tighter than `or` below, so a bare
    # `anytype` subject passes regardless of the index type, while the
    # `std::anyscalar` case additionally requires an int/str-castable
    # index.  Possibly missing parentheses — confirm intent before
    # changing, as adding them would alter accepted programs.
    elif (node_type.is_any(env.schema) or
            (node_type.is_scalar() and
                str(node_type.get_name(env.schema)) == 'std::anyscalar') and
            (index_type.implicitly_castable_to(int_t, env.schema) or
                index_type.implicitly_castable_to(str_t, env.schema))):
        result = s_pseudo.PseudoType.get(env.schema, 'anytype')

    else:
        raise errors.QueryError(
            f'index indirection cannot be applied to '
            f'{node_type.get_verbosename(env.schema)}',
            context=ir.expr.context)

    return result
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse and apply a block of DDL/migration statements to *schema*.

    Supports START MIGRATION / POPULATE MIGRATION / COMMIT MIGRATION
    blocks as well as plain DDL statements, applying each resulting
    delta plan in order.  Returns the final schema.

    Raises:
        errors.QueryError: on migration commands outside a migration block.
        ValueError: on a statement that is not DDL.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    target_schema = None
    # Migration block state: set between START MIGRATION and
    # COMMIT MIGRATION, reset on commit.
    migration_schema = None
    migration_target = None
    migration_script = []

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                # Lazily load the std schema as the SDL base.
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            # Diff the pre-migration schema against the SDL target and
            # synthesize the DDL that bridges them.
            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            if debug.flags.delta_plan:
                debug.header('Populate Migration Diff')
                debug.dump(migration_diff, schema=schema)

            new_ddl = s_ddl.ddlast_from_delta(
                migration_schema,
                migration_target,
                migration_diff,
            )

            migration_script.extend(new_ddl)

            if debug.flags.delta_plan:
                debug.header('Populate Migration DDL AST')
                text = []
                for cmd in new_ddl:
                    debug.dump(cmd)
                    text.append(edgeql.generate_source(cmd, pretty=True))
                debug.header('Populate Migration DDL Text')
                debug.dump_code(';\n'.join(text) + ';')

        elif isinstance(stmt, qlast.CommitMigration):
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            # Chain the new migration onto the last recorded one, if any.
            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                body=qlast.NestedQLBlock(commands=migration_script),
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            if debug.flags.delta_plan:
                debug.header('Delta Plan')
                debug.dump(ddl_plan, schema=schema)

            # Leave the migration block.
            migration_schema = None
            migration_target = None
            migration_script = []

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block DDL is accumulated, not applied.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            # Apply the accumulated plan for this statement.
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema
def compile_ForQuery(
        qlstmt: qlast.ForQuery, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a FOR ... UNION statement into IR.

    The iterator is declared as a view and registered in the scope
    tree; the UNION body is compiled in a fenced subscope so it is
    evaluated per iterator element.
    """
    with ctx.subquery() as sctx:
        stmt = irast.SelectStmt(context=qlstmt.context)
        init_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

        # As an optimization, if the iterator is a singleton set, use
        # the element directly.
        iterator = qlstmt.iterator
        if isinstance(iterator, qlast.Set) and len(iterator.elements) == 1:
            iterator = iterator.elements[0]

        # Compile the iterator
        iterator_ctx = None
        if (ctx.expr_exposed and ctx.iterator_ctx is not None
                and ctx.iterator_ctx is not sctx):
            iterator_ctx = ctx.iterator_ctx

        ictx = iterator_ctx or sctx

        contains_dml = qlutils.contains_dml(qlstmt.result)
        # If the body contains DML, then we need to prohibit
        # correlation between the iterator and the enclosing
        # query, since the correlation imposes compilation issues
        # we aren't willing to tackle.
        if contains_dml:
            ictx.path_scope.factoring_allowlist.update(
                ctx.iterator_path_ids)
        iterator_view = stmtctx.declare_view(
            iterator,
            s_name.UnqualName(qlstmt.iterator_alias),
            factoring_fence=contains_dml,
            path_id_namespace=ictx.path_id_namespace,
            ctx=ictx,
        )

        iterator_stmt = setgen.new_set_from_set(
            iterator_view, preserve_scope_ns=True, ctx=sctx)
        stmt.iterator_stmt = iterator_stmt

        # The iterator type must be fully determined; `anytype`
        # anywhere in it is an error.
        iterator_type = setgen.get_set_type(iterator_stmt, ctx=ctx)
        anytype = iterator_type.find_any(ctx.env.schema)
        if anytype is not None:
            raise errors.QueryError(
                'FOR statement has iterator of indeterminate type',
                context=ctx.env.type_origins.get(anytype),
            )

        if iterator_ctx is not None and iterator_ctx.stmt is not None:
            # The iterator belongs to an outer statement; hoist it there.
            iterator_ctx.stmt.hoisted_iterators.append(iterator_stmt)

        view_scope_info = sctx.path_scope_map[iterator_view]

        pathctx.register_set_in_scope(
            iterator_stmt,
            path_scope=ictx.path_scope,
            ctx=sctx,
        )

        # Iterator symbol is, by construction, outside of the scope
        # of the UNION argument, but is perfectly legal to be referenced
        # inside a factoring fence that is an immediate child of this
        # scope.
        ictx.path_scope.factoring_allowlist.add(
            stmt.iterator_stmt.path_id)
        sctx.iterator_path_ids |= {stmt.iterator_stmt.path_id}
        node = ictx.path_scope.find_descendant(iterator_stmt.path_id)
        if node is not None:
            # See above about why we need a factoring fence.
            # We need to do this again when we move the branch so
            # as to preserve the fencing.
            # Do this by sticking the iterator subtree onto a branch
            # with a factoring fence.
            if contains_dml:
                node = node.attach_branch()
                node.factoring_fence = True

            node = node.attach_branch()
            node.attach_subtree(view_scope_info.path_scope,
                                context=iterator.context)

        # Compile the body
        with sctx.newscope(fenced=True) as bctx:
            stmt.result = setgen.scoped_set(
                compile_result_clause(
                    qlstmt.result,
                    view_scls=ctx.view_scls,
                    view_rptr=ctx.view_rptr,
                    result_alias=qlstmt.result_alias,
                    view_name=ctx.toplevel_result_view_name,
                    forward_rptr=True,
                    ctx=bctx,
                ),
                ctx=bctx,
            )

        # Inject an implicit limit if appropriate
        if ((ctx.expr_exposed or sctx.stmt is ctx.toplevel_stmt)
                and ctx.implicit_limit):
            stmt.limit = setgen.ensure_set(
                dispatch.compile(
                    qlast.IntegerConstant(value=str(ctx.implicit_limit)),
                    ctx=sctx,
                ),
                ctx=sctx,
            )

        result = fini_stmt(stmt, qlstmt, ctx=sctx, parent_ctx=ctx)

    return result
def _compile_ql_query(
        self,
        ctx: CompileContext,
        ql: qlast.Base) -> dbstate.BaseQuery:
    """Compile a single EdgeQL query AST into an executable query unit.

    Compiles *ql* to IR, then to SQL.  In single-statement mode the
    result is a fully-described dbstate.Query (in/out type descriptors,
    parameter mapping, SQL hash); otherwise a dbstate.SimpleQuery.

    Raises:
        errors.ResultCardinalityMismatchError: if cardinality ONE was
            expected but the query returns MANY.
        errors.QueryError: if a script query declares parameters.
    """
    current_tx = ctx.state.current_tx()
    session_config = current_tx.get_session_config()

    native_out_format = (
        ctx.output_format is
        pg_compiler.OutputFormat.NATIVE)

    single_stmt_mode = ctx.stmt_mode is enums.CompileStatementMode.SINGLE

    # Implicit id/tid shape fields are only injected for native-format
    # single statements.
    implicit_fields = (
        native_out_format and
        single_stmt_mode)

    disable_constant_folding = config.lookup(
        config.get_settings(),
        '__internal_no_const_folding',
        session_config,
        allow_unrecognized=True)

    # the capability to execute transaction or session control
    # commands indicates that session mode is available
    session_mode = ctx.state.capability & (enums.Capability.TRANSACTION |
                                           enums.Capability.SESSION)

    ir = ql_compiler.compile_ast_to_ir(
        ql,
        schema=current_tx.get_schema(),
        modaliases=current_tx.get_modaliases(),
        implicit_tid_in_shapes=implicit_fields,
        implicit_id_in_shapes=implicit_fields,
        disable_constant_folding=disable_constant_folding,
        json_parameters=ctx.json_parameters,
        session_mode=session_mode)

    if ir.cardinality is qltypes.Cardinality.ONE:
        result_cardinality = enums.ResultCardinality.ONE
    else:
        result_cardinality = enums.ResultCardinality.MANY
        if ctx.expected_cardinality_one:
            raise errors.ResultCardinalityMismatchError(
                f'the query has cardinality {result_cardinality} '
                f'which does not match the expected cardinality ONE')

    sql_text, argmap = pg_compiler.compile_ir_to_sql(
        ir,
        pretty=debug.flags.edgeql_compile,
        expected_cardinality_one=ctx.expected_cardinality_one,
        output_format=ctx.output_format)

    sql_bytes = sql_text.encode(defines.EDGEDB_ENCODING)

    if single_stmt_mode:
        # Describe the output type for the client protocol.
        if native_out_format:
            out_type_data, out_type_id = sertypes.TypeSerializer.describe(
                ir.schema, ir.stype,
                ir.view_shapes, ir.view_shapes_metadata)
        else:
            out_type_data, out_type_id = \
                sertypes.TypeSerializer.describe_json()

        in_array_backend_tids: typing.Optional[
            typing.Mapping[int, int]
        ] = None

        if ir.params:
            # Parameters are described as a tuple type; array-typed
            # parameters additionally record their backend element ids.
            array_params = []
            subtypes = [None] * len(ir.params)
            first_param_name = next(iter(ir.params))
            if first_param_name.isdecimal():
                # Positional parameters ($0, $1, ...): the name itself
                # is the tuple slot index.
                named = False
                for param_name, param_type in ir.params.items():
                    idx = int(param_name)
                    subtypes[idx] = (param_name, param_type)
                    if param_type.is_array():
                        el_type = param_type.get_element_type(ir.schema)
                        array_params.append(
                            (idx, el_type.get_backend_id(ir.schema)))
            else:
                # Named parameters: slot index comes from the SQL argmap
                # (argmap positions are 1-based).
                named = True
                for param_name, param_type in ir.params.items():
                    idx = argmap[param_name] - 1
                    subtypes[idx] = (param_name, param_type)
                    if param_type.is_array():
                        el_type = param_type.get_element_type(ir.schema)
                        array_params.append(
                            (idx, el_type.get_backend_id(ir.schema)))

            params_type = s_types.Tuple.create(
                ir.schema,
                element_types=collections.OrderedDict(subtypes),
                named=named)

            if array_params:
                in_array_backend_tids = {p[0]: p[1] for p in array_params}
        else:
            params_type = s_types.Tuple.create(
                ir.schema, element_types={}, named=False)

        in_type_data, in_type_id = sertypes.TypeSerializer.describe(
            ir.schema, params_type, {}, {})

        in_type_args = None
        if ctx.json_parameters:
            # JSON mode callers need the argument names in positional
            # order to build the argument tuple.
            in_type_args = [None] * len(argmap)
            for argname, argpos in argmap.items():
                in_type_args[argpos - 1] = argname

        sql_hash = self._hash_sql(
            sql_bytes,
            mode=str(ctx.output_format).encode(),
            intype=in_type_id.bytes,
            outtype=out_type_id.bytes)

        return dbstate.Query(
            sql=(sql_bytes,),
            sql_hash=sql_hash,
            cardinality=result_cardinality,
            in_type_id=in_type_id.bytes,
            in_type_data=in_type_data,
            in_type_args=in_type_args,
            in_array_backend_tids=in_array_backend_tids,
            out_type_id=out_type_id.bytes,
            out_type_data=out_type_data,
        )

    else:
        if ir.params:
            raise errors.QueryError(
                'EdgeQL script queries cannot accept parameters')

        return dbstate.SimpleQuery(sql=(sql_bytes,))
def compile_query_subject(
        expr: irast.Set, *,
        shape: Optional[List[qlast.ShapeElement]]=None,
        view_rptr: Optional[context.ViewRPtr]=None,
        view_name: Optional[s_name.QualName]=None,
        result_alias: Optional[str]=None,
        view_scls: Optional[s_types.Type]=None,
        compile_views: bool=True,
        is_insert: bool=False,
        is_update: bool=False,
        is_delete: bool=False,
        parser_context: Optional[pctx.ParserContext]=None,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile the subject of a query statement, processing its shape.

    Detects pointer aliases, derives a view class from *shape* when
    needed, compiles view shapes, and returns the (possibly re-typed)
    subject set.

    Raises:
        errors.QueryError: if a shape is applied to a non-object type.
    """
    expr_stype = setgen.get_set_type(expr, ctx=ctx)
    expr_rptr = expr.rptr

    # Skip over type intersections ([IS Foo]) to the underlying pointer.
    while isinstance(expr_rptr, irast.TypeIntersectionPointer):
        expr_rptr = expr_rptr.source.rptr

    is_ptr_alias = (
        view_rptr is not None
        and view_rptr.ptrcls is None
        and view_rptr.ptrcls_name is not None
        and expr_rptr is not None
        and expr_rptr.direction is s_pointers.PointerDirection.Outbound
        and expr_rptr.source.rptr is None
        and (
            view_rptr.source.get_bases(
                ctx.env.schema).first(ctx.env.schema).id
            == expr_rptr.source.typeref.id
        )
        and (
            view_rptr.ptrcls_is_linkprop
            == (expr_rptr.ptrref.source_ptr is not None)
        )
    )

    if is_ptr_alias:
        assert view_rptr is not None
        assert expr_rptr is not None
        # We are inside an expression that defines a link alias in
        # the parent shape, ie. Spam { alias := Spam.bar }, so
        # `Spam.alias` should be a subclass of `Spam.bar` inheriting
        # its properties.
        base_ptrcls = typegen.ptrcls_from_ptrref(expr_rptr.ptrref, ctx=ctx)
        if isinstance(base_ptrcls, s_pointers.Pointer):
            view_rptr.base_ptrcls = base_ptrcls
            view_rptr.ptrcls_is_alias = True

    if (ctx.expr_exposed
            and viewgen.has_implicit_type_computables(
                expr_stype,
                is_mutation=is_insert or is_update or is_delete,
                ctx=ctx,
            )
            and shape is None
            and expr_stype not in ctx.env.view_shapes):
        # Force the subject to be compiled as a view if a __tid__
        # insertion is anticipated (the actual decision is taken
        # by the compile_view_shapes() flow).
        shape = []

    if shape is not None and view_scls is None:
        if (view_name is None and
                isinstance(result_alias, s_name.QualName)):
            view_name = result_alias

        if not isinstance(expr_stype, s_objtypes.ObjectType):
            raise errors.QueryError(
                f'shapes cannot be applied to '
                f'{expr_stype.get_verbosename(ctx.env.schema)}',
                context=parser_context,
            )

        view_scls = viewgen.process_view(
            stype=expr_stype,
            path_id=expr.path_id,
            elements=shape,
            view_rptr=view_rptr,
            view_name=view_name,
            is_insert=is_insert,
            is_update=is_update,
            is_delete=is_delete,
            parser_context=expr.context,
            ctx=ctx,
        )

    if view_scls is not None:
        # Re-type the subject set as the derived view class.
        expr = setgen.ensure_set(expr, type_override=view_scls, ctx=ctx)
        expr_stype = view_scls

    if compile_views:
        rptr = view_rptr.rptr if view_rptr is not None else None

        if is_update:
            with ctx.new() as subctx:
                subctx.compiling_update_shape = True
                viewgen.compile_view_shapes(expr, rptr=rptr, ctx=subctx)
        else:
            viewgen.compile_view_shapes(expr, rptr=rptr, ctx=ctx)

    if (shape is not None or view_scls is not None) and \
            len(expr.path_id) == 1:
        # A top-level (length-1 path) view overrides the class view
        # for its type in this context.
        ctx.class_view_overrides[expr.path_id.target.id] = expr_stype

    return expr
def _compile(self, *,
             ctx: CompileContext,
             eql: bytes) -> typing.List[dbstate.QueryUnit]:
    """Compile a block of EdgeQL into a list of QueryUnits.

    Parses *eql*, dispatches each statement to the appropriate
    sub-compiler, and folds the results into QueryUnits, starting a
    new unit around single-unit transaction-control statements.

    Raises:
        errors.ProtocolError: on statement-count violations for the
            current compile mode.
        errors.QueryError / errors.InternalServerError: on invalid
            statement combinations or internal consistency failures.
    """
    # When True it means that we're compiling for "connection.fetchall()".
    # That means that the returned QueryUnit has to have the in/out codec
    # information, correctly inferred "singleton_result" field etc.
    single_stmt_mode = ctx.stmt_mode is enums.CompileStatementMode.SINGLE

    default_cardinality = enums.ResultCardinality.NOT_APPLICABLE

    eql = eql.decode()

    statements = edgeql.parse_block(eql)
    statements_len = len(statements)

    if ctx.stmt_mode is enums.CompileStatementMode.SKIP_FIRST:
        statements = statements[1:]
        if not statements:  # pragma: no cover
            # Shouldn't ever happen as the server tracks the number
            # of statements (via the "try_compile_rollback()" method)
            # before using SKIP_FIRST.
            raise errors.ProtocolError(
                f'no statements to compile in SKIP_FIRST mode')
    elif single_stmt_mode and statements_len != 1:
        raise errors.ProtocolError(
            f'expected one statement, got {statements_len}')

    if not len(statements):  # pragma: no cover
        raise errors.ProtocolError('nothing to compile')

    units = []
    unit = None

    for stmt in statements:
        comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)

        if unit is not None:
            # Single-unit tx-control statements close the current unit
            # before being placed in a fresh one.
            if (isinstance(comp, dbstate.TxControlQuery) and
                    comp.single_unit):
                units.append(unit)
                unit = None

        if unit is None:
            unit = dbstate.QueryUnit(
                dbver=ctx.state.dbver,
                sql=(),
                status=status.get_status(stmt),
                cardinality=default_cardinality)
        else:
            # The unit reports the status of its last statement.
            unit.status = status.get_status(stmt)

        if isinstance(comp, dbstate.Query):
            if single_stmt_mode:
                # Copy the full codec/cache description into the unit.
                unit.sql = comp.sql
                unit.sql_hash = comp.sql_hash

                unit.out_type_data = comp.out_type_data
                unit.out_type_id = comp.out_type_id
                unit.in_type_data = comp.in_type_data
                unit.in_type_args = comp.in_type_args
                unit.in_type_id = comp.in_type_id
                unit.in_array_backend_tids = comp.in_array_backend_tids

                unit.cacheable = True

                unit.cardinality = comp.cardinality
            else:
                unit.sql += comp.sql

        elif isinstance(comp, dbstate.SimpleQuery):
            assert not single_stmt_mode
            unit.sql += comp.sql

        elif isinstance(comp, dbstate.DDLQuery):
            unit.sql += comp.sql
            unit.has_ddl = True
            unit.new_types = comp.new_types

        elif isinstance(comp, dbstate.TxControlQuery):
            unit.sql += comp.sql
            unit.cacheable = comp.cacheable

            if comp.modaliases is not None:
                unit.modaliases = comp.modaliases

            if comp.action == dbstate.TxAction.START:
                if unit.tx_id is not None:
                    raise errors.InternalServerError(
                        'already in transaction')
                unit.tx_id = ctx.state.current_tx().id
            elif comp.action == dbstate.TxAction.COMMIT:
                unit.tx_commit = True
            elif comp.action == dbstate.TxAction.ROLLBACK:
                unit.tx_rollback = True
            elif comp.action is dbstate.TxAction.ROLLBACK_TO_SAVEPOINT:
                unit.tx_savepoint_rollback = True

            if comp.single_unit:
                units.append(unit)
                unit = None

        elif isinstance(comp, dbstate.SessionStateQuery):
            unit.sql += comp.sql

            if comp.is_system_setting:
                # CONFIGURE SYSTEM must be the only statement and
                # outside any explicit transaction.
                if (not ctx.state.current_tx().is_implicit() or
                        statements_len > 1):
                    raise errors.QueryError(
                        'CONFIGURE SYSTEM cannot be executed in a '
                        'transaction block')

                unit.system_config = True
            if comp.is_backend_setting:
                unit.backend_config = True
            if comp.requires_restart:
                unit.config_requires_restart = True

            if ctx.state.current_tx().is_implicit():
                unit.modaliases = ctx.state.current_tx().get_modaliases()

            if comp.config_op is not None:
                if unit.config_ops is None:
                    unit.config_ops = []
                unit.config_ops.append(comp.config_op)

            unit.has_set = True

        else:  # pragma: no cover
            raise errors.InternalServerError('unknown compile state')

    if unit is not None:
        units.append(unit)

    if single_stmt_mode:
        if len(units) != 1:  # pragma: no cover
            raise errors.InternalServerError(
                f'expected 1 compiled unit; got {len(units)}')

    for unit in units:  # pragma: no cover
        # Sanity checks

        na_cardinality = (
            unit.cardinality is
            enums.ResultCardinality.NOT_APPLICABLE)
        if unit.cacheable and (unit.config_ops or unit.modaliases):
            raise errors.InternalServerError(
                f'QueryUnit {unit!r} is cacheable but has config/aliases')
        if not unit.sql:
            raise errors.InternalServerError(
                f'QueryUnit {unit!r} has no SQL commands in it')
        # NOTE(review): `unit.out_type_id is sertypes.NULL_TYPE_ID`
        # relies on identity comparison — presumably NULL_TYPE_ID is a
        # shared constant object; confirm `==` isn't intended here.
        if not na_cardinality and (
                len(unit.sql) > 1 or
                unit.tx_commit or
                unit.tx_rollback or
                unit.tx_savepoint_rollback or
                unit.out_type_id is sertypes.NULL_TYPE_ID or
                unit.system_config or
                unit.config_ops or
                unit.modaliases or
                unit.has_set or
                unit.has_ddl or
                not unit.sql_hash):
            raise errors.InternalServerError(
                f'unit has invalid "cardinality": {unit!r}')

    return units
def compile_insert_unless_conflict_on(
    stmt: irast.InsertStmt,
    insert_subject: qlast.Path,
    constraint_spec: qlast.Expr,
    else_branch: Optional[qlast.Expr],
    *, ctx: context.ContextLevel,
) -> irast.OnConflictClause:
    """Compile INSERT ... UNLESS CONFLICT ON <property> [ELSE ...].

    Validates that *constraint_spec* names a single exclusive property
    of the inserted type, builds the conflict-detection SELECT, and
    compiles the optional ELSE branch.

    Raises:
        errors.QueryError: if the spec is not a single exclusive
            property of the inserted type.
    """
    with ctx.new() as constraint_ctx:
        constraint_ctx.partial_path_prefix = stmt.subject

        # We compile the name here so we can analyze it, but we don't do
        # anything else with it.
        cspec_res = setgen.ensure_set(dispatch.compile(
            constraint_spec, ctx=constraint_ctx), ctx=constraint_ctx)

    if not cspec_res.rptr:
        raise errors.QueryError(
            'UNLESS CONFLICT argument must be a property',
            context=constraint_spec.context,
        )

    if cspec_res.rptr.source.path_id != stmt.subject.path_id:
        raise errors.QueryError(
            'UNLESS CONFLICT argument must be a property of the '
            'type being inserted',
            context=constraint_spec.context,
        )

    schema = ctx.env.schema
    # Schema is threaded through each resolution call below; order matters.
    schema, typ = typeutils.ir_typeref_to_type(schema, stmt.subject.typeref)
    assert isinstance(typ, s_objtypes.ObjectType)
    real_typ = typ.get_nearest_non_derived_parent(schema)

    schema, ptr = (
        typeutils.ptrcls_from_ptrref(cspec_res.rptr.ptrref,
                                     schema=schema))
    if not isinstance(ptr, s_pointers.Pointer):
        raise errors.QueryError(
            'UNLESS CONFLICT property must be a property',
            context=constraint_spec.context,
        )

    ptr = ptr.get_nearest_non_derived_parent(schema)
    ptr_card = ptr.get_cardinality(schema)
    if not ptr_card.is_single():
        raise errors.QueryError(
            'UNLESS CONFLICT property must be a SINGLE property',
            context=constraint_spec.context,
        )

    # The property must carry exactly one std::exclusive constraint;
    # that constraint identifies the conflict.
    exclusive_constr = schema.get('std::exclusive', type=s_constr.Constraint)
    ex_cnstrs = [c for c in ptr.get_constraints(schema).objects(schema)
                 if c.issubclass(schema, exclusive_constr)]

    if len(ex_cnstrs) != 1:
        raise errors.QueryError(
            'UNLESS CONFLICT property must have a single exclusive '
            'constraint',
            context=constraint_spec.context,
        )

    module_id = schema.get_global(
        s_mod.Module, ptr.get_name(schema).module).id

    field_name = cspec_res.rptr.ptrref.shortname

    ds = {field_name.name: (ptr, ex_cnstrs)}
    select_ir = compile_insert_unless_conflict_select(
        stmt, insert_subject, real_typ, constrs=ds,
        obj_constrs=[], parser_context=stmt.context, ctx=ctx)

    # Compile an else branch
    else_ir = None
    if else_branch:
        # The ELSE needs to be able to reference the subject in an
        # UPDATE, even though that would normally be prohibited.
        ctx.path_scope.factoring_allowlist.add(stmt.subject.path_id)

        # Compile else
        else_ir = dispatch.compile(
            astutils.ensure_qlstmt(else_branch), ctx=ctx)
        assert isinstance(else_ir, irast.Set)

    return irast.OnConflictClause(
        constraint=irast.ConstraintRef(
            id=ex_cnstrs[0].id, module_id=module_id),
        select_ir=select_ir,
        else_ir=else_ir)
def __infer_index(ir, env):
    """Infer the result type of an index indirection (``expr[index]``).

    str/bytes/array subjects require an int64-castable index; json also
    accepts a str-castable index.  The result mirrors the subject type
    (element type for arrays).
    """
    schema = env.schema
    node_type = infer_type(ir.expr, env)
    index_type = infer_type(ir.index, env)

    str_t = schema.get('std::str')
    bytes_t = schema.get('std::bytes')
    int_t = schema.get('std::int64')
    json_t = schema.get('std::json')

    def check_index(kind, *expected):
        # Reject the index unless it is implicitly castable to one of
        # the *expected* types.
        if any(index_type.implicitly_castable_to(t, schema)
               for t in expected):
            return
        exp_names = ' or '.join(
            t.get_displayname(schema) for t in expected)
        raise errors.QueryError(
            f'cannot index {kind} by '
            f'{index_type.get_displayname(schema)}, '
            f'{exp_names} was expected',
            context=ir.index.context)

    if node_type.issubclass(schema, str_t):
        check_index('string', int_t)
        return str_t

    if node_type.issubclass(schema, bytes_t):
        check_index('bytes', int_t)
        return bytes_t

    if node_type.issubclass(schema, json_t):
        check_index('json', int_t, str_t)
        return json_t

    if isinstance(node_type, s_abc.Array):
        check_index('array', int_t)
        return node_type.get_subtypes(schema)[0]

    # NOTE(review): `and` binds tighter than `or` below (preserved from
    # the original) — a bare anytype subject matches regardless of the
    # index type; confirm whether parentheses were intended.
    if (node_type.is_any()
            or (node_type.is_scalar()
                and node_type.get_name(schema) == 'std::anyscalar')
            and (index_type.implicitly_castable_to(int_t, schema)
                 or index_type.implicitly_castable_to(str_t, schema))):
        return s_pseudo.Any.instance

    raise errors.QueryError(
        f'index indirection cannot be applied to '
        f'{node_type.get_verbosename(schema)}',
        context=ir.index.context)
def compile_InsertQuery(
        expr: qlast.InsertQuery, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile an INSERT statement into an irast.InsertStmt set.

    Validates the insert target (must be a concrete, non-alias object
    type), compiles the shape with self-references banned, and handles
    the optional UNLESS CONFLICT clause.

    Raises:
        errors.QueryError: inside a conditional expression, or when
            inserting into an abstract type or expression alias.
    """
    if ctx.in_conditional is not None:
        raise errors.QueryError(
            'INSERT statements cannot be used inside conditional '
            'expressions',
            context=expr.context,
        )

    # Record this node in the list of potential DML expressions.
    ctx.env.dml_exprs.append(expr)

    with ctx.subquery() as ictx:
        stmt = irast.InsertStmt(context=expr.context)
        init_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

        subject = dispatch.compile(expr.subject, ctx=ictx)
        assert isinstance(subject, irast.Set)

        subject_stype = setgen.get_set_type(subject, ctx=ictx)
        if subject_stype.get_abstract(ctx.env.schema):
            raise errors.QueryError(
                f'cannot insert into abstract '
                f'{subject_stype.get_verbosename(ctx.env.schema)}',
                context=expr.subject.context)

        if subject_stype.is_view(ctx.env.schema):
            raise errors.QueryError(
                f'cannot insert into expression alias '
                f'{str(subject_stype.get_shortname(ctx.env.schema))!r}',
                context=expr.subject.context)

        with ictx.new() as bodyctx:
            # Self-references in INSERT are prohibited.
            bodyctx.banned_paths = ictx.banned_paths.copy()
            pathctx.ban_path(subject.path_id, ctx=bodyctx)

            bodyctx.class_view_overrides = ictx.class_view_overrides.copy()
            # Implicit shape injections don't apply to INSERT bodies.
            bodyctx.implicit_id_in_shapes = False
            bodyctx.implicit_tid_in_shapes = False
            bodyctx.implicit_tname_in_shapes = False
            bodyctx.implicit_limit = 0

            stmt.subject = compile_query_subject(
                subject,
                shape=expr.shape,
                view_rptr=ctx.view_rptr,
                compile_views=True,
                result_alias=expr.subject_alias,
                is_insert=True,
                ctx=bodyctx)

        if expr.unless_conflict is not None:
            constraint_spec, else_branch = expr.unless_conflict

            if constraint_spec:
                # UNLESS CONFLICT ON <property> [ELSE ...]
                stmt.on_conflict = compile_insert_unless_conflict_on(
                    stmt, expr.subject, constraint_spec, else_branch,
                    ctx=ictx)
            else:
                # Bare UNLESS CONFLICT.
                stmt.on_conflict = compile_insert_unless_conflict(
                    stmt, expr.subject, ctx=ictx)

        stmt_subject_stype = setgen.get_set_type(subject, ctx=ictx)

        # The statement's result is the materialized (non-view) type of
        # the subject.
        result = setgen.class_set(
            schemactx.get_material_type(stmt_subject_stype, ctx=ctx),
            path_id=stmt.subject.path_id,
            ctx=ctx,
        )

        with ictx.new() as resultctx:
            if ictx.stmt is ctx.toplevel_stmt:
                resultctx.expr_exposed = True

            stmt.result = compile_query_subject(
                result,
                view_scls=ctx.view_scls,
                view_name=ctx.toplevel_result_view_name,
                compile_views=ictx.stmt is ictx.toplevel_stmt,
                ctx=resultctx,
            )

        result = fini_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

    return result
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    anchors: Optional[
        Mapping[
            Union[str, qlast.SpecialAnchorT],
            Union[irast.Base, s_obj.Object],
        ]
    ] = None,
    path_prefix_anchor: Optional[qlast.SpecialAnchorT] = None,
    singletons: Sequence[s_types.Type] = (),
    func_params: Optional[s_func.ParameterLikeList] = None,
    result_view_name: Optional[s_name.SchemaName] = None,
    derived_target_module: Optional[str] = None,
    parent_object_type: Optional[s_obj.ObjectMeta] = None,
    implicit_limit: int = 0,
    implicit_id_in_shapes: bool = False,
    implicit_tid_in_shapes: bool = False,
    schema_view_mode: bool = False,
    session_mode: bool = False,
    disable_constant_folding: bool = False,
    json_parameters: bool = False,
    allow_generic_type_output: bool = False,
) -> irast.Command:
    """Compile given EdgeQL AST into EdgeDB IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        modaliases:
            Module name resolution table.  Useful when this EdgeQL
            expression is part of some other construct, such as a
            DDL statement.

        anchors:
            Predefined symbol table.  Maps identifiers
            (or ``qlast.SpecialAnchor`` instances) to specified
            schema objects or IR fragments.

        path_prefix_anchor:
            Symbol name used to resolve the prefix of abbreviated
            path expressions by default.  The symbol must be present
            in *anchors*.

        singletons:
            An optional set of schema types that should be treated
            as singletons in the context of this compilation.

        func_params:
            When compiling a function body, specifies function parameter
            definitions.

        result_view_name:
            Optionally defines the name of the topmost generated view type.
            Useful when compiling schema views.

        derived_target_module:
            The name of the module where derived types and pointers should
            be placed.  When compiling a schema view, this would be the
            name of the module where the view is defined.  By default,
            the special ``__derived__`` module is used.

        parent_object_type:
            Optionaly specifies the class of the schema object, in the
            context of which this expression is compiled.  Used in schema
            definitions.

        implicit_limit:
            If set to a non-zero integer value, this will be injected
            as an implicit `LIMIT` clause into each read query.

        implicit_id_in_shapes:
            Whether to include object id property in shapes by default.

        implicit_tid_in_shapes:
            Whether to implicitly include object type id in shapes as
            the ``__tid__`` computable.

        schema_view_mode:
            When compiling a schema view, set this to ``True``.

        session_mode:
            When ``True``, assumes that the expression is compiled in
            the presence of a persistent database session.  Otherwise,
            the use of functions and other constructs that require a
            persistent session will trigger an error.

        disable_constant_folding:
            When ``True``, the compile-time evaluation and substitution
            of constant expressions is disabled.

        json_parameters:
            When ``True``, the argument values are assumed to be in JSON
            format.

        allow_generic_type_output:
            If ``True``, allows the expression to return a generic type.
            By default, expressions must resolve into concrete types.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if debug.flags.edgeql_compile:
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    # Build the compilation context carrying all of the options above.
    ctx = stmtctx.init_context(
        schema=schema,
        anchors=anchors,
        singletons=singletons,
        modaliases=modaliases,
        func_params=func_params,
        derived_target_module=derived_target_module,
        result_view_name=result_view_name,
        implicit_limit=implicit_limit,
        implicit_id_in_shapes=implicit_id_in_shapes,
        implicit_tid_in_shapes=implicit_tid_in_shapes,
        schema_view_mode=schema_view_mode,
        disable_constant_folding=disable_constant_folding,
        json_parameters=json_parameters,
        session_mode=session_mode,
        allow_generic_type_output=allow_generic_type_output,
        parent_object_type=parent_object_type,
    )

    if path_prefix_anchor is not None:
        # The prefix anchor must resolve to a schema type provided
        # via *anchors*.
        assert anchors is not None
        path_prefix = anchors[path_prefix_anchor]
        assert isinstance(path_prefix, s_types.Type)
        ctx.partial_path_prefix = setgen.class_set(path_prefix, ctx=ctx)
        ctx.partial_path_prefix.anchor = path_prefix_anchor
        ctx.partial_path_prefix.show_as_anchor = path_prefix_anchor

    ir_set = dispatch.compile(tree, ctx=ctx)
    ir_expr = stmtctx.fini_expression(ir_set, ctx=ctx)

    if ctx.env.query_parameters:
        first_argname = next(iter(ctx.env.query_parameters))
        if first_argname.isdecimal():
            # Positional parameters: names must form a contiguous
            # 0..N-1 range; report any gaps.
            args_decnames = {int(arg) for arg in ctx.env.query_parameters}
            args_tpl = set(range(len(ctx.env.query_parameters)))
            if args_decnames != args_tpl:
                missing_args = args_tpl - args_decnames
                missing_args_repr = ', '.join(f'${a}' for a in missing_args)
                raise errors.QueryError(
                    f'missing {missing_args_repr} positional argument'
                    f'{"s" if len(missing_args) > 1 else ""}')

    if debug.flags.edgeql_compile:
        debug.header('Scope Tree')
        if ctx.path_scope is not None:
            print(ctx.path_scope.pdebugformat())
        else:
            print('N/A')
        debug.header('EdgeDB IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
def compile_UpdateQuery(
        expr: qlast.UpdateQuery, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile an UPDATE statement into an ``irast.UpdateStmt`` wrapped set.

    Verifies the subject is an object type, compiles the WHERE clause and
    the update shape, then finalizes the result view.
    """
    # UPDATE is not allowed inside conditional expressions.
    if ctx.in_conditional is not None:
        raise errors.QueryError(
            'UPDATE statements cannot be used inside conditional expressions',
            context=expr.context,
        )

    # Record this node in the list of potential DML expressions.
    ctx.env.dml_exprs.append(expr)

    with ctx.subquery() as subctx:
        update_stmt = irast.UpdateStmt(context=expr.context)
        init_stmt(update_stmt, expr, ctx=subctx, parent_ctx=ctx)

        subject_set = dispatch.compile(expr.subject, ctx=subctx)
        assert isinstance(subject_set, irast.Set)

        # Only object types may be updated.
        subject_type = inference.infer_type(subject_set, subctx.env)
        if not isinstance(subject_type, s_objtypes.ObjectType):
            raise errors.QueryError(
                f'cannot update non-ObjectType objects',
                context=expr.subject.context
            )

        subctx.partial_path_prefix = subject_set

        clauses.compile_where_clause(
            update_stmt, expr.where, ctx=subctx)

        with subctx.new() as shapectx:
            shapectx.class_view_overrides = subctx.class_view_overrides.copy()
            # Suppress implicit shape pointers and implicit LIMIT
            # inside the update shape.
            shapectx.implicit_id_in_shapes = False
            shapectx.implicit_tid_in_shapes = False
            shapectx.implicit_tname_in_shapes = False
            shapectx.implicit_limit = 0

            update_stmt.subject = compile_query_subject(
                subject_set,
                shape=expr.shape,
                view_rptr=ctx.view_rptr,
                compile_views=True,
                result_alias=expr.subject_alias,
                is_update=True,
                ctx=shapectx)

        # The result is the materialized subject type on the same path.
        material_type = schemactx.get_material_type(
            setgen.get_set_type(subject_set, ctx=subctx), ctx=ctx)
        result_set = setgen.class_set(
            material_type,
            path_id=update_stmt.subject.path_id,
            ctx=ctx,
        )

        with subctx.new() as resctx:
            if subctx.stmt is ctx.toplevel_stmt:
                resctx.expr_exposed = True

            update_stmt.result = compile_query_subject(
                result_set,
                view_scls=ctx.view_scls,
                view_name=ctx.toplevel_result_view_name,
                compile_views=subctx.stmt is subctx.toplevel_stmt,
                ctx=resctx,
            )

        final = fini_stmt(update_stmt, expr, ctx=subctx, parent_ctx=ctx)

    return final
def _normalize_view_ptr_expr(
        shape_el: qlast.ShapeElement,
        view_scls: s_types.Type, *,
        path_id: irast.PathId,
        path_id_namespace: typing.Optional[irast.WeakNamespace] = None,
        is_insert: bool = False,
        is_update: bool = False,
        view_rptr: typing.Optional[context.ViewRPtr] = None,
        ctx: context.ContextLevel) -> s_pointers.Pointer:
    """Resolve one shape element of a view into a (possibly derived) pointer.

    Handles plain pointers, link properties, polymorphic ([IS Type].foo)
    and target-type-indirection (foo: Type) steps, and computables
    (:= expressions).  May derive new pointer classes and mutate
    ``ctx.env.schema``; returns the pointer describing the element.
    """
    steps = shape_el.expr.steps
    is_linkprop = False
    is_polymorphic = False
    is_mutation = is_insert or is_update
    # Pointers may be qualified by the explicit source
    # class, which is equivalent to Expr[IS Type].
    plen = len(steps)
    ptrsource = view_scls
    qlexpr = None
    target_typexpr = None
    source: qlast.Base

    if plen >= 2 and isinstance(steps[-1], qlast.TypeIndirection):
        # Target type indirection: foo: Type
        target_typexpr = steps[-1].type
        plen -= 1
        steps = steps[:-1]

    if plen == 1:
        # regular shape
        lexpr = steps[0]
        assert isinstance(lexpr, qlast.Ptr)
        is_linkprop = lexpr.type == 'property'
        if is_linkprop:
            if view_rptr is None:
                raise errors.QueryError(
                    'invalid reference to link property '
                    'in top level shape', context=lexpr.context)
            # Link properties resolve against the enclosing link,
            # not the view type itself.
            ptrsource = view_rptr.ptrcls
        source = qlast.Source()
    elif plen == 2 and isinstance(steps[0], qlast.TypeIndirection):
        # Source type indirection: [IS Type].foo
        source = qlast.Path(steps=[
            qlast.Source(),
            steps[0],
        ])
        lexpr = steps[1]
        ptype = steps[0].type
        if not isinstance(ptype, qlast.TypeName):
            raise errors.QueryError(
                'complex type expressions are not supported here',
                context=ptype.context,
            )
        ptrsource = schemactx.get_schema_type(ptype.maintype, ctx=ctx)
        is_polymorphic = True
    else:  # pragma: no cover
        raise RuntimeError(
            f'unexpected path length in view shape: {len(steps)}')

    assert isinstance(lexpr, qlast.Ptr)
    ptrname = lexpr.ptr.name

    compexpr = shape_el.compexpr
    if compexpr is None and is_insert and shape_el.elements:
        # Short shape form in INSERT, e.g
        #     INSERT Foo { bar: Spam { name := 'name' }}
        # is prohibited.
        raise errors.EdgeQLSyntaxError(
            "unexpected ':'", context=steps[-1].context)

    if compexpr is None:
        # Plain pointer reference (no := computable).
        ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
        if is_polymorphic:
            ptrcls = schemactx.derive_ptr(
                ptrcls, view_scls,
                is_insert=is_insert,
                is_update=is_update,
                ctx=ctx)

        base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(ctx.env.schema)
        base_ptr_is_computable = base_ptrcls in ctx.source_map
        ptr_name = sn.Name(
            module='__',
            name=ptrcls.get_shortname(ctx.env.schema).name,
        )

        # Any clause or indirection forces the element to be compiled
        # as a synthesized sub-query over the pointer.
        if (shape_el.where or shape_el.orderby
                or shape_el.offset or shape_el.limit
                or base_ptr_is_computable
                or is_polymorphic
                or target_typexpr is not None):

            if target_typexpr is None:
                qlexpr = qlast.Path(steps=[source, lexpr])
            else:
                qlexpr = qlast.Path(steps=[
                    source,
                    lexpr,
                    qlast.TypeIndirection(type=target_typexpr),
                ])

            qlexpr = astutils.ensure_qlstmt(qlexpr)
            qlexpr.where = shape_el.where
            qlexpr.orderby = shape_el.orderby
            qlexpr.offset = shape_el.offset
            qlexpr.limit = shape_el.limit

        if target_typexpr is not None:
            ptr_target = schemactx.get_schema_type(
                target_typexpr.maintype, ctx=ctx)
        else:
            ptr_target = ptrcls.get_target(ctx.env.schema)

        if base_ptrcls in ctx.pending_cardinality:
            # We do not know the parent's pointer cardinality yet.
            ptr_cardinality = None
            ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)
            stmtctx.pend_pointer_cardinality_inference(
                ptrcls=ptrcls,
                specified_card=shape_el.cardinality,
                from_parent=True,
                source_ctx=shape_el.context,
                ctx=ctx)
        else:
            ptr_cardinality = base_ptrcls.get_cardinality(ctx.env.schema)

        implicit_tid = has_implicit_tid(
            ptr_target,
            is_mutation=is_mutation,
            ctx=ctx,
        )

        if shape_el.elements or implicit_tid:
            # Nested shape: recurse into _process_view for the target.
            sub_view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                is_insert=is_insert,
                is_update=is_update)

            sub_path_id = pathctx.extend_path_id(
                path_id,
                ptrcls=base_ptrcls,
                target=ptrcls.get_target(ctx.env.schema),
                ns=ctx.path_id_namespace,
                ctx=ctx)

            ctx.path_scope.attach_path(sub_path_id)

            if is_update:
                # Nested UPDATE shapes may only set link properties.
                for subel in shape_el.elements or []:
                    is_prop = (
                        isinstance(subel.expr.steps[0], qlast.Ptr)
                        and subel.expr.steps[0].type == 'property'
                    )
                    if not is_prop:
                        raise errors.QueryError(
                            'only references to link properties are allowed '
                            'in nested UPDATE shapes', context=subel.context)

                ptr_target = _process_view(
                    stype=ptr_target, path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements, is_update=True, ctx=ctx)
            else:
                ptr_target = _process_view(
                    stype=ptr_target, path_id=sub_path_id,
                    path_id_namespace=path_id_namespace,
                    view_rptr=sub_view_rptr,
                    elements=shape_el.elements, ctx=ctx)

    else:
        # Computable (:= expression) element.
        base_ptrcls = ptrcls = None

        if (is_mutation
                and ptrname not in ctx.special_computables_in_mutation_shape):
            # If this is a mutation, the pointer must exist.
            ptrcls = setgen.resolve_ptr(ptrsource, ptrname, ctx=ctx)
            base_ptrcls = ptrcls.get_bases(ctx.env.schema).first(
                ctx.env.schema)
            ptr_name = sn.Name(
                module='__',
                name=ptrcls.get_shortname(ctx.env.schema).name,
            )
        else:
            # Otherwise, assume no pointer inheritance.
            # Every computable is a new pointer derived from
            # std::link or std::property.  There is one exception:
            # pointer aliases (Foo {some := Foo.other}), where `foo`
            # gets derived from `Foo.other`.  This logic is applied
            # in compile_query_subject() by populating the base_ptrcls.
            ptr_name = sn.Name(
                module='__',
                name=ptrname,
            )

        qlexpr = astutils.ensure_qlstmt(compexpr)

        with ctx.newscope(fenced=True) as shape_expr_ctx:
            # Put current pointer class in context, so
            # that references to link properties in sub-SELECT
            # can be resolved.  This is necessary for proper
            # evaluation of link properties on computable links,
            # most importantly, in INSERT/UPDATE context.
            shape_expr_ctx.view_rptr = context.ViewRPtr(
                ptrsource if is_linkprop else view_scls,
                ptrcls=ptrcls,
                ptrcls_name=ptr_name,
                ptrcls_is_linkprop=is_linkprop,
                is_insert=is_insert,
                is_update=is_update)

            shape_expr_ctx.defining_view = True
            shape_expr_ctx.path_scope.unnest_fence = True
            shape_expr_ctx.partial_path_prefix = setgen.class_set(
                view_scls, path_id=path_id, ctx=shape_expr_ctx)

            if is_mutation and ptrcls is not None:
                shape_expr_ctx.expr_exposed = True
                shape_expr_ctx.empty_result_type_hint = \
                    ptrcls.get_target(ctx.env.schema)

            irexpr = dispatch.compile(qlexpr, ctx=shape_expr_ctx)
            irexpr.context = compexpr.context

            if base_ptrcls is None:
                # Pointer-alias case: compile_query_subject() may have
                # populated base_ptrcls on the view_rptr.
                base_ptrcls = shape_expr_ctx.view_rptr.base_ptrcls

        ptr_cardinality = None
        ptr_target = inference.infer_type(irexpr, ctx.env)

        anytype = ptr_target.find_any(ctx.env.schema)
        if anytype is not None:
            raise errors.QueryError(
                'expression returns value of indeterminate type',
                context=ctx.env.type_origins.get(anytype),
            )

        # Validate that the insert/update expression is
        # of the correct class.
        if is_mutation and ptrcls is not None:
            base_target = ptrcls.get_target(ctx.env.schema)
            assert base_target is not None
            if ptr_target.assignment_castable_to(
                    base_target, schema=ctx.env.schema):
                # Force assignment casts if the target type is not a
                # subclass of the base type and the cast is not to an
                # object type.
                if not (base_target.is_object_type()
                        or ptr_target.issubclass(
                            ctx.env.schema, base_target)):
                    qlexpr = astutils.ensure_qlstmt(qlast.TypeCast(
                        type=astutils.type_to_ql_typeref(
                            base_target, schema=ctx.env.schema),
                        expr=compexpr,
                    ))
                    ptr_target = base_target
            else:
                expected = [
                    repr(str(base_target.get_displayname(ctx.env.schema)))
                ]

                ercls: typing.Type[errors.EdgeDBError]
                if ptrcls.is_property(ctx.env.schema):
                    ercls = errors.InvalidPropertyTargetError
                else:
                    ercls = errors.InvalidLinkTargetError

                ptr_vn = ptrcls.get_verbosename(ctx.env.schema,
                                                with_parent=True)

                raise ercls(
                    f'invalid target for {ptr_vn}: '
                    f'{str(ptr_target.get_displayname(ctx.env.schema))!r} '
                    f'(expecting {" or ".join(expected)})')

    if qlexpr is not None or ptrcls is None:
        # The element is a computable or a previously unseen pointer:
        # derive a view-local pointer class for it.
        if is_linkprop:
            # Proper checking was done when is_linkprop is defined.
            assert view_rptr is not None
            src_scls = view_rptr.ptrcls
        else:
            src_scls = view_scls

        if ptr_target.is_object_type():
            base = ctx.env.get_track_schema_object('std::link')
        else:
            base = ctx.env.get_track_schema_object('std::property')

        if base_ptrcls is not None:
            derive_from = base_ptrcls
        else:
            derive_from = base

        derived_name = schemactx.derive_view_name(
            base_ptrcls,
            derived_name_base=ptr_name,
            derived_name_quals=[src_scls.get_name(ctx.env.schema)],
            ctx=ctx)

        existing = ctx.env.schema.get(derived_name, None)
        if existing is not None:
            # A derived pointer with this name already exists; reuse it
            # if the target matches or can be implicitly adjusted.
            existing_target = existing.get_target(ctx.env.schema)
            if ptr_target == existing_target:
                ptrcls = existing
            elif ptr_target.implicitly_castable_to(
                    existing_target, ctx.env.schema):
                ctx.env.schema = existing.set_target(
                    ctx.env.schema, ptr_target)
                ptrcls = existing
            else:
                # Incompatible target: drop the existing derived pointer
                # and re-derive, temporarily detaching the target's rptr
                # so the deletion does not cascade into it.
                target_rptr_set = (
                    ptr_target.get_rptr(ctx.env.schema) is not None
                )

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema, 'rptr', None,
                    )

                ctx.env.schema = existing.delete(ctx.env.schema)
                ptrcls = schemactx.derive_ptr(
                    derive_from, src_scls, ptr_target,
                    is_insert=is_insert,
                    is_update=is_update,
                    derived_name=derived_name,
                    inheritance_merge=False,
                    ctx=ctx)

                if target_rptr_set:
                    ctx.env.schema = ptr_target.set_field_value(
                        ctx.env.schema, 'rptr', ptrcls,
                    )
        else:
            ptrcls = schemactx.derive_ptr(
                derive_from, src_scls, ptr_target,
                is_insert=is_insert,
                is_update=is_update,
                derived_name=derived_name,
                ctx=ctx)

    elif ptrcls.get_target(ctx.env.schema) != ptr_target:
        ctx.env.schema = ptrcls.set_target(ctx.env.schema, ptr_target)

    assert ptrcls is not None

    if qlexpr is None:
        # This is not a computable, just a pointer
        # to a nested shape.  Have it reuse the original
        # pointer name so that in `Foo.ptr.name` and
        # `Foo { ptr: {name}}` are the same path.
        path_id_name = base_ptrcls.get_name(ctx.env.schema)
        ctx.env.schema = ptrcls.set_field_value(
            ctx.env.schema, 'path_id_name', path_id_name)

    if qlexpr is not None:
        # Remember the source expression for later recompilation
        # of the computable.
        ctx.source_map[ptrcls] = (qlexpr, ctx, path_id, path_id_namespace)

    if not is_mutation:
        if ptr_cardinality is None:
            if ptrcls not in ctx.pending_cardinality:
                if qlexpr is not None:
                    from_parent = False
                elif ptrcls is not base_ptrcls:
                    ctx.pointer_derivation_map[base_ptrcls].append(ptrcls)
                    from_parent = True
                else:
                    from_parent = False

                stmtctx.pend_pointer_cardinality_inference(
                    ptrcls=ptrcls,
                    specified_card=shape_el.cardinality,
                    from_parent=from_parent,
                    source_ctx=shape_el.context,
                    ctx=ctx)

            # Cardinality will be filled in by the pending inference.
            ctx.env.schema = ptrcls.set_field_value(
                ctx.env.schema, 'cardinality', None)
        else:
            ctx.env.schema = ptrcls.set_field_value(
                ctx.env.schema, 'cardinality', ptr_cardinality)

    if ptrcls.is_protected_pointer(ctx.env.schema) and qlexpr is not None:
        # Protected pointers (e.g. read-only system pointers) may not be
        # assigned or shadowed by computables.
        ptrcls_sn = ptrcls.get_shortname(ctx.env.schema)
        if is_polymorphic:
            msg = (f'cannot access {ptrcls_sn.name} on a polymorphic '
                   f'shape element')
        else:
            msg = f'cannot assign to {ptrcls_sn.name}'
        raise errors.QueryError(msg, context=shape_el.context)

    return ptrcls
def compile_DeleteQuery(
        expr: qlast.DeleteQuery, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a DELETE statement into an ``irast.DeleteStmt`` wrapped set.

    Expands the sugared form with WHERE/ORDER BY/OFFSET/LIMIT into
    DELETE (SELECT ...), verifies the subject is an object type, and
    finalizes the result view.
    """
    if ctx.in_conditional is not None:
        # DML inside conditional expressions is prohibited.
        raise errors.QueryError(
            'DELETE statements cannot be used inside conditional expressions',
            context=expr.context,
        )

    # Record this node in the list of potential DML expressions.
    ctx.env.dml_exprs.append(expr)

    with ctx.subquery() as ictx:
        stmt = irast.DeleteStmt(context=expr.context)
        # Expand the DELETE from sugar into full DELETE (SELECT ...)
        # form, if there's any additional clauses.
        if any([expr.where, expr.orderby, expr.offset, expr.limit]):
            if expr.offset or expr.limit:
                # OFFSET/LIMIT must apply after filtering/ordering,
                # hence the nested SELECT.
                subjql = qlast.SelectQuery(
                    result=qlast.SelectQuery(
                        result=expr.subject,
                        result_alias=expr.subject_alias,
                        where=expr.where,
                        orderby=expr.orderby,
                        context=expr.context,
                        implicit=True,
                    ),
                    limit=expr.limit,
                    offset=expr.offset,
                    context=expr.context,
                )
            else:
                subjql = qlast.SelectQuery(
                    result=expr.subject,
                    result_alias=expr.subject_alias,
                    where=expr.where,
                    orderby=expr.orderby,
                    offset=expr.offset,
                    limit=expr.limit,
                    context=expr.context,
                )

            expr = qlast.DeleteQuery(
                aliases=expr.aliases,
                context=expr.context,
                subject=subjql,
            )

        init_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

        # DELETE Expr is a delete(SET OF X), so we need a scope fence.
        with ictx.newscope(fenced=True) as scopectx:
            scopectx.implicit_limit = 0
            subject = setgen.scoped_set(
                dispatch.compile(expr.subject, ctx=scopectx), ctx=scopectx)

        subj_type = inference.infer_type(subject, ictx.env)
        if not isinstance(subj_type, s_objtypes.ObjectType):
            raise errors.QueryError(
                f'cannot delete non-ObjectType objects',
                context=expr.subject.context
            )

        with ictx.new() as bodyctx:
            # No implicit shape pointers on the delete subject.
            bodyctx.implicit_id_in_shapes = False
            bodyctx.implicit_tid_in_shapes = False
            bodyctx.implicit_tname_in_shapes = False
            stmt.subject = compile_query_subject(
                subject,
                shape=None,
                is_delete=True,
                ctx=bodyctx,
            )

        stmt_subject_stype = setgen.get_set_type(subject, ctx=ictx)
        # The result set is the materialized subject type on the same path.
        result = setgen.class_set(
            schemactx.get_material_type(stmt_subject_stype, ctx=ctx),
            path_id=stmt.subject.path_id,
            ctx=ctx,
        )

        with ictx.new() as resultctx:
            if ictx.stmt is ctx.toplevel_stmt:
                resultctx.expr_exposed = True

            stmt.result = compile_query_subject(
                result,
                view_scls=ctx.view_scls,
                view_name=ctx.toplevel_result_view_name,
                compile_views=ictx.stmt is ictx.toplevel_stmt,
                ctx=resultctx,
            )

        result = fini_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

    return result
def compile_FunctionCall(
        expr: qlast.Base, *, ctx: context.ContextLevel) -> irast.Base:
    """Compile a function-call AST node into an ``irast.FunctionCall`` set.

    Resolves the function by name, matches a callable variant against the
    compiled arguments, and builds the IR node with tuple path_ids for
    tuple-returning functions.

    NOTE(review): this appears to be an older revision of the
    compile_FunctionCall defined earlier in this file — confirm which one
    is actually registered with the dispatcher.
    """
    env = ctx.env

    if isinstance(expr.func, str):
        if ctx.func is not None:
            # Inside a function body, a bare identifier matching a
            # parameter name must not be treated as a function call.
            ctx_func_params = ctx.func.get_params(env.schema)
            if ctx_func_params.get_by_name(env.schema, expr.func):
                raise errors.QueryError(
                    f'parameter `{expr.func}` is not callable',
                    context=expr.context)

        funcname = expr.func
    else:
        # expr.func is a (module, name) pair.
        funcname = sn.Name(expr.func[1], expr.func[0])

    funcs = env.schema.get_functions(funcname, module_aliases=ctx.modaliases)

    if funcs is None:
        raise errors.QueryError(
            f'could not resolve function name {funcname}',
            context=expr.context)

    args, kwargs = compile_call_args(expr, funcname, ctx=ctx)
    matched = polyres.find_callable(funcs, args=args, kwargs=kwargs, ctx=ctx)
    if not matched:
        raise errors.QueryError(
            f'could not find a function variant {funcname}',
            context=expr.context)
    elif len(matched) > 1:
        raise errors.QueryError(
            f'function {funcname} is not unique',
            context=expr.context)
    else:
        matched_call = matched[0]

    args, params_typemods = finalize_args(matched_call, ctx=ctx)

    matched_func_params = matched_call.func.get_params(env.schema)
    variadic_param = matched_func_params.find_variadic(env.schema)
    variadic_param_type = None
    if variadic_param is not None:
        variadic_param_type = irtyputils.type_to_typeref(
            env.schema, variadic_param.get_type(env.schema))

    matched_func_ret_type = matched_call.func.get_return_type(env.schema)
    # Polymorphic only if both some parameter AND the return type are
    # polymorphic.
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_func_params.objects(env.schema))
        and matched_func_ret_type.is_polymorphic(env.schema)
    )

    matched_func_initial_value = matched_call.func.get_initial_value(
        env.schema)

    func = matched_call.func
    func_name = func.get_shortname(env.schema)

    if matched_func_initial_value is not None:
        # The initial value is stored as EdgeQL text; re-parse and cast
        # it to the resolved return type.
        iv_ql = qlast.TypeCast(
            expr=qlparser.parse_fragment(matched_func_initial_value.text),
            type=typegen.type_to_ql_typeref(matched_call.return_type,
                                            ctx=ctx),
        )
        func_initial_value = dispatch.compile(iv_ql, ctx=ctx)
    else:
        func_initial_value = None

    rtype = matched_call.return_type
    path_id = pathctx.get_expression_path_id(rtype, ctx=ctx)

    if rtype.is_tuple():
        # Pre-compute indirection path_ids for tuple elements (one level
        # of nesting handled explicitly).
        tuple_path_ids = []
        nested_path_ids = []
        for n, st in rtype.iter_subtypes(ctx.env.schema):
            elem_path_id = pathctx.get_tuple_indirection_path_id(
                path_id, n, st, ctx=ctx).strip_weak_namespaces()

            if st.is_tuple():
                nested_path_ids.append([
                    pathctx.get_tuple_indirection_path_id(
                        elem_path_id, nn, sst,
                        ctx=ctx).strip_weak_namespaces()
                    for nn, sst in st.iter_subtypes(ctx.env.schema)
                ])

            tuple_path_ids.append(elem_path_id)
        for nested in nested_path_ids:
            tuple_path_ids.extend(nested)
    else:
        tuple_path_ids = None

    fcall = irast.FunctionCall(
        args=args,
        func_module_id=env.schema.get_global(
            s_mod.Module, func_name.module).id,
        func_shortname=func_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=func.get_from_function(env.schema),
        force_return_cast=func.get_force_return_cast(env.schema),
        sql_func_has_out_params=func.get_sql_func_has_out_params(env.schema),
        error_on_null_result=func.get_error_on_null_result(env.schema),
        params_typemods=params_typemods,
        context=expr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=matched_call.func.get_return_typemod(env.schema),
        has_empty_variadic=matched_call.has_empty_variadic,
        variadic_param_type=variadic_param_type,
        func_initial_value=func_initial_value,
        tuple_path_ids=tuple_path_ids,
    )

    return setgen.ensure_set(fcall, typehint=rtype, path_id=path_id, ctx=ctx)
def compile_DescribeStmt(
        ql: qlast.DescribeStmt, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a DESCRIBE statement into a SELECT of the description text.

    Dispatches on the describe target (SCHEMA, DATABASE/SYSTEM CONFIG,
    ROLES, MODULE, or a named schema object), renders the requested
    DDL/SDL/TEXT output and returns it as a string constant set.
    """
    with ctx.subquery() as ictx:
        stmt = irast.SelectStmt()
        init_stmt(stmt, ql, ctx=ictx, parent_ctx=ctx)

        if ql.object is qlast.DescribeGlobal.Schema:
            if ql.language is qltypes.DescribeLanguage.DDL:
                # DESCRIBE SCHEMA
                text = s_ddl.ddl_text_from_schema(
                    ctx.env.schema,
                )
            else:
                raise errors.QueryError(
                    f'cannot describe full schema as {ql.language}')

            ct = typegen.type_to_typeref(
                ctx.env.get_track_schema_type(
                    s_name.QualName('std', 'str')),
                env=ctx.env,
            )

            stmt.result = setgen.ensure_set(
                irast.StringConstant(value=text, typeref=ct),
                ctx=ictx,
            )
        elif ql.object is qlast.DescribeGlobal.DatabaseConfig:
            if ql.language is qltypes.DescribeLanguage.DDL:
                # Delegate to the built-in cfg helper function.
                function_call = dispatch.compile(
                    qlast.FunctionCall(
                        func=('cfg', '_describe_database_config_as_ddl'),
                    ),
                    ctx=ictx)
                assert isinstance(function_call, irast.Set), function_call
                stmt.result = function_call
            else:
                raise errors.QueryError(
                    f'cannot describe config as {ql.language}')
        elif ql.object is qlast.DescribeGlobal.SystemConfig:
            if ql.language is qltypes.DescribeLanguage.DDL:
                function_call = dispatch.compile(
                    qlast.FunctionCall(
                        func=('cfg', '_describe_system_config_as_ddl'),
                    ),
                    ctx=ictx)
                assert isinstance(function_call, irast.Set), function_call
                stmt.result = function_call
            else:
                raise errors.QueryError(
                    f'cannot describe config as {ql.language}')
        elif ql.object is qlast.DescribeGlobal.Roles:
            if ql.language is qltypes.DescribeLanguage.DDL:
                function_call = dispatch.compile(
                    qlast.FunctionCall(
                        func=('sys', '_describe_roles_as_ddl'),
                    ),
                    ctx=ictx)
                assert isinstance(function_call, irast.Set), function_call
                stmt.result = function_call
            else:
                raise errors.QueryError(
                    f'cannot describe roles as {ql.language}')
        else:
            # DESCRIBE of a specific named object or module.
            assert isinstance(ql.object, qlast.ObjectRef), ql.object
            modules = []
            items: DefaultDict[str, List[s_name.Name]] = defaultdict(list)
            referenced_classes: List[s_obj.ObjectMeta] = []

            objref = ql.object
            itemclass = objref.itemclass

            if itemclass is qltypes.SchemaObjectClass.MODULE:
                modules.append(s_utils.ast_ref_to_unqualname(objref))
            else:
                itemtype: Optional[Type[s_obj.Object]] = None

                name = s_utils.ast_ref_to_name(objref)
                if itemclass is not None:
                    if itemclass is qltypes.SchemaObjectClass.ALIAS:
                        # Look for underlying derived type.
                        itemtype = s_types.Type
                    else:
                        itemtype = (
                            s_obj.ObjectMeta.
                            get_schema_metaclass_for_ql_class(itemclass)
                        )

                last_exc = None
                # Search in the current namespace AND in std. We do
                # this to avoid masking a `std` object/function by one
                # in a default module.
                search_ns = [ictx.modaliases]
                # Only check 'std' separately if the current
                # modaliases don't already include it.
                if ictx.modaliases.get(None, 'std') != 'std':
                    search_ns.append({None: 'std'})

                # Search in the current namespace AND in std.
                for aliases in search_ns:
                    # Use the specific modaliases instead of the
                    # context ones.
                    with ictx.subquery() as newctx:
                        newctx.modaliases = aliases
                        # Get the default module name
                        modname = aliases[None]
                        # Is the current item a function
                        is_function = (itemclass is
                                       qltypes.SchemaObjectClass.FUNCTION)

                        # We need to check functions if we're looking for
                        # them specifically or if this is a broad search.
                        # They are handled separately because they allow
                        # multiple matches for the same name.
                        if (itemclass is None or is_function):
                            try:
                                funcs: Tuple[s_func.Function, ...] = (
                                    newctx.env.schema.get_functions(
                                        name,
                                        module_aliases=aliases)
                                )
                            except errors.InvalidReferenceError:
                                pass
                            else:
                                for func in funcs:
                                    items[f'function_{modname}'].append(
                                        func.get_name(newctx.env.schema))

                        # Also find an object matching the name as long as
                        # it's not a function we're looking for
                        # specifically.
                        if not is_function:
                            try:
                                if itemclass is not \
                                        qltypes.SchemaObjectClass.ALIAS:
                                    condition = None
                                    label = None
                                else:
                                    # Aliases are filtered to persistent
                                    # ones only.
                                    condition = (
                                        lambda obj:
                                        obj.get_alias_is_persistent(
                                            ctx.env.schema
                                        )
                                    )
                                    label = 'alias'

                                obj = schemactx.get_schema_object(
                                    objref,
                                    item_type=itemtype,
                                    condition=condition,
                                    label=label,
                                    ctx=newctx,
                                )
                                items[f'other_{modname}'].append(
                                    obj.get_name(newctx.env.schema))
                            except errors.InvalidReferenceError as exc:
                                # Record the exception to be possibly
                                # raised if no matches are found
                                last_exc = exc

                # If we already have some results, suppress the exception,
                # otherwise raise the recorded exception.
                if not items and last_exc:
                    raise last_exc

            verbose = ql.options.get_flag('VERBOSE')

            method: Any
            if ql.language is qltypes.DescribeLanguage.DDL:
                method = s_ddl.ddl_text_from_schema
            elif ql.language is qltypes.DescribeLanguage.SDL:
                method = s_ddl.sdl_text_from_schema
            elif ql.language is qltypes.DescribeLanguage.TEXT:
                method = s_ddl.descriptive_text_from_schema
                if not verbose.val:
                    # Non-verbose TEXT output includes link/property
                    # references.
                    referenced_classes = [s_links.Link, s_lprops.Property]
            else:
                raise errors.InternalServerError(
                    f'cannot handle describe language {ql.language}'
                )

            # Based on the items found generate main text and a
            # potential comment about masked items.
            defmod = ictx.modaliases.get(None, 'std')
            default_items = []
            masked_items = set()
            for objtype in ['function', 'other']:
                defkey = f'{objtype}_{defmod}'
                mskkey = f'{objtype}_std'

                default_items += items.get(defkey, [])
                if defkey in items and mskkey in items:
                    # We have a match in default module and some masked.
                    masked_items.update(items.get(mskkey, []))
                else:
                    default_items += items.get(mskkey, [])

            # Throw out anything in the masked set that's already in
            # the default.
            masked_items.difference_update(default_items)

            text = method(
                ctx.env.schema,
                included_modules=modules,
                included_items=default_items,
                included_ref_classes=referenced_classes,
                include_module_ddl=False,
                include_std_ddl=True,
            )
            if masked_items:
                # Append the masked builtins as a commented-out section.
                text += ('\n\n'
                         '# The following builtins are masked by the above:'
                         '\n\n')
                masked = method(
                    ctx.env.schema,
                    included_modules=modules,
                    included_items=masked_items,
                    included_ref_classes=referenced_classes,
                    include_module_ddl=False,
                    include_std_ddl=True,
                )
                masked = textwrap.indent(masked, '# ')
                text += masked

            ct = typegen.type_to_typeref(
                ctx.env.get_track_schema_type(
                    s_name.QualName('std', 'str')),
                env=ctx.env,
            )

            stmt.result = setgen.ensure_set(
                irast.StringConstant(value=text, typeref=ct),
                ctx=ictx,
            )

        result = fini_stmt(stmt, ql, ctx=ictx, parent_ctx=ctx)

    return result
def compile_insert_unless_conflict(
    stmt: irast.InsertStmt,
    insert_subject: qlast.Path,
    constraint_spec: qlast.Expr,
    else_branch: Optional[qlast.Expr],
    *,
    ctx: context.ContextLevel,
) -> irast.OnConflictClause:
    """Compile an INSERT ... UNLESS CONFLICT ON clause.

    Validates that *constraint_spec* names a SINGLE property of the
    inserted type carrying exactly one std::exclusive constraint, and
    compiles the optional ELSE branch against the conflicting objects.
    """
    with ctx.new() as constraint_ctx:
        constraint_ctx.partial_path_prefix = stmt.subject

        # We compile the name here so we can analyze it, but we don't do
        # anything else with it.
        cspec_res = setgen.ensure_set(dispatch.compile(
            constraint_spec, ctx=constraint_ctx), ctx=constraint_ctx)

    if not cspec_res.rptr:
        raise errors.QueryError(
            'ON CONFLICT argument must be a property',
            context=constraint_spec.context,
        )

    if cspec_res.rptr.source.path_id != stmt.subject.path_id:
        raise errors.QueryError(
            'ON CONFLICT argument must be a property of the '
            'type being inserted',
            context=constraint_spec.context,
        )

    schema = ctx.env.schema
    schema, ptr = (
        typeutils.ptrcls_from_ptrref(cspec_res.rptr.ptrref, schema=schema))
    if not isinstance(ptr, s_pointers.Pointer):
        raise errors.QueryError(
            'ON CONFLICT property must be a property',
            context=constraint_spec.context,
        )

    # Constraints live on the non-derived (original schema) pointer.
    ptr = ptr.get_nearest_non_derived_parent(schema)
    if ptr.get_cardinality(schema) != qltypes.SchemaCardinality.ONE:
        raise errors.QueryError(
            'ON CONFLICT property must be a SINGLE property',
            context=constraint_spec.context,
        )

    exclusive_constr: s_constr.Constraint = schema.get('std::exclusive')
    ex_cnstrs = [c for c in ptr.get_constraints(schema).objects(schema)
                 if c.issubclass(schema, exclusive_constr)]

    if len(ex_cnstrs) != 1:
        raise errors.QueryError(
            'ON CONFLICT property must have a single exclusive constraint',
            context=constraint_spec.context,
        )

    module_id = schema.get_global(
        s_mod.Module, ptr.get_name(schema).module).id

    field_name = cspec_res.rptr.ptrref.shortname

    # Find the IR corresponding to our field
    # FIXME: Is there a better way to do this?
    for elem, _ in stmt.subject.shape:
        if elem.rptr.ptrref.shortname == field_name:
            key = elem.expr
            break
    else:
        raise errors.QueryError(
            'INSERT ON CONFLICT property requires matching shape',
            context=constraint_spec.context,
        )

    # FIXME: This reuse of the source shape element's expression as an
    # anchor looks fragile — confirm it is safe when the shape value is
    # itself a subquery.
    ctx.anchors = ctx.anchors.copy()
    source_alias = ctx.aliases.get('a')
    ctx.anchors[source_alias] = setgen.ensure_set(key, ctx=ctx)
    anchor = qlast.Path(steps=[qlast.ObjectRef(name=source_alias)])

    ctx.env.schema = schema

    # Compile an else branch
    else_info = None
    if else_branch:
        # Produce a query that finds the conflicting objects
        nobe = qlast.SelectQuery(
            result=insert_subject,
            where=qlast.BinOp(op='=', left=constraint_spec, right=anchor),
        )
        select_ir = dispatch.compile(nobe, ctx=ctx)
        select_ir = setgen.scoped_set(
            select_ir, force_reassign=True, ctx=ctx)
        assert isinstance(select_ir, irast.Set)

        # The ELSE needs to be able to reference the subject in an
        # UPDATE, even though that would normally be prohibited.
        ctx.path_scope.factoring_allowlist.add(stmt.subject.path_id)

        # Compile else
        else_ir = dispatch.compile(
            astutils.ensure_qlstmt(else_branch), ctx=ctx)
        assert isinstance(else_ir, irast.Set)
        else_info = irast.OnConflictElse(select_ir, else_ir)

    return irast.OnConflictClause(
        irast.ConstraintRef(id=ex_cnstrs[0].id, module_id=module_id),
        else_info)
def compile_operator(
        qlexpr: qlast.Base, op_name: str, qlargs: List[qlast.Base], *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile an EdgeQL operator application into an IR set.

    Resolves ``op_name`` against the schema's operators, compiles each
    argument in its own fenced subscope, performs overload resolution
    (with special handling for derived operators and for tuple/array
    operands of recursive collection operators), and builds an
    ``irast.OperatorCall`` node.

    Raises ``errors.QueryError`` when no operator matches, when the match
    is ambiguous, or when an operand's type cannot be inferred.
    """
    env = ctx.env
    schema = env.schema
    opers = schema.get_operators(op_name, module_aliases=ctx.modaliases)

    if opers is None:
        raise errors.QueryError(
            f'no operator matches the given name and argument types',
            context=qlexpr.context)

    fq_op_name = next(iter(opers)).get_shortname(ctx.env.schema)
    # Operators like IF/AND/OR evaluate some operands conditionally;
    # their indices are recorded so the fence below can mark them.
    conditional_args = CONDITIONAL_OPS.get(fq_op_name)

    args = []
    for ai, qlarg in enumerate(qlargs):
        with ctx.newscope(fenced=True) as fencectx:
            # We put on a SET OF fence preemptively in case this is
            # a SET OF arg, which we don't know yet due to polymorphic
            # matching.  We will remove it if necessary in `finalize_args()`.
            if conditional_args and ai in conditional_args:
                fencectx.in_conditional = qlexpr.context

            arg_ir = setgen.ensure_set(
                dispatch.compile(qlarg, ctx=fencectx),
                ctx=fencectx)

            arg_ir = setgen.scoped_set(
                setgen.ensure_stmt(arg_ir, ctx=fencectx),
                ctx=fencectx)

        arg_type = inference.infer_type(arg_ir, ctx.env)
        if arg_type is None:
            raise errors.QueryError(
                f'could not resolve the type of operand '
                f'#{ai} of {op_name}',
                context=qlarg.context)

        args.append((arg_type, arg_ir))

    # Check if the operator is a derived operator, and if so,
    # find the origins.
    origin_op = opers[0].get_derivative_of(env.schema)
    derivative_op: Optional[s_oper.Operator]
    if origin_op is not None:
        # If this is a derived operator, there should be
        # exactly one form of it.  This is enforced at the DDL
        # level, but check again to be sure.
        if len(opers) > 1:
            raise errors.InternalServerError(
                f'more than one derived operator of the same name: '
                f'{op_name}',
                context=qlarg.context)

        derivative_op = opers[0]
        # Resolve overloads against the origin operator's forms, but keep
        # the derived operator's typemods for argument finalization.
        opers = schema.get_operators(origin_op)
        if not opers:
            raise errors.InternalServerError(
                f'cannot find the origin operator for {op_name}',
                context=qlarg.context)
        actual_typemods = [
            param.get_typemod(schema)
            for param in derivative_op.get_params(schema).objects(schema)
        ]
    else:
        derivative_op = None
        actual_typemods = []

    matched = None
    # Some 2-operand operators are special when their operands are
    # arrays or tuples.
    if len(args) == 2:
        coll_opers = None
        # If both of the args are arrays or tuples, potentially
        # compile the operator for them differently than for other
        # combinations.
        if args[0][0].is_tuple(env.schema) and args[1][0].is_tuple(env.schema):
            # Out of the candidate operators, find the ones that
            # correspond to tuples.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_tuple(schema)
                    for param in op.get_params(schema).objects(schema)
                )
            ]

        elif args[0][0].is_array() and args[1][0].is_array():
            # Out of the candidate operators, find the ones that
            # correspond to arrays.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_array()
                    for param in op.get_params(schema).objects(schema)
                )
            ]

        # Proceed only if we have a special case of collection operators.
        if coll_opers:
            # Then check if they are recursive (i.e. validation must be
            # done recursively for the subtypes).  We rely on the fact that
            # it is forbidden to define an operator that has both
            # recursive and non-recursive versions.
            if not coll_opers[0].get_recursive(schema):
                # The operator is non-recursive, so regular processing
                # is needed.
                matched = polyres.find_callable(
                    coll_opers, args=args, kwargs={}, ctx=ctx)

            else:
                # The recursive operators are usually defined as
                # being polymorphic on all parameters, and so this has
                # a side-effect of forcing both operands to be of
                # the same type (via casting) before the operator is
                # applied.  This might seem suboptmial, since there might
                # be a more specific operator for the types of the
                # elements, but the current version of Postgres
                # actually requires tuples and arrays to be of the
                # same type in comparison, so this behavior is actually
                # what we want.
                matched = polyres.find_callable(
                    coll_opers,
                    args=args,
                    kwargs={},
                    ctx=ctx,
                )

                # Now that we have an operator, we need to validate that it
                # can be applied to the tuple or array elements.
                submatched = validate_recursive_operator(
                    opers, args[0], args[1], ctx=ctx)

                if len(submatched) != 1:
                    # This is an error. We want the error message to
                    # reflect whether no matches were found or too
                    # many, so we preserve the submatches found for
                    # this purpose.
                    matched = submatched

    # No special handling match was necessary, find a normal match.
    if matched is None:
        matched = polyres.find_callable(opers, args=args, kwargs={}, ctx=ctx)

    in_polymorphic_func = (
        ctx.env.options.func_params is not None and
        ctx.env.options.func_params.has_polymorphic(env.schema)
    )

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.options.schema_object_context is s_constr.Constraint
    )

    if not in_polymorphic_func:
        # Outside polymorphic function bodies, abstract operator forms
        # are not callable; filter them out of the candidates.
        matched = [call for call in matched
                   if not call.func.get_is_abstract(env.schema)]

    if len(matched) == 1:
        matched_call = matched[0]
    else:
        # Build a human-readable description of the operand types for
        # the error message.
        if len(args) == 2:
            ltype = schemactx.get_material_type(args[0][0], ctx=ctx)
            rtype = schemactx.get_material_type(args[1][0], ctx=ctx)

            types = (
                f'{ltype.get_displayname(env.schema)!r} and '
                f'{rtype.get_displayname(env.schema)!r}')
        else:
            types = ', '.join(
                repr(
                    schemactx.get_material_type(
                        a[0], ctx=ctx).get_displayname(env.schema)
                ) for a in args
            )

        if not matched:
            hint = ('Consider using an explicit type cast or a '
                    'conversion function.')

            if op_name == 'std::IF':
                hint = (f"The IF and ELSE result clauses must be of "
                        f"compatible types, while the condition clause must "
                        f"be 'std::bool'. {hint}")
            elif op_name == '+':
                # NOTE: ltype/rtype are bound above only in the binary
                # case; '+' is always binary, so they are available here.
                str_t = cast(s_scalars.ScalarType,
                             env.schema.get('std::str'))
                bytes_t = cast(s_scalars.ScalarType,
                               env.schema.get('std::bytes'))
                if (
                    (ltype.issubclass(env.schema, str_t) and
                        rtype.issubclass(env.schema, str_t)) or
                    (ltype.issubclass(env.schema, bytes_t) and
                        rtype.issubclass(env.schema, bytes_t)) or
                    (ltype.is_array() and rtype.is_array())
                ):
                    hint = 'Consider using the "++" operator for concatenation'

            raise errors.QueryError(
                f'operator {str(op_name)!r} cannot be applied to '
                f'operands of type {types}',
                hint=hint,
                context=qlexpr.context)
        elif len(matched) > 1:
            if in_abstract_constraint:
                # Abstract constraints cannot know which concrete form
                # will apply; pick the first candidate arbitrarily.
                matched_call = matched[0]
            else:
                detail = ', '.join(
                    f'`{m.func.get_verbosename(ctx.env.schema)}`'
                    for m in matched
                )
                raise errors.QueryError(
                    f'operator {str(op_name)!r} is ambiguous for '
                    f'operands of type {types}',
                    hint=f'Possible variants: {detail}.',
                    context=qlexpr.context)

    oper = matched_call.func
    assert isinstance(oper, s_oper.Operator)
    env.schema_refs.add(oper)
    oper_name = oper.get_shortname(env.schema)

    matched_params = oper.get_params(env.schema)
    rtype = matched_call.return_type

    # Polymorphic only if both some parameter AND the return type are
    # polymorphic.
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_params.objects(env.schema)) and
        rtype.is_polymorphic(env.schema)
    )

    final_args, params_typemods = finalize_args(
        matched_call,
        actual_typemods=actual_typemods,
        is_polymorphic=is_polymorphic,
        ctx=ctx,
    )

    if oper_name in {'std::UNION', 'std::IF'} and rtype.is_object_type():
        # Special case for the UNION and IF operators, instead of common
        # parent type, we return a union type.
        if oper_name == 'std::UNION':
            larg, rarg = (a.expr for a in final_args)
        else:
            # std::IF has three operands: result-if-true, condition,
            # result-if-false; the condition is skipped here.
            larg, _, rarg = (a.expr for a in final_args)

        left_type = schemactx.get_material_type(
            setgen.get_set_type(larg, ctx=ctx),
            ctx=ctx,
        )
        right_type = schemactx.get_material_type(
            setgen.get_set_type(rarg, ctx=ctx),
            ctx=ctx,
        )

        if left_type.issubclass(env.schema, right_type):
            rtype = right_type
        elif right_type.issubclass(env.schema, left_type):
            rtype = left_type
        else:
            assert isinstance(left_type, s_types.InheritingType)
            assert isinstance(right_type, s_types.InheritingType)
            rtype = schemactx.get_union_type(
                [left_type, right_type], ctx=ctx)

    from_op = oper.get_from_operator(env.schema)
    sql_operator = None
    # Only forward the SQL operator mapping when the operator is backed
    # purely by a SQL operator (no code or function) and we are not
    # inside a polymorphic function body.
    if (from_op is not None and oper.get_code(env.schema) is None and
            oper.get_from_function(env.schema) is None and
            not in_polymorphic_func):
        sql_operator = tuple(from_op)

    origin_name: Optional[sn.SchemaName]
    origin_module_id: Optional[uuid.UUID]
    if derivative_op is not None:
        # Record the origin operator and report the derived operator's
        # name as the call's shortname.
        origin_name = oper_name
        origin_module_id = env.schema.get_global(
            s_mod.Module, origin_name.module).id
        oper_name = derivative_op.get_shortname(env.schema)
    else:
        origin_name = None
        origin_module_id = None

    node = irast.OperatorCall(
        args=final_args,
        func_module_id=env.schema.get_global(
            s_mod.Module, oper_name.module).id,
        func_shortname=oper_name,
        func_polymorphic=is_polymorphic,
        origin_name=origin_name,
        origin_module_id=origin_module_id,
        func_sql_function=oper.get_from_function(env.schema),
        sql_operator=sql_operator,
        force_return_cast=oper.get_force_return_cast(env.schema),
        volatility=oper.get_volatility(env.schema),
        operator_kind=oper.get_operator_kind(env.schema),
        params_typemods=params_typemods,
        context=qlexpr.context,
        typeref=typegen.type_to_typeref(rtype, env=env),
        typemod=oper.get_return_typemod(env.schema),
    )

    return setgen.ensure_set(node, typehint=rtype, ctx=ctx)