def _init_objtypes(self, objtypes):
    """Materialize declared object types and their links in the schema.

    For each (objtype, objtypedecl) pair this parses source properties
    and field setters, then derives a concrete link for every link
    declaration, and finally (in a second pass, once all pointers exist)
    parses indexes and constraints.

    NOTE(review): the schema object appears to be immutable — nearly
    every mutating call returns a new schema that must be re-assigned
    to ``self._schema``.  Statement order here is therefore load-bearing.

    :param objtypes: mapping of object-type schema objects to their
        declaration AST nodes (presumably ``{ObjectType: decl}`` — the
        exact decl type is not visible from here).
    :raises errors.SchemaDefinitionError: if a link name is too long.
    :raises errors.InvalidLinkTargetError: if a link target is not an
        object type (and not ``any``).
    """
    # Pass 1: source props, field setters, and link derivation.
    for objtype, objtypedecl in objtypes.items():
        self._parse_source_props(objtype, objtypedecl)

        if objtypedecl.fields:
            self._parse_field_setters(objtype, objtypedecl.fields)

        for linkdecl in objtypedecl.links:
            link_name = linkdecl.name
            # Enforce the backend name-length limit up front so the
            # error points at the declaration, not at a later failure.
            if len(link_name) > s_pointers.MAX_NAME_LENGTH:
                raise errors.SchemaDefinitionError(
                    f'link or property name length exceeds the maximum of '
                    f'{s_pointers.MAX_NAME_LENGTH} characters',
                    context=linkdecl.context)

            # Resolve the bases to derive from: explicit EXTENDS
            # clauses, or the default base link otherwise.
            if linkdecl.extends:
                link_bases = [
                    self._get_ref_type(b) for b in linkdecl.extends
                ]
            else:
                link_bases = [
                    self._schema.get(s_links.Link.get_default_base_name())
                ]

            if linkdecl.expr is not None:
                # This is a computable, but we cannot interpret
                # the expression yet, so set the target to `any`
                # temporarily.
                _targets = [s_pseudo.Any.instance]
            else:
                _targets = [self._get_ref_type(t) for t in linkdecl.target]

            if len(_targets) == 1:
                # Usual case, just one target.
                spectargets = None
                target = _targets[0]
            else:
                # Multiple explicit targets, create common virtual
                # parent and use it as target.
                spectargets = s_obj.ObjectSet.create(
                    self._schema, _targets)
                self._schema, target = s_inh.create_virtual_parent(
                    self._schema, _targets,
                    module_name=self._module.get_name(self._schema))

                # Mark the synthesized parent as derived so it is not
                # treated as a user-declared type.
                self._schema = target.set_field_value(
                    self._schema, 'is_derived', True)

            # Links may only point at object types (or `any` for the
            # not-yet-compiled computable placeholder above).
            if (not target.is_any() and
                    not isinstance(target, s_objtypes.ObjectType)):
                raise errors.InvalidLinkTargetError(
                    f'invalid link target, expected object type, got '
                    f'{target.__class__.__name__}',
                    context=linkdecl.target[0].context
                )

            new_props = {
                'sourcectx': linkdecl.context,
            }

            # Derive a concrete link from the first base, merging in
            # the remaining bases; defaults are skipped for inherited
            # declarations (they come from the parent).
            name = self._get_derived_ptr_name(link_name, objtype)
            self._schema, link = link_bases[0].derive(
                self._schema, objtype, target,
                attrs=new_props,
                merge_bases=link_bases,
                apply_defaults=not linkdecl.inherited,
                name=name)

            # Cardinality: explicit declaration wins; otherwise plain
            # links default to ONE, while computables are left as None
            # (presumably inferred later from the expression — TODO
            # confirm against the computable-compilation pass).
            if linkdecl.cardinality is None:
                if linkdecl.expr is None:
                    cardinality = qltypes.Cardinality.ONE
                else:
                    cardinality = None
            else:
                cardinality = linkdecl.cardinality

            self._schema = link.update(self._schema, {
                'spectargets': spectargets,
                'required': bool(linkdecl.required),
                'cardinality': cardinality,
                'declared_inherited': linkdecl.inherited,
            })

            if linkdecl.on_target_delete is not None:
                self._schema = link.set_field_value(
                    self._schema,
                    'on_target_delete',
                    linkdecl.on_target_delete.cascade)

            if linkdecl.expr is not None:
                # Flag as computable; the expression itself is compiled
                # elsewhere (target is still the `any` placeholder).
                self._schema = link.set_field_value(
                    self._schema, 'computable', True)

            self._parse_source_props(link, linkdecl)
            self._schema = objtype.add_pointer(self._schema, link)

    # Pass 2: indexes and constraints can reference any pointer, so
    # they are parsed only after every type has all of its pointers.
    for objtype, objtypedecl in objtypes.items():
        if objtypedecl.indexes:
            self._parse_subject_indexes(objtype, objtypedecl)

        if objtypedecl.constraints:
            self._parse_subject_constraints(objtype, objtypedecl)
def compile_operator(
        qlexpr: qlast.Base, op_name: str, qlargs: typing.List[qlast.Base], *,
        ctx: context.ContextLevel) -> irast.OperatorCall:
    """Compile an EdgeQL operator application into an IR OperatorCall.

    Resolution proceeds in stages:

    1. compile each operand in its own fenced scope and infer its type;
    2. special-case two-operand tuple/tuple and array/array forms,
       including recursive element-wise validation;
    3. otherwise run normal polymorphic overload resolution;
    4. filter out abstract operators (unless compiling a polymorphic
       function body) and report no-match / ambiguity errors;
    5. special-case std::UNION / std::IF object-type results to produce
       a union type instead of a common parent.

    :param qlexpr: the operator expression AST node (used for error
        context).
    :param op_name: possibly-qualified operator name to resolve.
    :param qlargs: operand AST nodes, in operand order.
    :param ctx: compiler context level.
    :returns: a set wrapping the compiled ``irast.OperatorCall``.
    :raises errors.QueryError: on unknown operator, unresolvable operand
        type, no matching overload, or ambiguous overloads.
    """
    env = ctx.env
    schema = env.schema
    opers = schema.get_operators(op_name, module_aliases=ctx.modaliases)

    if opers is None:
        raise errors.QueryError(
            f'no operator matches the given name and argument types',
            context=qlexpr.context)

    # Compile the operands; each becomes a (type, ir_set) pair.
    args = []
    for ai, qlarg in enumerate(qlargs):
        with ctx.newscope(fenced=True) as fencectx:
            # We put on a SET OF fence preemptively in case this is
            # a SET OF arg, which we don't know yet due to polymorphic
            # matching.  We will remove it if necessary in
            # `finalize_args()`.
            arg_ir = setgen.ensure_set(
                dispatch.compile(qlarg, ctx=fencectx),
                ctx=fencectx)

            arg_ir = setgen.scoped_set(
                setgen.ensure_stmt(arg_ir, ctx=fencectx),
                ctx=fencectx)

        arg_type = inference.infer_type(arg_ir, ctx.env)
        if arg_type is None:
            raise errors.QueryError(
                f'could not resolve the type of operand '
                f'#{ai} of {op_name}',
                context=qlarg.context)

        args.append((arg_type, arg_ir))

    matched = None
    # Some 2-operand operators are special when their operands are
    # arrays or tuples.
    if len(args) == 2:
        coll_opers = None
        # If both of the args are arrays or tuples, potentially
        # compile the operator for them differently than for other
        # combinations.
        if args[0][0].is_tuple() and args[1][0].is_tuple():
            # Out of the candidate operators, find the ones that
            # correspond to tuples.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_tuple()
                    for param in op.get_params(schema).objects(schema))
            ]

        elif args[0][0].is_array() and args[1][0].is_array():
            # Out of the candidate operators, find the ones that
            # correspond to arrays.
            coll_opers = [
                op for op in opers
                if all(
                    param.get_type(schema).is_array()
                    for param in op.get_params(schema).objects(schema))
            ]

        # Proceed only if we have a special case of collection operators.
        if coll_opers:
            # Then check if they are recursive (i.e. validation must be
            # done recursively for the subtypes).  We rely on the fact
            # that it is forbidden to define an operator that has both
            # recursive and non-recursive versions.
            if not coll_opers[0].get_recursive(schema):
                # The operator is non-recursive, so regular processing
                # is needed.
                matched = polyres.find_callable(
                    coll_opers, args=args, kwargs={}, ctx=ctx)
            else:
                # Ultimately the operator will be the same, regardless
                # of the specific operand types, as long as it passed
                # validation, so we just use the first operand type for
                # the purpose of finding the callable.
                matched = polyres.find_callable(
                    coll_opers,
                    args=[(args[0][0], args[0][1]),
                          (args[0][0], args[1][1])],
                    kwargs={}, ctx=ctx)

                # Now that we have an operator, we need to validate that
                # it can be applied to the tuple or array elements.
                submatched = validate_recursive_operator(
                    opers, args[0], args[1], ctx=ctx)

                if len(submatched) != 1:
                    # This is an error.  We want the error message to
                    # reflect whether no matches were found or too
                    # many, so we preserve the submatches found for
                    # this purpose.
                    matched = submatched

    # No special handling match was necessary, find a normal match.
    if matched is None:
        matched = polyres.find_callable(opers, args=args, kwargs={}, ctx=ctx)

    in_polymorphic_func = (
        ctx.env.func_params is not None and
        ctx.env.func_params.has_polymorphic(env.schema))

    in_abstract_constraint = (
        in_polymorphic_func and
        ctx.env.parent_object_type is s_constr.Constraint)

    # Outside of a polymorphic function body abstract operators are
    # never directly callable, so drop them from consideration.
    if not in_polymorphic_func:
        matched = [call for call in matched
                   if not call.func.get_is_abstract(env.schema)]

    if len(matched) == 1:
        matched_call = matched[0]
    else:
        # Build a human-readable operand-type description for the
        # error message (binary operators get "X and Y" phrasing).
        if len(args) == 2:
            ltype = args[0][0].material_type(env.schema)
            rtype = args[1][0].material_type(env.schema)

            types = (
                f'{ltype.get_displayname(env.schema)!r} and '
                f'{rtype.get_displayname(env.schema)!r}')
        else:
            types = ', '.join(
                repr(a[0].material_type(env.schema).get_displayname(
                    env.schema)) for a in args)

        if not matched:
            raise errors.QueryError(
                f'operator {str(op_name)!r} cannot be applied to '
                f'operands of type {types}',
                hint='Consider using an explicit type cast or a conversion '
                     'function.',
                context=qlexpr.context)
        elif len(matched) > 1:
            if in_abstract_constraint:
                # Inside an abstract constraint ambiguity is tolerated:
                # pick the first candidate (presumably resolved
                # properly at concrete-constraint compile time — TODO
                # confirm).
                matched_call = matched[0]
            else:
                detail = ', '.join(
                    f'`{m.func.get_display_signature(ctx.env.schema)}`'
                    for m in matched)
                raise errors.QueryError(
                    f'operator {str(op_name)!r} is ambiguous for '
                    f'operands of type {types}',
                    hint=f'Possible variants: {detail}.',
                    context=qlexpr.context)

    # Rebind args: finalize_args may strip the preemptive SET OF fences
    # added above, and yields the per-parameter type modifiers.
    args, params_typemods = finalize_args(matched_call, ctx=ctx)

    oper = matched_call.func
    env.schema_refs.add(oper)

    oper_name = oper.get_shortname(env.schema)

    matched_params = oper.get_params(env.schema)
    rtype = matched_call.return_type

    if oper_name in {'std::UNION', 'std::IF'} and rtype.is_object_type():
        # Special case for the UNION and IF operators, instead of common
        # parent type, we return a union type.
        if oper_name == 'std::UNION':
            larg, rarg = (a.expr for a in args)
        else:
            # For IF, args[0] is the condition; the branches follow.
            larg, rarg = (a.expr for a in args[1:])

        left_type = setgen.get_set_type(larg, ctx=ctx).material_type(
            ctx.env.schema)
        right_type = setgen.get_set_type(rarg, ctx=ctx).material_type(
            ctx.env.schema)

        # If one side is a subtype of the other, the supertype is the
        # result; otherwise synthesize a virtual common parent.
        if left_type.issubclass(env.schema, right_type):
            rtype = right_type
        elif right_type.issubclass(env.schema, left_type):
            rtype = left_type
        else:
            env.schema, rtype = s_inh.create_virtual_parent(
                env.schema, [left_type, right_type])

    # The call is polymorphic only if both a parameter and the return
    # type are polymorphic.
    is_polymorphic = (
        any(p.get_type(env.schema).is_polymorphic(env.schema)
            for p in matched_params.objects(env.schema)) and
        oper.get_return_type(env.schema).is_polymorphic(env.schema))

    # Use the backend SQL operator directly only when there is no
    # explicit code/function override and we are not compiling a
    # polymorphic function body.
    from_op = oper.get_from_operator(env.schema)
    if (from_op is not None and oper.get_code(env.schema) is None and
            oper.get_from_function(env.schema) is None and
            not in_polymorphic_func):
        sql_operator = tuple(from_op)
    else:
        sql_operator = None

    node = irast.OperatorCall(
        args=args,
        func_module_id=env.schema.get_global(
            s_mod.Module, oper_name.module).id,
        func_shortname=oper_name,
        func_polymorphic=is_polymorphic,
        func_sql_function=oper.get_from_function(env.schema),
        sql_operator=sql_operator,
        force_return_cast=oper.get_force_return_cast(env.schema),
        volatility=oper.get_volatility(env.schema),
        operator_kind=oper.get_operator_kind(env.schema),
        params_typemods=params_typemods,
        context=qlexpr.context,
        typeref=irtyputils.type_to_typeref(env.schema, rtype),
        typemod=oper.get_return_typemod(env.schema),
    )

    return setgen.ensure_set(node, typehint=rtype, ctx=ctx)