def compile_Path(
    node: qlast.Path,
    *,
    schema: s_schema.Schema,
    modaliases: Mapping[Optional[str], str],
    localnames: AbstractSet[str] = frozenset(),
) -> None:
    """Normalize every step of a path expression in place.

    Sub-expressions and type intersections are normalized recursively;
    bare object references at the path root get an explicit module
    resolved from the schema and the active module aliases.
    """
    for step in node.steps:
        if isinstance(step, (qlast.Expr, qlast.TypeIntersection)):
            normalize(
                step,
                schema=schema,
                modaliases=modaliases,
                localnames=localnames,
            )
            continue

        if not isinstance(step, qlast.ObjectRef):
            continue

        # A specific path root: qualify it unless it already carries a
        # module or is a local name (alias, parameter, etc.).
        if step.module or step.name in localnames:
            continue

        resolved = schema.get(
            step.name,
            default=None,
            module_aliases=modaliases,
        )
        if resolved is not None:
            step.module = resolved.get_name(schema).module
        elif None in modaliases:
            # Even if the name was not resolved in the schema it may be
            # the name of the object being defined, so the default
            # module applies.  Names that must be ignored (aliases,
            # parameters) have already been filtered via localnames.
            step.module = modaliases[None]
def _validate_base_refs(
    cls,
    schema: s_schema.Schema,
    base_refs: List[so.Object],
    astnode: qlast.ObjectDDL,
    context: sd.CommandContext,
) -> so.ObjectList[so.InheritingObject]:
    """Validate the declared bases, substituting the default base when none.

    Raises SchemaError if any base contains generic ("any") types.
    """
    classname = cls._classname_from_ast(schema, astnode, context)
    bases = so.ObjectList[so.InheritingObject].create(schema, base_refs)

    for candidate in bases.objects(schema):
        if candidate.is_type() and candidate.contains_any(schema):
            raise errors.SchemaError(
                f"{candidate.get_displayname(schema)!r} "
                f"cannot be a parent type")

    mcls = cls.get_schema_metaclass()
    if not bases and classname not in mcls.get_root_classes():
        # No explicit bases: fall back to the metaclass default, unless
        # we are defining the default base itself.
        default_base = mcls.get_default_base_name()
        if default_base is not None and classname != default_base:
            default_base = schema.get(default_base)
            bases = so.ObjectList[so.InheritingObject].create(
                schema,
                [utils.reduce_to_typeref(schema, default_base)],
            )

    return bases
def localnames_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> Set[str]:
    """Collect names that expression normalization must leave unresolved.

    Constraint parameter names are added on top of whatever the parent
    implementation reports.
    """
    localnames = super().localnames_from_ast(schema, astnode, context)

    if isinstance(astnode, qlast.CreateConstraint):
        # Parameters are declared right on the CREATE node.
        localnames |= {param.name for param in astnode.params}
    elif isinstance(astnode, qlast.AlterConstraint):
        # ALTER ABSTRACT CONSTRAINT doesn't repeat the params,
        # but we can get them from the schema.
        objref = astnode.name

        # Merge the context modaliases and the command modaliases.
        merged_aliases = dict(context.modaliases)
        merged_aliases.update(
            cls._modaliases_from_ast(schema, astnode, context))

        # Get the original constraint.
        constr = schema.get(
            utils.ast_ref_to_name(objref),
            module_aliases=merged_aliases,
            type=Constraint,
        )
        localnames |= {
            param.get_parameter_name(schema)
            for param in constr.get_params(schema).objects(schema)
        }

    return localnames
def _normalize_objref(
    ref: qlast.ObjectRef,
    *,
    schema: s_schema.Schema,
    modaliases: Mapping[Optional[str], str],
    localnames: AbstractSet[str] = frozenset(),
) -> None:
    """Resolve and record the module of *ref* in place, when possible."""
    if ref.module or ref.name in localnames:
        return

    found = schema.get(
        ref.name,
        default=None,
        module_aliases=modaliases,
    )
    if found is not None:
        fullname = found.get_name(schema)
        assert isinstance(fullname, sn.QualName)
        ref.module = fullname.module
    elif None in modaliases:
        # Even if the name was not resolved in the schema it may be the
        # name of the object being defined, so the default module alias
        # applies.  Names that must be ignored (aliases, parameters)
        # have already been filtered via localnames.
        ref.module = modaliases[None]
def int_const_to_python(
        ir: irast.IntegerConstant, schema: s_schema.Schema) -> Any:
    """Convert an IR integer constant to int, or Decimal for std::bigint."""
    stype = schema.get_by_id(ir.typeref.id)
    bigint_t = schema.get('std::bigint')
    if stype.issubclass(schema, bigint_t):
        return decimal.Decimal(ir.value)
    return int(ir.value)
def float_const_to_python(
        ir: irast.FloatConstant, schema: s_schema.Schema) -> object:
    """Convert an IR float constant to float, or Decimal for std::decimal."""
    stype = schema.get_by_id(ir.typeref.id)
    decimal_t = schema.get('std::decimal')
    if stype.issubclass(schema, decimal_t):
        return decimal.Decimal(ir.value)
    return float(ir.value)
def _reinherit_classref_dict(
    self: InheritingObjectCommand[so.InheritingObjectT],
    schema: s_schema.Schema,
    context: sd.CommandContext,
    refdict: so.RefDict,
) -> s_schema.Schema:
    """Re-derive the inherited references in *refdict* for this object.

    Creates commands for newly inherited refs, rebases refs that already
    exist, and deletes refs that are no longer inherited.  All generated
    commands are collected into a single CommandGroup added to *self*.

    Returns the updated schema.
    """
    from edb.schema import referencing as s_referencing

    scls = self.scls
    refs = self.get_inherited_ref_layout(schema, context, refdict)
    refnames = set(refs)

    # Alter fragments operate on behalf of their parent command, so any
    # owned-ref subcommands must be looked up there.
    obj_op: InheritingObjectCommand[so.InheritingObjectT]
    if isinstance(self, sd.AlterObjectFragment):
        obj_op = cast(InheritingObjectCommand[so.InheritingObjectT],
                      self.get_parent_op(context))
    else:
        obj_op = self

    # Refs explicitly marked as owned are kept even if not inherited.
    for refalter in obj_op.get_subcommands(metaclass=refdict.ref_cls):
        if refalter.get_attribute_value('owned'):
            assert isinstance(refalter, sd.QualifiedObjectCommand)
            refnames.add(refalter.classname)

    deleted_refs = self.get_no_longer_inherited_ref_layout(
        schema, context, refdict, refnames)
    group = sd.CommandGroup()

    for create_cmd, astnode, bases in refs.values():
        cmd = create_cmd.as_inherited_ref_cmd(
            schema=schema,
            context=context,
            astnode=astnode,
            bases=bases,
            referrer=scls,
        )

        obj = schema.get(cmd.classname, default=None)
        if obj is None:
            # Brand-new inherited ref: create it and point it back at
            # the referrer.
            cmd.set_attribute_value(refdict.backref_attr, scls)
            group.add(cmd)
            schema = cmd.apply(schema, context)
        else:
            # Existing ref: rebase onto the new base list.
            assert isinstance(obj,
                              s_referencing.ReferencedInheritingObject)
            existing_bases = obj.get_implicit_bases(schema)
            schema, cmd2 = self._rebase_ref(schema, context, obj,
                                            existing_bases, bases)
            group.add(cmd2)

    # Drop refs that are no longer inherited from any base.
    for fqname, delete_cmd_cls in deleted_refs.items():
        delete_cmd = delete_cmd_cls(classname=fqname)
        group.add(delete_cmd)
        schema = delete_cmd.apply(schema, context)

    self.add(group)

    return schema
def float_const_to_python(
        ir: irast.FloatConstant, schema: s_schema.Schema) -> Any:
    """Convert an IR float constant to float, or Decimal for std::decimal."""
    stype = schema.get_by_id(ir.typeref.id)
    assert isinstance(stype, s_types.Type)
    decimal_t = schema.get('std::decimal')
    if stype.issubclass(schema, decimal_t):
        return decimal.Decimal(ir.value)
    return float(ir.value)
def maybe_lookup_obj_pointer(
    schema: s_schema.Schema,
    name: s_name.QualName,
    ptr_name: s_name.UnqualName,
) -> Optional[s_pointers.Pointer]:
    """Return the pointer *ptr_name* of object type *name*, if either exists."""
    source = schema.get(name, type=s_objtypes.ObjectType, default=None)
    if not source:
        return None
    return source.maybe_get_ptr(schema, ptr_name)
def float_const_to_python(
        ir: irast.FloatConstant, schema: s_schema.Schema) -> Any:
    """Convert an IR float constant to a Python float or Decimal.

    Constants typed as std::decimal (or a subclass) are converted
    losslessly via decimal.Decimal; all other float constants become
    Python floats.
    """
    stype = schema.get_by_id(ir.typeref.id)
    assert isinstance(stype, s_types.Type)
    # Fix: the check previously tested std::bigint, which is an integer
    # type a FloatConstant can never be typed as, making the Decimal
    # branch unreachable.  Sibling implementations of this function test
    # std::decimal.
    decimal_t = schema.get('std::decimal', type=s_obj.SubclassableObject)
    if stype.issubclass(schema, decimal_t):
        return decimal.Decimal(ir.value)
    else:
        return float(ir.value)
def _classbases_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.ObjectDDL,
    context: sd.CommandContext,
) -> so.ObjectList[Constraint]:
    """Derive constraint bases from the DDL AST.

    A concrete constraint's sole base is the abstract constraint named
    by the shortname of its specialized fullname; anything else defers
    to the generic implementation.
    """
    if not isinstance(astnode, qlast.CreateConcreteConstraint):
        return super()._classbases_from_ast(schema, astnode, context)

    classname = cls._classname_from_ast(schema, astnode, context)
    base_name = sn.shortname_from_fullname(classname)
    abstract_base = schema.get(base_name, type=Constraint)
    return so.ObjectList.create(schema, [abstract_base])
def get_param_anchors_for_callable(
    params: s_func.ParameterLikeList,
    schema: s_schema.Schema,
    *,
    inlined_defaults: bool,
) -> Tuple[
    Dict[str, irast.Parameter],
    List[qlast.AliasedExpr],
]:
    """Build IR parameter anchors (and default-substituting aliases).

    Returns a pair of:
        - a mapping of parameter short names to irast.Parameter anchors
          (plus a synthetic ``__defaults_mask__`` bytes parameter when
          *inlined_defaults* is set), and
        - a list of AliasedExpr nodes that, for each defaulted
          parameter, select either the passed value or the declared
          default depending on the corresponding bit of
          ``__defaults_mask__``.
    """
    anchors = {}
    aliases = []

    if inlined_defaults:
        # The mask is a std::bytes value where bit *pi* signals that
        # parameter *pi* was omitted by the caller.
        anchors['__defaults_mask__'] = irast.Parameter(
            name='__defaults_mask__',
            typeref=irtyputils.type_to_typeref(  # note: no cache
                schema,
                cast(s_scalars.ScalarType, schema.get('std::bytes')),
            ),
        )

    pg_params = s_func.PgParams.from_params(schema, params)
    for pi, p in enumerate(pg_params.params):
        p_shortname = p.get_shortname(schema)
        anchors[p_shortname] = irast.Parameter(
            name=p_shortname,
            typeref=irtyputils.type_to_typeref(schema, p.get_type(schema)))

        if p.get_default(schema) is None:
            continue

        if not inlined_defaults:
            continue

        # Alias:  p := (std::bytes_get_bit(__defaults_mask__, pi) = 0)
        #              ?? p : <default>
        aliases.append(
            qlast.AliasedExpr(
                alias=p_shortname,
                expr=qlast.IfElse(
                    condition=qlast.BinOp(
                        left=qlast.FunctionCall(
                            func=('std', 'bytes_get_bit'),
                            args=[
                                qlast.Path(steps=[
                                    qlast.ObjectRef(
                                        name='__defaults_mask__'),
                                ]),
                                qlast.IntegerConstant(value=str(pi)),
                            ]),
                        right=qlast.IntegerConstant(value='0'),
                        op='='),
                    if_expr=qlast.Path(
                        steps=[qlast.ObjectRef(name=p_shortname)]),
                    else_expr=qlast._Optional(
                        expr=p.get_ql_default(schema)))))

    return anchors, aliases
def scalar_type_to_python_type(
    stype: s_types.Type,
    schema: s_schema.Schema,
) -> type:
    """Map a scalar schema type to the Python type representing it.

    Enum scalars map to str; anything not covered by the module-level
    typemap raises UnsupportedExpressionError.
    """
    for type_name, py_type in typemap.items():
        schema_base = schema.get(type_name, type=s_obj.InheritingObject)
        if stype.issubclass(schema, schema_base):
            return py_type

    if stype.is_enum(schema):
        return str

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def format_error_message(
    self,
    schema: s_schema.Schema,
) -> str:
    """Render this constraint's error message template.

    The template (``errmessage``) is formatted with the constraint's
    argument values (rendered back to EdgeQL source) and the subject's
    title — taken from the ``std::title`` annotation when present, or
    the subject's short name otherwise.
    """
    errmsg = self.get_errmessage(schema)
    subject = self.get_subject(schema)
    titleattr = subject.get_annotation(schema, 'std::title')

    if not titleattr:
        subjname = subject.get_shortname(schema)
        subjtitle = subjname.name
    else:
        subjtitle = titleattr

    args = self.get_args(schema)
    if args:
        # Imported locally to avoid a parser dependency on the
        # non-argument path.
        from edb.edgeql import parser as qlparser
        from edb.edgeql import utils as qlutils

        args_ql: List[qlast.Base] = [
            qlast.Path(steps=[qlast.ObjectRef(name=subjtitle)]),
        ]

        args_ql.extend(qlparser.parse(arg.text) for arg in args)

        constr_base: Constraint = schema.get(
            self.get_name(schema), type=type(self))

        # Map formal parameter names to the actual argument ASTs.
        index_parameters = qlutils.index_parameters(
            args_ql,
            parameters=constr_base.get_params(schema),
            schema=schema,
        )

        expr = constr_base.get_field_value(schema, 'expr')
        expr_ql = qlparser.parse(expr.text)

        qlutils.inline_parameters(expr_ql, index_parameters)

        # Render each bound argument back to EdgeQL source for
        # str.format substitution.
        args_map = {
            name: edgeql.generate_source(val, pretty=False)
            for name, val in index_parameters.items()
        }
    else:
        args_map = {'__subject__': subjtitle}

    assert errmsg is not None
    formatted = errmsg.format(**args_map)

    return formatted
def _create_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Begin cast creation, rejecting duplicate cast definitions.

    Raises DuplicateCastDefinitionError if a cast with the same fully
    qualified name already exists in the schema.
    """
    fullname = self.classname
    # Renamed from `cast`: that name shadowed typing.cast, which is
    # used elsewhere in this module.
    existing = schema.get(fullname, None)
    if existing:
        from_type = self.get_attribute_value('from_type')
        to_type = self.get_attribute_value('to_type')

        raise errors.DuplicateCastDefinitionError(
            f'a cast from {from_type.get_displayname(schema)!r} '
            f'to {to_type.get_displayname(schema)!r} is already defined',
            context=self.source_context)

    return super()._create_begin(schema, context)
def _reinherit_classref_dict(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
    refdict: so.RefDict,
) -> s_schema.Schema:
    """Re-derive the inherited references in *refdict* for this object.

    New inherited refs are created, existing ones are rebased onto
    their current implicit bases, and refs no longer inherited are
    deleted.  All commands are grouped and added to *self*; the
    updated schema is returned.
    """
    from edb.schema import referencing as s_referencing

    scls = self.scls
    refs = self.get_inherited_ref_layout(schema, context, refdict)
    deleted_refs = self.get_no_longer_inherited_ref_layout(
        schema, context, refdict, refs)
    group = sd.CommandGroup()

    for create_cmd, astnode, bases in refs.values():
        cmd = create_cmd.as_inherited_ref_cmd(schema, context,
                                              astnode, bases)

        obj = schema.get(cmd.classname, default=None)
        if obj is None:
            # Newly inherited ref: create it with a back-reference to
            # the inheriting object.
            cmd.set_attribute_value(
                refdict.backref_attr,
                so.ObjectRef(name=scls.get_name(schema)),
            )
            group.add(cmd)
            schema = cmd.apply(schema, context)
        else:
            # Existing ref: rebase it onto the new bases.
            assert isinstance(obj,
                              s_referencing.ReferencedInheritingObject)
            existing_bases = obj.get_implicit_bases(schema)
            schema, cmd = self._rebase_ref(schema, context, obj,
                                           existing_bases, bases)
            group.add(cmd)
            schema = cmd.apply(schema, context)

    # Drop refs that are no longer inherited from any base.
    for fqname, delete_cmd in deleted_refs.items():
        cmd = delete_cmd(classname=fqname)
        group.add(cmd)
        schema = cmd.apply(schema, context)

    self.add(group)

    return schema
def scalar_type_to_python_type(stype: s_types.Type,
                               schema: s_schema.Schema) -> type:
    """Map a scalar schema type to the Python type representing it.

    Raises UnsupportedExpressionError for scalars with no Python
    representation.
    """
    mapping = {
        'std::str': str,
        'std::anyint': int,
        'std::anyfloat': float,
        'std::decimal': decimal.Decimal,
        'std::bool': bool,
        'std::json': str,
        'std::uuid': uuid.UUID,
    }

    for schema_name, py_type in mapping.items():
        if stype.issubclass(schema, schema.get(schema_name)):
            return py_type

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def _maybe_fix_name(
    cls,
    name: sn.QualName,
    *,
    schema: s_schema.Schema,
    context: so.ComparisonContext,
) -> sn.Name:
    """Rebuild a specialized constraint name from its (possibly renamed) base."""
    obj = schema.get(name, type=Constraint)
    if obj.generic(schema):
        # Abstract constraints keep their name as-is.
        return name

    base = obj.get_bases(schema).objects(schema)[0]
    current_base_name = context.get_obj_name(schema, base)
    quals = list(sn.quals_from_fullname(name))
    return sn.QualName(
        name=sn.get_specialized_name(current_base_name, *quals),
        module=name.module,
    )
def compile_TypeName(
    node: qlast.TypeName,
    *,
    schema: s_schema.Schema,
    modaliases: Mapping[Optional[str], str],
    localnames: AbstractSet[str] = frozenset(),
) -> None:
    """Normalize a type name AST node in place.

    The main type reference gets an explicit module resolved from the
    schema/aliases; subtypes are normalized recursively.
    """
    maintype_ref = node.maintype
    if isinstance(maintype_ref, qlast.ObjectRef):
        # 'array' and 'tuple' are collection constructors and never
        # take a module; local names are skipped as well.
        skip = {'array', 'tuple', *localnames}
        if not maintype_ref.module and maintype_ref.name not in skip:
            resolved = schema.get(
                maintype_ref.name,
                default=None,
                module_aliases=modaliases,
            )
            if resolved is not None:
                resolved_name = resolved.get_name(schema)
                assert isinstance(resolved_name, sn.QualName)
                maintype_ref.module = resolved_name.module
            elif None in modaliases:
                # Even if the name was not resolved in the schema it
                # may be the name of the object being defined, so the
                # default module applies.  Names that must be ignored
                # have already been filtered via localnames.
                maintype_ref.module = modaliases[None]

    if node.subtypes is not None:
        for subtype in node.subtypes:
            normalize(
                subtype,
                schema=schema,
                modaliases=modaliases,
                localnames=localnames,
            )
def scalar_type_to_python_type(stype: s_types.Type,
                               schema: s_schema.Schema) -> type:
    """Map a scalar schema type to the Python type representing it.

    Raises UnsupportedExpressionError for scalars with no Python
    representation.
    """
    mapping = {
        'std::str': str,
        'std::anyint': int,
        'std::anyfloat': float,
        'std::decimal': decimal.Decimal,
        'std::bigint': decimal.Decimal,
        'std::bool': bool,
        'std::json': str,
        'std::uuid': uuidgen.UUID,
    }

    for schema_name, py_type in mapping.items():
        schema_base = schema.get(schema_name)
        assert isinstance(schema_base, s_inh.InheritingObject)
        if stype.issubclass(schema, schema_base):
            return py_type

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def _validate_base_refs(
    cls,
    schema: s_schema.Schema,
    base_refs: Iterable[so.InheritingObjectT],
    astnode: qlast.ObjectDDL,
    context: sd.CommandContext,
) -> so.ObjectList[so.InheritingObjectT]:
    """Build the base list, substituting the default base when empty."""
    classname = cls._classname_from_ast(schema, astnode, context)
    bases = so.ObjectList[so.InheritingObjectT].create(schema, base_refs)
    if bases:
        return bases

    mcls = cls.get_schema_metaclass()
    if classname in mcls.get_root_classes():
        # Root classes legitimately have no bases.
        return bases

    default_base = mcls.get_default_base_name()
    if default_base is not None and classname != default_base:
        bases = so.ObjectList[so.InheritingObjectT].create(
            schema,
            [schema.get(default_base)],
        )

    return bases
def _cmd_tree_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> CreateConstraint:
    """Build a CreateConstraint command tree from a DDL AST node.

    Handles both concrete (CREATE CONSTRAINT on a subject) and abstract
    (CREATE ABSTRACT CONSTRAINT) forms, fills in the implicit
    std::bool return type, and records the subject expression if one
    was given.
    """
    cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    if isinstance(astnode, qlast.CreateConcreteConstraint):
        if astnode.delegated:
            cmd.set_attribute_value('delegated', astnode.delegated)

        args = cls._constraint_args_from_ast(schema, astnode, context)
        if args:
            cmd.set_attribute_value('args', args)

    elif isinstance(astnode, qlast.CreateConstraint):
        params = cls._get_param_desc_from_ast(
            schema, context.modaliases, astnode)

        # Abstract constraint params are restricted: positional only,
        # no defaults.
        for param in params:
            if param.get_kind(schema) is ft.ParameterKind.NAMED_ONLY:
                raise errors.InvalidConstraintDefinitionError(
                    'named only parameters are not allowed '
                    'in this context',
                    context=astnode.context)

            if param.get_default(schema) is not None:
                raise errors.InvalidConstraintDefinitionError(
                    'constraints do not support parameters '
                    'with defaults',
                    context=astnode.context)

    # Constraints implicitly return a singleton bool.
    if cmd.get_attribute_value('return_type') is None:
        cmd.set_attribute_value(
            'return_type',
            schema.get('std::bool'),
        )

    if cmd.get_attribute_value('return_typemod') is None:
        cmd.set_attribute_value(
            'return_typemod',
            ft.TypeModifier.SINGLETON,
        )

    assert isinstance(astnode, (qlast.CreateConstraint,
                                qlast.CreateConcreteConstraint))
    # 'subjectexpr' can be present in either astnode type
    if astnode.subjectexpr:
        orig_text = cls.get_orig_expr_text(schema, astnode, 'subjectexpr')
        subjectexpr = s_expr.Expression.from_ast(
            astnode.subjectexpr,
            schema,
            context.modaliases,
            orig_text=orig_text,
        )
        cmd.set_attribute_value(
            'subjectexpr',
            subjectexpr,
        )

    cls._validate_subcommands(astnode)
    assert isinstance(cmd, CreateConstraint)
    return cmd
def _populate_concrete_constraint_attrs(
        self,
        schema: s_schema.Schema,
        subject_obj: Optional[so.Object],
        *,
        name: str,
        subjectexpr: Optional[s_expr.Expression] = None,
        sourcectx: Optional[c_parsing.ParserContext] = None,
        args: Any = None,
        **kwargs: Any) -> None:
    """Compute and set the attributes of a concrete constraint.

    Inlines the constraint arguments and the subject expression into
    the abstract constraint's expression, compiles the result
    (checking that it yields std::bool), and records the final
    attributes on this command via set_attribute_value.
    """
    from edb.ir import ast as ir_ast
    from edb.ir import utils as ir_utils

    constr_base = schema.get(name, type=Constraint)

    orig_subjectexpr = subjectexpr
    orig_subject = subject_obj
    base_subjectexpr = constr_base.get_field_value(schema, 'subjectexpr')
    if subjectexpr is None:
        subjectexpr = base_subjectexpr
    elif (base_subjectexpr is not None
            and subjectexpr.text != base_subjectexpr.text):
        # A base constraint's subject expression may not be overridden.
        raise errors.InvalidConstraintDefinitionError(
            f'subjectexpr is already defined for {name!r}')

    if (isinstance(subject_obj, s_scalars.ScalarType)
            and constr_base.get_is_aggregate(schema)):
        raise errors.InvalidConstraintDefinitionError(
            f'{constr_base.get_verbosename(schema)} may not '
            f'be used on scalar types')

    # The effective subject is either the subject expression's AST or
    # the subject schema object itself.
    if subjectexpr is not None:
        subject_ql = subjectexpr.qlast
        subject = subject_ql
    else:
        subject = subject_obj

    expr: s_expr.Expression = constr_base.get_field_value(schema, 'expr')
    if not expr:
        raise errors.InvalidConstraintDefinitionError(
            f'missing constraint expression in {name!r}')

    # Re-parse instead of using expr.qlast, because we mutate
    # the AST below.
    expr_ql = qlparser.parse(expr.text)

    if not args:
        args = constr_base.get_field_value(schema, 'args')

    attrs = dict(kwargs)
    inherited = dict()
    if orig_subjectexpr is not None:
        attrs['subjectexpr'] = orig_subjectexpr
    else:
        base_subjectexpr = constr_base.get_subjectexpr(schema)
        if base_subjectexpr is not None:
            attrs['subjectexpr'] = base_subjectexpr
            inherited['subjectexpr'] = True

    errmessage = attrs.get('errmessage')
    if not errmessage:
        errmessage = constr_base.get_errmessage(schema)
        inherited['errmessage'] = True
    attrs['errmessage'] = errmessage

    if subject is not orig_subject:
        # subject has been redefined
        assert isinstance(subject, qlast.Base)
        qlutils.inline_anchors(
            expr_ql, anchors={qlast.Subject().name: subject})
        subject = orig_subject

    if args:
        # Substitute actual argument ASTs for the formal parameters of
        # the abstract constraint.
        args_ql: List[qlast.Base] = [
            qlast.Path(steps=[qlast.Subject()]),
        ]
        args_ql.extend(arg.qlast for arg in args)
        args_map = qlutils.index_parameters(
            args_ql,
            parameters=constr_base.get_params(schema),
            schema=schema,
        )
        qlutils.inline_parameters(expr_ql, args_map)
        attrs['args'] = args

    # NOTE(review): `expr` is an s_expr.Expression here, so this
    # equality with a plain string looks like it can never be true —
    # confirm whether `expr.text == '__subject__'` was intended.
    if expr == '__subject__':
        expr_context = sourcectx
    else:
        expr_context = None

    assert subject is not None
    final_expr = s_expr.Expression.compiled(
        s_expr.Expression.from_ast(expr_ql, schema, {}),
        schema=schema,
        options=qlcompiler.CompilerOptions(
            anchors={qlast.Subject().name: subject},
        ),
    )

    bool_t: s_scalars.ScalarType = schema.get('std::bool')
    assert isinstance(final_expr.irast, ir_ast.Statement)

    expr_type = final_expr.irast.stype
    if not expr_type.issubclass(schema, bool_t):
        raise errors.InvalidConstraintDefinitionError(
            f'{name} constraint expression expected '
            f'to return a bool value, got '
            f'{expr_type.get_verbosename(schema)}',
            context=expr_context)

    if (subjectexpr is not None
            and isinstance(subject_obj, s_types.Type)
            and subject_obj.is_object_type()):
        final_subjectexpr = s_expr.Expression.compiled(
            subjectexpr,
            schema=schema,
            options=qlcompiler.CompilerOptions(
                anchors={qlast.Subject().name: subject},
                singletons=frozenset({subject_obj}),
            ),
        )
        assert isinstance(final_subjectexpr.irast, ir_ast.Statement)

        if final_subjectexpr.irast.cardinality.is_multi():
            # Multi subject expressions may only reference one
            # link/property path.
            refs = ir_utils.get_longest_paths(final_expr.irast)
            if len(refs) > 1:
                raise errors.InvalidConstraintDefinitionError(
                    "Constraint with multi cardinality may not "
                    "reference multiple links or properties",
                    context=expr_context)

    attrs['return_type'] = constr_base.get_return_type(schema)
    attrs['return_typemod'] = constr_base.get_return_typemod(schema)
    attrs['finalexpr'] = final_expr
    attrs['params'] = constr_base.get_params(schema)
    attrs['is_abstract'] = False

    for k, v in attrs.items():
        self.set_attribute_value(k, v, inherited=bool(inherited.get(k)))
def object_type_to_python_type(
        objtype: s_types.Type,
        schema: s_schema.Schema, *,
        base_class: typing.Optional[type] = None,
        _memo: typing.Optional[typing.Mapping[s_types.Type, type]] = None,
) -> type:
    """Build a frozen dataclass mirroring a schema object type.

    Object-typed pointers recurse into this function (memoized via
    *_memo* to handle cycles); scalar pointers map through
    scalar_type_to_python_type.  Multi pointers become FrozenSet
    fields.  Pointers with an exclusive constraint participate in
    equality/hashing; others do not.  Subclass dataclasses generated
    for children of pointer targets are stored in the ``_subclasses``
    namespace entry.
    """
    if _memo is None:
        _memo = {}
    fields = []
    subclasses = []

    for pn, p in objtype.get_pointers(schema).items(schema):
        # 'id' and '__type__' are system pointers, not data fields.
        if pn in ('id', '__type__'):
            continue

        ptype = p.get_target(schema)

        if ptype.is_object_type():
            pytype = _memo.get(ptype)
            if pytype is None:
                pytype = object_type_to_python_type(
                    ptype, schema, base_class=base_class, _memo=_memo)
                _memo[ptype] = pytype

                for subtype in ptype.children(schema):
                    subclasses.append(
                        object_type_to_python_type(
                            subtype, schema,
                            base_class=pytype, _memo=_memo))
        else:
            pytype = scalar_type_to_python_type(ptype, schema)

        is_multi = p.get_cardinality(schema) is qltypes.Cardinality.MANY
        if is_multi:
            pytype = typing.FrozenSet[pytype]

        default = p.get_default(schema)
        if default is None:
            if p.get_required(schema):
                # Required with no default: the dataclass field must be
                # supplied explicitly.
                default = dataclasses.MISSING
        else:
            # Evaluate the declared EdgeQL default down to a Python
            # constant.
            default = ql_compiler.evaluate_to_python_val(
                default.text, schema=schema)
            if is_multi and not isinstance(default, frozenset):
                default = frozenset((default,))

        constraints = p.get_constraints(schema).objects(schema)
        exclusive = schema.get('std::exclusive')
        unique = (
            not ptype.is_object_type()
            and any(c.issubclass(schema, exclusive) for c in constraints)
        )
        field = dataclasses.field(
            compare=unique,
            hash=unique,
            repr=True,
            default=default,
        )
        fields.append((pn, pytype, field))

    return dataclasses.make_dataclass(
        objtype.get_name(schema).name,
        fields=fields,
        bases=(base_class,) if base_class is not None else (),
        frozen=True,
        namespace={'_subclasses': subclasses},
    )
def generate_structure(schema: s_schema.Schema) -> SchemaReflectionParts:
    """Generate schema reflection structure from Python schema classes.

    Returns:
        A quadruple (as a SchemaReflectionParts instance) containing:
            - Delta, which, when applied to stdlib, yields an enhanced
              version of the `schema` module that contains all types
              and properties, not just those that are publicly exposed
              for introspection.
            - A mapping, containing type layout description for all
              schema classes.
            - A sequence of EdgeQL queries necessary to introspect
              a database schema.
            - A sequence of EdgeQL queries necessary to introspect
              global objects, such as roles and databases.
    """
    delta = sd.DeltaRoot()
    classlayout: Dict[
        Type[s_obj.Object],
        SchemaTypeLayout,
    ] = {}

    ordered_link = schema.get('schema::ordered', type=s_links.Link)

    py_classes = []

    # Helper SQL functions used by the introspection queries.
    schema = _run_ddl(
        '''
            CREATE FUNCTION sys::_get_pg_type_for_scalar_type(
                typeid: std::uuid
            ) -> std::int64 {
                USING SQL $$
                    SELECT
                        coalesce(
                            (
                                SELECT tn::regtype::oid
                                FROM edgedb._get_base_scalar_type_map()
                                    AS m(tid uuid, tn text)
                                WHERE m.tid = "typeid"
                            ),
                            (
                                SELECT typ.oid
                                FROM pg_catalog.pg_type typ
                                WHERE typ.typname =
                                    "typeid"::text || '_domain'
                            ),
                            edgedb.raise(
                                NULL::bigint,
                                'invalid_parameter_value',
                                msg => (
                                    'cannot determine OID of '
                                    || typeid::text
                                )
                            )
                        )::bigint
                $$;
                SET volatility := 'STABLE';
            };

            CREATE FUNCTION sys::_expr_from_json(
                data: json
            ) -> OPTIONAL tuple<text: str, refs: array<uuid>> {
                USING SQL $$
                    SELECT
                        "data"->>'text' AS text,
                        coalesce(r.refs, ARRAY[]::uuid[]) AS refs
                    FROM
                        (SELECT array_agg(v::uuid) AS refs
                         FROM jsonb_array_elements_text("data"->'refs') AS v
                        ) AS r
                    WHERE jsonb_typeof("data") != 'null'
                $$;
                SET volatility := 'IMMUTABLE';
            };
        ''',
        schema=schema,
        delta=delta,
    )

    for py_cls in s_obj.ObjectMeta.get_schema_metaclasses():
        if isinstance(py_cls, adapter.Adapter):
            continue

        if py_cls is s_obj.GlobalObject:
            continue

        py_classes.append(py_cls)

    read_sets: Dict[Type[s_obj.Object], List[str]] = {}

    # First pass: make sure every Python schema class has a
    # corresponding reflected object type with all regular fields.
    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_objtype = schema.get(
            rschema_name,
            type=s_objtypes.ObjectType,
            default=None,
        )

        bases = []
        for base in py_cls.__bases__:
            if base in py_classes:
                bases.append(get_schema_name_for_pycls(base))

        default_base = get_default_base_for_pycls(py_cls)
        if not bases and rschema_name != default_base:
            bases.append(default_base)

        reflection = py_cls.get_reflection_method()
        is_simple_wrapper = issubclass(py_cls, s_types.CollectionExprAlias)

        if schema_objtype is None:
            as_abstract = (
                reflection is s_obj.ReflectionMethod.REGULAR
                and not is_simple_wrapper
            )

            schema = _run_ddl(
                f'''
                    CREATE {'ABSTRACT' if as_abstract else ''}
                    TYPE {rschema_name}
                    EXTENDING {', '.join(str(b) for b in bases)};
                ''',
                schema=schema,
                delta=delta,
            )

            schema_objtype = schema.get(
                rschema_name, type=s_objtypes.ObjectType)
        else:
            # Type already exists (from stdlib); reconcile its bases.
            ex_bases = schema_objtype.get_bases(schema).names(schema)
            _, added_bases = s_inh.delta_bases(ex_bases, bases)

            if added_bases:
                for subset, position in added_bases:
                    if isinstance(position, tuple):
                        position_clause = (
                            f'{position[0]} {position[1].name}'
                        )
                    else:
                        position_clause = position

                    bases_expr = ', '.join(str(t.name) for t in subset)

                    stmt = f'''
                        ALTER TYPE {rschema_name} {{
                            EXTENDING {bases_expr} {position_clause}
                        }}
                    '''

                    schema = _run_ddl(
                        stmt,
                        schema=schema,
                        delta=delta,
                    )

        if reflection is s_obj.ReflectionMethod.NONE:
            continue

        referrers = py_cls.get_referring_classes()

        if reflection is s_obj.ReflectionMethod.AS_LINK:
            if not referrers:
                raise RuntimeError(
                    f'schema class {py_cls.__name__} is declared with '
                    f'AS_LINK reflection method but is not referenced '
                    f'in any RefDict')

        is_concrete = not schema_objtype.get_abstract(schema)

        if (
            is_concrete
            and not is_simple_wrapper
            and any(
                not b.get_abstract(schema)
                for b in schema_objtype.get_ancestors(schema).objects(schema)
            )
        ):
            raise RuntimeError(
                f'non-abstract {schema_objtype.get_verbosename(schema)} '
                f'has non-abstract ancestors')

        read_shape = read_sets[py_cls] = []

        if is_concrete:
            read_shape.append(
                '_tname := .__type__[IS schema::ObjectType].name')

        classlayout[py_cls] = {}
        ownfields = py_cls.get_ownfields()

        for fn, field in py_cls.get_fields().items():
            if (
                field.ephemeral
                or (field.reflection_method
                    is not s_obj.ReflectionMethod.REGULAR)
            ):
                continue

            storage = _classify_object_field(field)

            ptr = schema_objtype.maybe_get_ptr(schema, sn.UnqualName(fn))

            if fn in ownfields:
                qual = "REQUIRED" if field.required else "OPTIONAL"
                if ptr is None:
                    schema = _run_ddl(
                        f'''
                            ALTER TYPE {rschema_name} {{
                                CREATE {qual}
                                {storage.ptrkind} {fn}
                                -> {storage.ptrtype};
                            }}
                        ''',
                        schema=schema,
                        delta=delta,
                    )
                    ptr = schema_objtype.getptr(schema, sn.UnqualName(fn))

                if storage.shadow_ptrkind is not None:
                    # Shadow pointer holding the internal representation
                    # of the field value.
                    pn = f'{fn}__internal'
                    internal_ptr = schema_objtype.maybe_get_ptr(
                        schema, sn.UnqualName(pn))
                    if internal_ptr is None:
                        ptrkind = storage.shadow_ptrkind
                        ptrtype = storage.shadow_ptrtype
                        schema = _run_ddl(
                            f'''
                                ALTER TYPE {rschema_name} {{
                                    CREATE {qual}
                                    {ptrkind} {pn} -> {ptrtype};
                                }}
                            ''',
                            schema=schema,
                            delta=delta,
                        )
            else:
                assert ptr is not None

            if is_concrete:
                read_ptr = fn

                if field.type_is_generic_self:
                    read_ptr = f'{read_ptr}[IS {rschema_name}]'

                if field.reflection_proxy:
                    proxy_type, proxy_link = field.reflection_proxy
                    read_ptr = (
                        f'{read_ptr}: {{name, value := .{proxy_link}.id}}'
                    )

                if ptr.issubclass(schema, ordered_link):
                    read_ptr = f'{read_ptr} ORDER BY @index'

                read_shape.append(read_ptr)

                if storage.shadow_ptrkind is not None:
                    read_shape.append(f'{fn}__internal')

            if field.reflection_proxy:
                proxy_type_name, proxy_link_name = field.reflection_proxy
                proxy_obj = schema.get(
                    proxy_type_name, type=s_objtypes.ObjectType)
                proxy_link_obj = proxy_obj.getptr(
                    schema, sn.UnqualName(proxy_link_name))
                tgt = proxy_link_obj.get_target(schema)
            else:
                tgt = ptr.get_target(schema)
            assert tgt is not None
            cardinality = ptr.get_cardinality(schema)
            assert cardinality is not None
            classlayout[py_cls][fn] = SchemaFieldDesc(
                fieldname=fn,
                type=tgt,
                cardinality=cardinality,
                properties={},
                storage=storage,
                is_ordered=ptr.issubclass(schema, ordered_link),
                reflection_proxy=field.reflection_proxy,
            )

    # Second pass: deal with RefDicts, which are reflected as links.
    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_cls = schema.get(rschema_name, type=s_objtypes.ObjectType)

        for refdict in py_cls.get_own_refdicts().values():
            ref_ptr = schema_cls.maybe_get_ptr(
                schema, sn.UnqualName(refdict.attr))
            ref_cls = refdict.ref_cls
            assert issubclass(ref_cls, s_obj.Object)
            shadow_ref_ptr = None
            reflect_as_link = (
                ref_cls.get_reflection_method()
                is s_obj.ReflectionMethod.AS_LINK
            )

            if reflect_as_link:
                reflection_link = ref_cls.get_reflection_link()
                assert reflection_link is not None
                target_field = ref_cls.get_field(reflection_link)
                target_cls = target_field.type
                shadow_pn = f'{refdict.attr}__internal'

                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            CREATE OPTIONAL MULTI LINK {shadow_pn}
                            EXTENDING schema::reference
                             -> {get_schema_name_for_pycls(ref_cls)} {{
                                 ON TARGET DELETE ALLOW;
                             }};
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )
                shadow_ref_ptr = schema_cls.getptr(
                    schema, sn.UnqualName(shadow_pn))
            else:
                target_cls = ref_cls

            if ref_ptr is None:
                ptr_type = get_schema_name_for_pycls(target_cls)
                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            CREATE OPTIONAL MULTI LINK {refdict.attr}
                            EXTENDING schema::reference
                             -> {ptr_type} {{
                                 ON TARGET DELETE ALLOW;
                             }};
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )
                ref_ptr = schema_cls.getptr(
                    schema, sn.UnqualName(refdict.attr))
            else:
                schema = _run_ddl(
                    f'''
                        ALTER TYPE {rschema_name} {{
                            ALTER LINK {refdict.attr}
                            ON TARGET DELETE ALLOW;
                        }}
                    ''',
                    schema=schema,
                    delta=delta,
                )

            assert isinstance(ref_ptr, s_links.Link)

            if py_cls not in classlayout:
                classlayout[py_cls] = {}

            # First, fields declared to be reflected as link properties.
            props = _get_reflected_link_props(
                ref_ptr=ref_ptr,
                target_cls=ref_cls,
                schema=schema,
            )

            if reflect_as_link:
                # Then, because it's a passthrough reflection, all scalar
                # fields of the proxy object.
                fields_as_props = [
                    f
                    for f in ref_cls.get_ownfields().values()
                    if (
                        not f.ephemeral
                        and (f.reflection_method
                             is not s_obj.ReflectionMethod.AS_LINK)
                        and f.name != refdict.backref_attr
                        and f.name != ref_cls.get_reflection_link()
                    )
                ]

                extra_props = _classify_scalar_object_fields(
                    fields_as_props)

                for fn, storage in {**props, **extra_props}.items():
                    prop_ptr = ref_ptr.maybe_get_ptr(
                        schema, sn.UnqualName(fn))
                    if prop_ptr is None:
                        pty = storage.ptrtype
                        schema = _run_ddl(
                            f'''
                                ALTER TYPE {rschema_name} {{
                                    ALTER LINK {refdict.attr} {{
                                        CREATE OPTIONAL PROPERTY {fn}
                                        -> {pty};
                                    }}
                                }}
                            ''',
                            schema=schema,
                            delta=delta,
                        )

            if shadow_ref_ptr is not None:
                assert isinstance(shadow_ref_ptr, s_links.Link)
                shadow_pn = shadow_ref_ptr.get_shortname(schema).name
                for fn, storage in props.items():
                    prop_ptr = shadow_ref_ptr.maybe_get_ptr(
                        schema, sn.UnqualName(fn))
                    if prop_ptr is None:
                        pty = storage.ptrtype
                        schema = _run_ddl(
                            f'''
                                ALTER TYPE {rschema_name} {{
                                    ALTER LINK {shadow_pn} {{
                                        CREATE OPTIONAL PROPERTY {fn}
                                        -> {pty};
                                    }}
                                }}
                            ''',
                            schema=schema,
                            delta=delta,
                        )

    # Third pass: record the layout of all RefDict links (including
    # inherited ones) and build the read shapes.
    for py_cls in py_classes:
        rschema_name = get_schema_name_for_pycls(py_cls)
        schema_cls = schema.get(rschema_name, type=s_objtypes.ObjectType)

        is_concrete = not schema_cls.get_abstract(schema)
        read_shape = read_sets[py_cls]

        for refdict in py_cls.get_refdicts():
            if py_cls not in classlayout:
                classlayout[py_cls] = {}

            ref_ptr = schema_cls.getptr(
                schema, sn.UnqualName(refdict.attr), type=s_links.Link)
            tgt = ref_ptr.get_target(schema)
            assert tgt is not None
            cardinality = ref_ptr.get_cardinality(schema)
            assert cardinality is not None
            classlayout[py_cls][refdict.attr] = SchemaFieldDesc(
                fieldname=refdict.attr,
                type=tgt,
                cardinality=cardinality,
                properties={},
                is_ordered=ref_ptr.issubclass(schema, ordered_link),
                reflection_proxy=None,
                is_refdict=True,
            )

            target_cls = refdict.ref_cls

            props = _get_reflected_link_props(
                ref_ptr=ref_ptr,
                target_cls=target_cls,
                schema=schema,
            )

            reflect_as_link = (
                target_cls.get_reflection_method()
                is s_obj.ReflectionMethod.AS_LINK
            )

            prop_layout = {}
            extra_prop_layout = {}

            for fn, storage in props.items():
                prop_ptr = ref_ptr.getptr(schema, sn.UnqualName(fn))
                prop_tgt = prop_ptr.get_target(schema)
                assert prop_tgt is not None
                prop_layout[fn] = (prop_tgt, storage.fieldtype)

            if reflect_as_link:
                # Then, because it's a passthrough reflection, all scalar
                # fields of the proxy object.
                fields_as_props = [
                    f
                    for f in target_cls.get_ownfields().values()
                    if (
                        not f.ephemeral
                        and (f.reflection_method
                             is not s_obj.ReflectionMethod.AS_LINK)
                        and f.name != refdict.backref_attr
                        and f.name != target_cls.get_reflection_link()
                    )
                ]

                extra_props = _classify_scalar_object_fields(
                    fields_as_props)

                for fn, storage in extra_props.items():
                    prop_ptr = ref_ptr.getptr(schema, sn.UnqualName(fn))
                    prop_tgt = prop_ptr.get_target(schema)
                    assert prop_tgt is not None
                    extra_prop_layout[fn] = (prop_tgt, storage.fieldtype)
            else:
                extra_prop_layout = {}

            classlayout[py_cls][refdict.attr].properties.update({
                **prop_layout, **extra_prop_layout,
            })

            if reflect_as_link:
                # NOTE(review): this reads `ref_cls`, which is only
                # bound in the *second* pass loop above — possibly
                # `target_cls` was intended here; verify.
                shadow_tgt = schema.get(
                    get_schema_name_for_pycls(ref_cls),
                    type=s_objtypes.ObjectType,
                )

                classlayout[py_cls][f'{refdict.attr}__internal'] = (
                    SchemaFieldDesc(
                        fieldname=refdict.attr,
                        type=shadow_tgt,
                        cardinality=qltypes.SchemaCardinality.Many,
                        properties=prop_layout,
                        is_refdict=True,
                    )
                )

            if is_concrete:
                read_ptr = refdict.attr
                prop_shape_els = []

                if reflect_as_link:
                    read_ptr = f'{read_ptr}__internal'
                    ref_ptr = schema_cls.getptr(
                        schema,
                        sn.UnqualName(f'{refdict.attr}__internal'),
                    )

                for fn in props:
                    prop_shape_els.append(f'@{fn}')

                if prop_shape_els:
                    prop_shape = ',\n'.join(prop_shape_els)
                    read_ptr = f'{read_ptr}: {{id, {prop_shape}}}'

                if ref_ptr.issubclass(schema, ordered_link):
                    read_ptr = f'{read_ptr} ORDER BY @index'

                read_shape.append(read_ptr)

    # Finally, assemble the introspection queries.
    local_parts = []
    global_parts = []
    for py_cls, shape_els in read_sets.items():
        if (
            not shape_els
            # The CollectionExprAlias family needs to be excluded
            # because TupleExprAlias and ArrayExprAlias inherit from
            # concrete classes and so are picked up from those.
            or issubclass(py_cls, s_types.CollectionExprAlias)
        ):
            continue

        rschema_name = get_schema_name_for_pycls(py_cls)
        shape = ',\n'.join(shape_els)
        qry = f'''
            SELECT {rschema_name} {{
                {shape}
            }}
        '''
        if not issubclass(py_cls, (s_types.Collection, s_obj.GlobalObject)):
            qry += ' FILTER NOT .builtin'

        if issubclass(py_cls, s_obj.GlobalObject):
            global_parts.append(qry)
        else:
            local_parts.append(qry)

    delta.canonical = True
    return SchemaReflectionParts(
        intro_schema_delta=delta,
        class_layout=classlayout,
        local_intro_parts=local_parts,
        global_intro_parts=global_parts,
    )
def _create_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Create the operator in the schema, validating overload rules.

    Before delegating to the generic create machinery, this rejects an
    exact-signature duplicate.  After the operator object exists
    (``self.scls``), it enforces the overloading invariants against every
    other operator sharing the same short name:

    * an operator must have at least one operand;
    * a ``recursive`` operator requires all-array or all-tuple operands;
    * all overloads must agree on the return type modifier;
    * derivative and non-derivative operators of the same name may not
      coexist;
    * recursive and non-recursive overloads may not share the same
      operand collection shape.

    :param schema: the schema to mutate.
    :param context: the surrounding DDL command context.
    :returns: the updated schema.
    :raises errors.InvalidOperatorDefinitionError:
        for a malformed operator definition.
    :raises errors.DuplicateOperatorDefinitionError:
        for an illegal overload of an existing operator.
    """
    fullname = self.classname
    shortname = sn.shortname_from_fullname(fullname)
    schema, cp = self._get_param_desc_from_delta(schema, context, self)
    # Human-readable signature used consistently in every diagnostic below.
    signature = f'{shortname}({", ".join(p.as_str(schema) for p in cp)})'

    func = schema.get(fullname, None)
    if func:
        raise errors.InvalidOperatorDefinitionError(
            f'cannot create the `{signature}` operator: '
            f'an operator with the same signature '
            f'is already defined',
            context=self.source_context)

    schema = super()._create_begin(schema, context)

    # From here on, query the freshly created schema object rather than
    # the delta, so we see fully resolved/inherited field values.
    params: s_func.FuncParameterList = self.scls.get_params(schema)
    fullname = self.scls.get_name(schema)
    shortname = sn.shortname_from_fullname(fullname)
    return_typemod = self.scls.get_return_typemod(schema)
    assert isinstance(self.scls, Operator)
    recursive = self.scls.get_recursive(schema)
    derivative_of = self.scls.get_derivative_of(schema)

    # an operator must have operands
    if len(params) == 0:
        raise errors.InvalidOperatorDefinitionError(
            f'cannot create the `{signature}` operator: '
            f'an operator must have operands',
            context=self.source_context)

    # We'll need to make sure that there's no mix of recursive and
    # non-recursive operators being overloaded.
    all_arrays = all_tuples = True
    for param in params.objects(schema):
        ptype = param.get_type(schema)
        all_arrays = all_arrays and ptype.is_array()
        all_tuples = all_tuples and ptype.is_tuple(schema)

    # It's illegal to declare an operator as recursive unless all
    # of its operands are the same basic type of collection.
    if recursive and not (all_arrays or all_tuples):
        raise errors.InvalidOperatorDefinitionError(
            f'cannot create the `{signature}` operator: '
            f'operands of a recursive operator must either be '
            f'all arrays or all tuples',
            context=self.source_context)

    # Cross-check against every other overload of this operator name.
    for oper in schema.get_operators(shortname, ()):
        if oper == self.scls:
            # Skip the operator we have just created.
            continue

        oper_return_typemod = oper.get_return_typemod(schema)
        if oper_return_typemod != return_typemod:
            raise errors.DuplicateOperatorDefinitionError(
                f'cannot create the `{signature}` '
                f'operator: overloading another operator with different '
                f'return type {oper_return_typemod.to_edgeql()} '
                f'{oper.get_return_type(schema).name}',
                context=self.source_context)

        oper_derivative_of = oper.get_derivative_of(schema)
        if oper_derivative_of:
            raise errors.DuplicateOperatorDefinitionError(
                f'cannot create the `{signature}` '
                f'operator: there exists a derivative operator of the '
                f'same name',
                context=self.source_context)
        elif derivative_of:
            raise errors.DuplicateOperatorDefinitionError(
                f'cannot create `{signature}` '
                f'as a derivative operator: there already exists an '
                f'operator of the same name',
                context=self.source_context)

        # Check if there is a recursive/non-recursive operator
        # overloading.
        oper_recursive = oper.get_recursive(schema)
        if recursive != oper_recursive:
            oper_signature = oper.get_display_signature(schema)
            oper_all_arrays = oper_all_tuples = True
            for param in oper.get_params(schema).objects(schema):
                ptype = param.get_type(schema)
                oper_all_arrays = oper_all_arrays and ptype.is_array()
                oper_all_tuples = (oper_all_tuples
                                   and ptype.is_tuple(schema))

            # Only a conflict when both overloads have the same
            # collection shape; differing shapes may legally mix
            # recursive and non-recursive variants.
            if (all_arrays == oper_all_arrays
                    and all_tuples == oper_all_tuples):
                new_rec = 'recursive' if recursive else 'non-recursive'
                oper_rec = \
                    'recursive' if oper_recursive else 'non-recursive'

                raise errors.InvalidOperatorDefinitionError(
                    f'cannot create the {new_rec} `{signature}` operator: '
                    f'overloading a {oper_rec} operator '
                    f'`{oper_signature}` with a {new_rec} one '
                    f'is not allowed',
                    context=self.source_context)

    return schema
def object_type_to_python_type( objtype: s_objtypes.ObjectType, schema: s_schema.Schema, *, base_class: Optional[type] = None, _memo: Optional[Dict[s_types.Type, type]] = None, ) -> type: if _memo is None: _memo = {} default: Any fields = [] subclasses = [] for pn, p in objtype.get_pointers(schema).items(schema): str_pn = str(pn) if str_pn in ('id', '__type__'): continue ptype = p.get_target(schema) assert ptype is not None if isinstance(ptype, s_objtypes.ObjectType): pytype = _memo.get(ptype) if pytype is None: pytype = object_type_to_python_type( ptype, schema, base_class=base_class, _memo=_memo) _memo[ptype] = pytype for subtype in ptype.children(schema): subclasses.append( object_type_to_python_type( subtype, schema, base_class=pytype, _memo=_memo)) else: pytype = scalar_type_to_python_type(ptype, schema) ptr_card = p.get_cardinality(schema) is_multi = ptr_card.is_multi() if is_multi: pytype = FrozenSet[pytype] # type: ignore default = p.get_default(schema) if default is None: if p.get_required(schema): default = dataclasses.MISSING else: default = qlcompiler.evaluate_to_python_val( default.text, schema=schema) if is_multi and not isinstance(default, frozenset): default = frozenset((default,)) constraints = p.get_constraints(schema).objects(schema) exclusive = schema.get('std::exclusive', type=s_constr.Constraint) unique = ( not ptype.is_object_type() and any(c.issubclass(schema, exclusive) for c in constraints) ) field = dataclasses.field( compare=unique, hash=unique, repr=True, default=default, ) fields.append((str_pn, pytype, field)) bases: Tuple[type, ...] if base_class is not None: bases = (base_class,) else: bases = () ptype_dataclass = dataclasses.make_dataclass( objtype.get_name(schema).name, fields=fields, bases=bases, frozen=True, namespace={'_subclasses': subclasses}, ) assert isinstance(ptype_dataclass, type) return ptype_dataclass
def _populate_concrete_constraint_attrs(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
    subject_obj: Optional[so.Object],
    *,
    name: sn.QualName,
    subjectexpr: Optional[s_expr.Expression] = None,
    subjectexpr_inherited: bool = False,
    sourcectx: Optional[c_parsing.ParserContext] = None,
    args: Any = None,
    **kwargs: Any
) -> None:
    """Compute and set the attributes of a concrete constraint.

    Specializes the generic (abstract) base constraint for a concrete
    subject: substitutes the subject expression and the constraint
    arguments into the base's check expression, compiles it, validates
    it (must yield ``std::bool``; restricted path shapes; MULTI/aggregate
    rules), and records the resulting attributes on this command via
    ``set_attribute_value``.

    :param subject_obj: the schema object the constraint is attached to
        (a type or a pointer), or None.
    :param name: fully-qualified name of the constraint, used in
        diagnostics.
    :param subjectexpr: explicit subject expression, if any; otherwise
        inherited from the generic base.
    :param subjectexpr_inherited: whether *subjectexpr* was inherited
        rather than declared on this constraint.
    :param args: constraint arguments to inline into the expression.
    :param kwargs: additional attribute values to set verbatim.
    :raises errors.InvalidConstraintDefinitionError: on any validation
        failure.
    """
    from edb.ir import ast as ir_ast
    from edb.ir import utils as ir_utils
    from . import pointers as s_pointers
    from . import links as s_links
    from . import scalars as s_scalars

    bases = self.get_resolved_attribute_value(
        'bases',
        schema=schema,
        context=context,
    )
    if not bases:
        bases = self.scls.get_bases(schema)
    constr_base = bases.objects(schema)[0]
    # If we have a concrete base, then we should inherit all of
    # these attrs through the normal inherit_fields() mechanisms,
    # and populating them ourselves will just mess up
    # inherited_fields.
    if not constr_base.generic(schema):
        return

    orig_subjectexpr = subjectexpr
    orig_subject = subject_obj
    base_subjectexpr = constr_base.get_field_value(schema, 'subjectexpr')
    if subjectexpr is None:
        subjectexpr = base_subjectexpr
    elif (base_subjectexpr is not None
            and subjectexpr.text != base_subjectexpr.text):
        # A redeclared subject expression must not conflict with the
        # one fixed by the abstract base.
        raise errors.InvalidConstraintDefinitionError(
            f'subjectexpr is already defined for {name}'
        )

    if (isinstance(subject_obj, s_scalars.ScalarType)
            and constr_base.get_is_aggregate(schema)):
        raise errors.InvalidConstraintDefinitionError(
            f'{constr_base.get_verbosename(schema)} may not '
            f'be used on scalar types'
        )

    if subjectexpr is not None:
        # The effective subject is the AST of the subject expression.
        subject_ql = subjectexpr.qlast
        subject = subject_ql
    else:
        subject = subject_obj

    expr: s_expr.Expression = constr_base.get_field_value(schema, 'expr')
    if not expr:
        raise errors.InvalidConstraintDefinitionError(
            f'missing constraint expression in {name}')

    # Re-parse instead of using expr.qlast, because we mutate
    # the AST below.
    expr_ql = qlparser.parse(expr.text)

    if not args:
        args = constr_base.get_field_value(schema, 'args')

    attrs = dict(kwargs)
    # Tracks which of the attrs being set are inherited from the base.
    inherited = dict()
    if orig_subjectexpr is not None:
        attrs['subjectexpr'] = orig_subjectexpr
        inherited['subjectexpr'] = subjectexpr_inherited
    else:
        base_subjectexpr = constr_base.get_subjectexpr(schema)
        if base_subjectexpr is not None:
            attrs['subjectexpr'] = base_subjectexpr
            inherited['subjectexpr'] = True

    errmessage = attrs.get('errmessage')
    if not errmessage:
        errmessage = constr_base.get_errmessage(schema)
        inherited['errmessage'] = True
    attrs['errmessage'] = errmessage

    if subject is not orig_subject:
        # subject has been redefined
        assert isinstance(subject, qlast.Base)
        # Splice the subject expression AST in place of __subject__
        # references in the check expression.
        qlutils.inline_anchors(
            expr_ql, anchors={qlast.Subject().name: subject})
        subject = orig_subject

    if args:
        # Build the positional argument list: __subject__ first, then
        # the declared args, and inline them into the expression AST.
        args_ql: List[qlast.Base] = [
            qlast.Path(steps=[qlast.Subject()]),
        ]
        args_ql.extend(arg.qlast for arg in args)
        args_map = qlutils.index_parameters(
            args_ql,
            parameters=constr_base.get_params(schema),
            schema=schema,
        )
        qlutils.inline_parameters(expr_ql, args_map)
        attrs['args'] = args

    assert subject is not None
    final_expr = s_expr.Expression.compiled(
        s_expr.Expression.from_ast(expr_ql, schema, {}),
        schema=schema,
        options=qlcompiler.CompilerOptions(
            anchors={qlast.Subject().name: subject},
            path_prefix_anchor=qlast.Subject().name,
            apply_query_rewrites=not context.stdmode,
        ),
    )

    bool_t = schema.get('std::bool', type=s_scalars.ScalarType)
    assert isinstance(final_expr.irast, ir_ast.Statement)
    expr_type = final_expr.irast.stype
    if not expr_type.issubclass(schema, bool_t):
        raise errors.InvalidConstraintDefinitionError(
            f'{name} constraint expression expected '
            f'to return a bool value, got '
            f'{expr_type.get_verbosename(schema)}',
            context=sourcectx
        )

    if subjectexpr is not None:
        assert isinstance(subject_obj,
                          (s_types.Type, s_pointers.Pointer))
        singletons = frozenset({subject_obj})

        final_subjectexpr = s_expr.Expression.compiled(
            subjectexpr,
            schema=schema,
            options=qlcompiler.CompilerOptions(
                anchors={qlast.Subject().name: subject},
                path_prefix_anchor=qlast.Subject().name,
                singletons=singletons,
                apply_query_rewrites=not context.stdmode,
            ),
        )
        assert isinstance(final_subjectexpr.irast, ir_ast.Statement)

        # Validate path shapes in the compiled check expression: walk
        # each longest path back towards its root.
        refs = ir_utils.get_longest_paths(final_expr.irast)
        has_multi = False
        for ref in refs:
            while ref.rptr:
                rptr = ref.rptr
                if rptr.dir_cardinality.is_multi():
                    has_multi = True

                # We don't need to look further than the subject,
                # which is always valid. (And which is a singleton
                # in a constraint expression if it is itself a
                # singleton, regardless of other parts of the path.)
                if (
                    isinstance(rptr.ptrref, ir_ast.PointerRef)
                    and rptr.ptrref.id == subject_obj.id
                ):
                    break
                if (not isinstance(rptr.ptrref,
                                   ir_ast.TupleIndirectionPointerRef)
                        and rptr.ptrref.source_ptr is None
                        and rptr.source.rptr is not None):
                    if isinstance(subject, s_links.Link):
                        raise errors.InvalidConstraintDefinitionError(
                            "link constraints may not access "
                            "the link target",
                            context=sourcectx
                        )
                    else:
                        raise errors.InvalidConstraintDefinitionError(
                            "constraints cannot contain paths with more "
                            "than one hop",
                            context=sourcectx
                        )

                ref = rptr.source

        if has_multi and len(refs) > 1:
            raise errors.InvalidConstraintDefinitionError(
                "cannot reference multiple links or properties in a "
                "constraint where at least one link or property is MULTI",
                context=sourcectx
            )

        if has_multi and ir_utils.contains_set_of_op(
                final_subjectexpr.irast):
            raise errors.InvalidConstraintDefinitionError(
                "cannot use aggregate functions or operators "
                "in a non-aggregating constraint",
                context=sourcectx
            )

    attrs['finalexpr'] = final_expr
    attrs['params'] = constr_base.get_params(schema)
    inherited['params'] = True
    attrs['abstract'] = False

    for k, v in attrs.items():
        self.set_attribute_value(k, v, inherited=bool(inherited.get(k)))
def _populate_concrete_constraint_attrs(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        subject_obj: Optional[so.Object],
        *,
        name: sn.QualName,
        subjectexpr: Optional[s_expr.Expression] = None,
        sourcectx: Optional[c_parsing.ParserContext] = None,
        args: Any = None,
        **kwargs: Any) -> None:
    """Compute and set the attributes of a concrete constraint.

    Resolves the abstract base constraint by *name*, inlines the subject
    expression and arguments into its check expression, compiles the
    result, validates it (must yield ``std::bool``; single-hop paths
    only; MULTI/aggregate restrictions), and records the resulting
    attributes (including ``return_type``/``return_typemod`` copied from
    the base) on this command via ``set_attribute_value``.

    NOTE(review): this is a second variant of a similarly named method
    elsewhere in the file; unlike that one it resolves the base via
    ``schema.get(name)`` and uses ``rptr.ptrref.dir_cardinality`` —
    presumably intentional divergence between the two call sites.

    :param subject_obj: the schema object the constraint is attached to,
        or None.
    :param name: fully-qualified name of the abstract base constraint.
    :param subjectexpr: explicit subject expression, if any.
    :param args: constraint arguments to inline into the expression.
    :param kwargs: additional attribute values to set verbatim.
    :raises errors.InvalidConstraintDefinitionError: on any validation
        failure.
    """
    from edb.ir import ast as ir_ast
    from edb.ir import utils as ir_utils

    constr_base = schema.get(name, type=Constraint)

    orig_subjectexpr = subjectexpr
    orig_subject = subject_obj
    base_subjectexpr = constr_base.get_field_value(schema, 'subjectexpr')
    if subjectexpr is None:
        subjectexpr = base_subjectexpr
    elif (base_subjectexpr is not None
            and subjectexpr.text != base_subjectexpr.text):
        # A redeclared subject expression must not conflict with the
        # one fixed by the abstract base.
        raise errors.InvalidConstraintDefinitionError(
            f'subjectexpr is already defined for {name}')

    if (isinstance(subject_obj, s_scalars.ScalarType)
            and constr_base.get_is_aggregate(schema)):
        raise errors.InvalidConstraintDefinitionError(
            f'{constr_base.get_verbosename(schema)} may not '
            f'be used on scalar types')

    if subjectexpr is not None:
        # The effective subject is the AST of the subject expression.
        subject_ql = subjectexpr.qlast
        subject = subject_ql
    else:
        subject = subject_obj

    expr: s_expr.Expression = constr_base.get_field_value(schema, 'expr')
    if not expr:
        raise errors.InvalidConstraintDefinitionError(
            f'missing constraint expression in {name}')

    # Re-parse instead of using expr.qlast, because we mutate
    # the AST below.
    expr_ql = qlparser.parse(expr.text)

    if not args:
        args = constr_base.get_field_value(schema, 'args')

    attrs = dict(kwargs)
    # Tracks which of the attrs being set are inherited from the base.
    inherited = dict()
    if orig_subjectexpr is not None:
        attrs['subjectexpr'] = orig_subjectexpr
    else:
        base_subjectexpr = constr_base.get_subjectexpr(schema)
        if base_subjectexpr is not None:
            attrs['subjectexpr'] = base_subjectexpr
            inherited['subjectexpr'] = True

    errmessage = attrs.get('errmessage')
    if not errmessage:
        errmessage = constr_base.get_errmessage(schema)
        inherited['errmessage'] = True
    attrs['errmessage'] = errmessage

    if subject is not orig_subject:
        # subject has been redefined
        assert isinstance(subject, qlast.Base)
        # Splice the subject expression AST in place of __subject__
        # references in the check expression.
        qlutils.inline_anchors(expr_ql,
                               anchors={qlast.Subject().name: subject})
        subject = orig_subject

    if args:
        # Build the positional argument list: __subject__ first, then
        # the declared args, and inline them into the expression AST.
        args_ql: List[qlast.Base] = [
            qlast.Path(steps=[qlast.Subject()]),
        ]
        args_ql.extend(arg.qlast for arg in args)
        args_map = qlutils.index_parameters(
            args_ql,
            parameters=constr_base.get_params(schema),
            schema=schema,
        )
        qlutils.inline_parameters(expr_ql, args_map)
        attrs['args'] = args

    assert subject is not None
    # Only a schema type subject can serve as a path prefix anchor;
    # an AST subject (from subjectexpr) cannot.
    path_prefix_anchor = (qlast.Subject().name if isinstance(
        subject, s_types.Type) else None)
    final_expr = s_expr.Expression.compiled(
        s_expr.Expression.from_ast(expr_ql, schema, {}),
        schema=schema,
        options=qlcompiler.CompilerOptions(
            anchors={qlast.Subject().name: subject},
            path_prefix_anchor=path_prefix_anchor,
            apply_query_rewrites=not context.stdmode,
        ),
    )

    bool_t = schema.get('std::bool', type=s_scalars.ScalarType)
    assert isinstance(final_expr.irast, ir_ast.Statement)
    expr_type = final_expr.irast.stype
    if not expr_type.issubclass(schema, bool_t):
        raise errors.InvalidConstraintDefinitionError(
            f'{name} constraint expression expected '
            f'to return a bool value, got '
            f'{expr_type.get_verbosename(schema)}',
            context=sourcectx)

    if subjectexpr is not None:
        # Object-type subjects are singletons within the constraint
        # expression; other subjects provide no singleton set.
        if (isinstance(subject_obj, s_types.Type)
                and subject_obj.is_object_type()):
            singletons = frozenset({subject_obj})
        else:
            singletons = frozenset()

        final_subjectexpr = s_expr.Expression.compiled(
            subjectexpr,
            schema=schema,
            options=qlcompiler.CompilerOptions(
                anchors={qlast.Subject().name: subject},
                path_prefix_anchor=path_prefix_anchor,
                singletons=singletons,
                apply_query_rewrites=not context.stdmode,
            ),
        )
        assert isinstance(final_subjectexpr.irast, ir_ast.Statement)

        # Validate path shapes in the compiled check expression: walk
        # each longest path back towards its root.
        refs = ir_utils.get_longest_paths(final_expr.irast)
        has_multi = False
        for ref in refs:
            while ref.rptr:
                rptr = ref.rptr
                if rptr.ptrref.dir_cardinality.is_multi():
                    has_multi = True
                if (not isinstance(rptr.ptrref,
                                   ir_ast.TupleIndirectionPointerRef)
                        and rptr.ptrref.source_ptr is None
                        and rptr.source.rptr is not None):
                    raise errors.InvalidConstraintDefinitionError(
                        "constraints cannot contain paths with more "
                        "than one hop",
                        context=sourcectx)
                ref = rptr.source

        if has_multi and len(refs) > 1:
            raise errors.InvalidConstraintDefinitionError(
                "cannot reference multiple links or properties in a "
                "constraint where at least one link or property is MULTI",
                context=sourcectx)

        if has_multi and ir_utils.contains_set_of_op(
                final_subjectexpr.irast):
            raise errors.InvalidConstraintDefinitionError(
                "cannot use aggregate functions or operators "
                "in a non-aggregating constraint",
                context=sourcectx)

    attrs['return_type'] = constr_base.get_return_type(schema)
    attrs['return_typemod'] = constr_base.get_return_typemod(schema)
    attrs['finalexpr'] = final_expr
    attrs['params'] = constr_base.get_params(schema)
    attrs['abstract'] = False

    for k, v in attrs.items():
        self.set_attribute_value(k, v, inherited=bool(inherited.get(k)))
def _cmd_tree_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> CreateConstraint:
    """Build a CreateConstraint command tree from its DDL AST node.

    Handles both concrete (``CREATE CONSTRAINT`` on a subject) and
    abstract (``CREATE ABSTRACT CONSTRAINT``) declarations, fills in
    the default return type/modifier, and normalizes the subject
    expression, renormalizing it for pre-1.0a6 schemas.
    """
    create_cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    if isinstance(astnode, qlast.CreateConcreteConstraint):
        # Concrete constraints may be delegated and may pass explicit
        # arguments to their abstract base.
        if astnode.delegated:
            create_cmd.set_attribute_value('delegated', astnode.delegated)

        ast_args = cls._constraint_args_from_ast(schema, astnode, context)
        if ast_args:
            create_cmd.set_attribute_value('args', ast_args)

    elif isinstance(astnode, qlast.CreateConstraint):
        # Abstract constraints declare parameters; reject the kinds
        # constraints do not support.
        for param in cls._get_param_desc_from_ast(
                schema, context.modaliases, astnode):
            if param.get_kind(schema) is ft.ParameterKind.NamedOnlyParam:
                raise errors.InvalidConstraintDefinitionError(
                    'named only parameters are not allowed '
                    'in this context',
                    context=astnode.context)

            if param.get_default(schema) is not None:
                raise errors.InvalidConstraintDefinitionError(
                    'constraints do not support parameters '
                    'with defaults',
                    context=astnode.context)

    # Constraints produce a singleton bool unless declared otherwise.
    if create_cmd.get_attribute_value('return_type') is None:
        create_cmd.set_attribute_value(
            'return_type',
            schema.get('std::bool'),
        )

    if create_cmd.get_attribute_value('return_typemod') is None:
        create_cmd.set_attribute_value(
            'return_typemod',
            ft.TypeModifier.SingletonType,
        )

    assert isinstance(
        astnode, (qlast.CreateConstraint, qlast.CreateConcreteConstraint))
    # 'subjectexpr' can be present in either astnode type
    if astnode.subjectexpr:
        orig_text = cls.get_orig_expr_text(schema, astnode, 'subjectexpr')
        needs_renormalization = (
            orig_text is not None
            and context.compat_ver_is_before(
                (1, 0, verutils.VersionStage.ALPHA, 6))
        )

        if needs_renormalization:
            # Versions prior to a6 used a different expression
            # normalization strategy, so we must renormalize the
            # expression.
            expr_ql = qlcompiler.renormalize_compat(
                astnode.subjectexpr,
                orig_text,
                schema=schema,
                localnames=context.localnames,
            )
        else:
            expr_ql = astnode.subjectexpr

        create_cmd.set_attribute_value(
            'subjectexpr',
            s_expr.Expression.from_ast(
                expr_ql,
                schema,
                context.modaliases,
                context.localnames,
            ),
        )

    cls._validate_subcommands(astnode)
    assert isinstance(create_cmd, CreateConstraint)
    return create_cmd