def _ql_typeexpr_to_type(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> List[s_types.Type]:
    """Resolve a QL type expression into a flat list of schema types.

    ``typeof`` expressions are compiled to infer their type, unions are
    flattened by concatenation, and plain type names are looked up.
    """
    if isinstance(ql_t, qlast.TypeName):
        return [_ql_typename_to_type(ql_t, ctx=ctx)]

    if isinstance(ql_t, qlast.TypeOf):
        # Compile the inner expression in a throwaway fenced scope so
        # the inference does not leak into the surrounding query.
        with ctx.newscope(fenced=True, temporary=True) as tctx:
            inner = dispatch.compile(ql_t.expr, ctx=tctx)
            inner_set = setgen.ensure_set(inner, ctx=tctx)
            return [setgen.get_set_type(inner_set, ctx=tctx)]

    if isinstance(ql_t, qlast.TypeOp):
        if ql_t.op == '|':
            # A union flattens into the concatenation of both sides.
            lhs = _ql_typeexpr_to_type(ql_t.left, ctx=ctx)
            rhs = _ql_typeexpr_to_type(ql_t.right, ctx=ctx)
            return lhs + rhs

        raise errors.UnsupportedFeatureError(
            f'type operator {ql_t.op!r} is not implemented',
            context=ql_t.context)

    raise errors.InternalServerError(f'unexpected TypeExpr: {ql_t!r}')
def _ql_typeexpr_to_type(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> List[s_types.Type]:
    """Resolve a QL type expression into a flat list of schema types.

    Handles three cases: ``typeof <expr>`` (compile and infer), ``A | B``
    unions (flatten recursively), and plain type names (look up).
    """
    if isinstance(ql_t, qlast.TypeOf):
        with ctx.new() as subctx:
            # Use an empty scope tree, to avoid polluting things pointlessly
            subctx.path_scope = irast.ScopeTreeNode()
            ir_set = dispatch.compile(ql_t.expr, ctx=subctx)
            stype = setgen.get_set_type(ir_set, ctx=subctx)
        return [stype]
    elif isinstance(ql_t, qlast.TypeOp):
        if ql_t.op == '|':
            # Unions flatten into a concatenated component list.
            return (_ql_typeexpr_to_type(ql_t.left, ctx=ctx)
                    + _ql_typeexpr_to_type(ql_t.right, ctx=ctx))

        raise errors.UnsupportedFeatureError(
            f'type operator {ql_t.op!r} is not implemented',
            context=ql_t.context)
    elif isinstance(ql_t, qlast.TypeName):
        return [_ql_typename_to_type(ql_t, ctx=ctx)]
    else:
        raise errors.EdgeQLSyntaxError("Unexpected type expression",
                                       context=ql_t.context)
def compile_FunctionCall(
        expr: irast.FunctionCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR function call into a backend (SQL) function call.

    Used in the "simple expression" compilation mode, hence set-returning
    functions are rejected outright.
    """
    if expr.typemod is ql_ft.TypeModifier.SetOfType:
        raise errors.UnsupportedFeatureError(
            'set returning functions are not supported in simple expressions')

    args = _compile_call_args(expr, ctx=ctx)

    # A call that omitted the variadic argument entirely still needs an
    # explicit empty array of the declared variadic element type.
    if expr.has_empty_variadic and expr.variadic_param_type is not None:
        var = pgast.TypeCast(
            arg=pgast.ArrayExpr(elements=[]),
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(
                    expr.variadic_param_type)))
        args.append(pgast.VariadicArgument(expr=var))

    name = relgen.get_func_call_backend_name(expr, ctx=ctx)

    result: pgast.BaseExpr = pgast.FuncCall(name=name, args=args)

    if expr.force_return_cast:
        # The underlying function has a return value type
        # different from that of the EdgeQL function declaration,
        # so we need to make an explicit cast here.
        result = pgast.TypeCast(
            arg=result,
            type_name=pgast.TypeName(
                name=pg_types.pg_type_from_ir_typeref(expr.typeref)))

    return result
def _create_begin(self, schema, context):
    """Begin creation of a constraint.

    Abstract constraints (no referrer) go straight to the generic path;
    concrete constraints have their attributes computed from the base
    constraint and recorded on this command first.
    """
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Abstract constraint definition: nothing extra to compute.
        return super()._create_begin(schema, context)

    subject = referrer_ctx.scls

    if subject.is_scalar() and subject.is_enum(schema):
        raise errors.UnsupportedFeatureError(
            f'constraints cannot be defined on an enumerated type',
            context=self.source_context,
        )

    if not context.canonical:
        # Derive the concrete constraint attributes from the abstract
        # base and record them (with inheritance flags) on this command.
        schema, props = self._get_create_fields(schema, context)
        props.pop('name')
        props.pop('subject', None)
        fullname = self.classname
        shortname = sn.shortname_from_fullname(fullname)
        constr_base, attrs, inh = Constraint.get_concrete_constraint_attrs(
            schema, subject, name=shortname,
            sourcectx=self.source_context, **props)

        for k, v in attrs.items():
            inherited = inh.get(k)
            self.set_attribute_value(k, v, inherited=inherited)

        self.set_attribute_value('subject', subject)

    return super()._create_begin(schema, context)
def _create_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Begin creation of a constraint (typed variant).

    Abstract constraints (no referrer) take the generic path; concrete
    constraints are validated against their subject and have their
    attributes populated before the generic machinery runs.
    """
    referrer_ctx = self.get_referrer_context(context)
    if referrer_ctx is None:
        # Abstract constraint definition: nothing extra to compute.
        schema = super()._create_begin(schema, context)
        return schema

    subject = referrer_ctx.scls
    assert isinstance(subject, ConsistencySubject)
    if not subject.can_accept_constraints(schema):
        raise errors.UnsupportedFeatureError(
            f'constraints cannot be defined on '
            f'{subject.get_verbosename(schema)}',
            context=self.source_context,
        )

    if not context.canonical:
        # Compute concrete attributes from the abstract base constraint
        # and record them on this command.
        props = self.get_attributes(schema, context)
        props.pop('name')
        props.pop('subject', None)
        fullname = self.classname
        shortname = sn.shortname_from_fullname(fullname)

        self._populate_concrete_constraint_attrs(
            schema, subject, name=shortname,
            sourcectx=self.source_context, **props)

        self.set_attribute_value('subject', subject)

    return super()._create_begin(schema, context)
def _cmd_tree_from_ast(cls, schema, astnode, context):
    """Build the command tree, rejecting user-defined operators.

    Operator definitions are only permitted while populating the
    standard library (stdmode) or in the test harness (testmode).
    """
    privileged = context.stdmode or context.testmode
    if not privileged:
        raise errors.UnsupportedFeatureError(
            'user-defined operators are not supported',
            context=astnode.context)

    return super()._cmd_tree_from_ast(schema, astnode, context)
def _cmd_tree_from_ast(cls, schema, astnode, context):
    """Build the command tree for a scalar view definition.

    Locates the CreateScalarType command (possibly nested in a group),
    detects anonymous-enum bases and converts them into ``enum_values``,
    and wraps scalar ``default`` values in a list.
    """
    cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    if isinstance(cmd, sd.CommandGroup):
        # The superclass may wrap the create command in a group; find it.
        for subcmd in cmd.get_subcommands():
            if isinstance(subcmd, cls):
                create_cmd = subcmd
                break
        else:
            raise errors.InternalServerError(
                'scalar view definition did not return CreateScalarType')
    else:
        create_cmd = cmd

    bases = create_cmd.get_attribute_value('bases')
    is_enum = False
    # NOTE(review): reaches into the private `_ids` of the bases
    # collection — presumably an ObjectList-like container; verify.
    if len(bases) == 1 and isinstance(bases._ids[0], AnonymousEnumTypeRef):
        # A single anonymous-enum base turns this scalar into an enum.
        elements = bases._ids[0].elements
        create_cmd.set_attribute_value('enum_values', elements)
        create_cmd.set_attribute_value('is_final', True)
        is_enum = True

    for sub in create_cmd.get_subcommands(type=sd.AlterObjectProperty):
        if sub.property == 'default':
            if is_enum:
                raise errors.UnsupportedFeatureError(
                    f'enumerated types do not support defaults')
            else:
                # Scalar defaults are stored as single-element lists.
                sub.new_value = [sub.new_value]

    return cmd
def create(cls, schema, *, name=None, id=so.NoDefault, dimensions=None,
           element_type, **kwargs):
    """Create a one-dimensional array type over *element_type*.

    A missing or empty *dimensions* defaults to a single unbounded
    dimension; anything else is rejected.  The type id and display
    name are derived from the element type unless given explicitly.
    """
    # Normalize: no dimensions means one unbounded dimension.
    dimensions = dimensions or [-1]
    if dimensions != [-1]:
        raise errors.UnsupportedFeatureError(
            f'multi-dimensional arrays are not supported')

    if id is so.NoDefault:
        # Stable id derived from element type and dimensionality.
        id = generate_type_id(f'array-{element_type.id}-{dimensions}')

    if name is None:
        name = s_name.SchemaName(
            module='std',
            name=f'array<{element_type.get_name(schema)}>')

    return super()._create(
        schema,
        id=id,
        name=name,
        element_type=element_type,
        dimensions=dimensions,
        **kwargs)
def create_concrete_constraint(cls, schema, subject, *, name,
                               subjectexpr=None, sourcectx=None,
                               args=None, modaliases=None, **kwargs):
    """Derive a concrete constraint on *subject* from this base constraint.

    Returns a ``(schema, constraint, attrs)`` triple.

    FIX: the original declared ``args=[]`` — a mutable default argument
    shared across calls.  Replaced with a ``None`` sentinel that is
    normalized to a fresh list, which is backward-compatible for all
    callers.
    """
    if args is None:
        args = []

    if subject.is_scalar() and subject.is_enum(schema):
        raise errors.UnsupportedFeatureError(
            f'constraints cannot be defined on an enumerated type',
            context=sourcectx,
        )

    # Compute the concrete attribute set from the abstract base.
    constr_base, attrs = cls.get_concrete_constraint_attrs(
        schema, subject, name=name, subjectexpr=subjectexpr,
        sourcectx=sourcectx, args=args, modaliases=modaliases, **kwargs)

    schema, constraint = constr_base.derive(
        schema, subject, merge_bases=[constr_base], attrs=attrs)

    return schema, constraint, attrs
def apply(self, schema, context):
    """Apply a rebase on a scalar type.

    Rebasing an enum (changing its composition) is currently disabled
    via an unconditional raise; non-enum scalars fall through to the
    generic rebase machinery.

    FIX: the fall-through called ``super().apply(self, schema, context)``.
    ``super().apply`` is already bound to ``self``, so the explicit
    ``self`` shifted every argument by one (``schema`` became ``self``,
    etc.), which would fail at runtime.  Corrected to
    ``super().apply(schema, context)``.
    """
    scls = self.get_object(schema, context)
    self.scls = scls

    enum_values = scls.get_enum_values(schema)
    if enum_values:
        raise errors.UnsupportedFeatureError(
            f'altering enum composition is not supported')

        # NOTE: everything below is unreachable while the raise above
        # stands; kept intact as it documents the intended enum-rebase
        # validation should the feature be re-enabled.
        if self.removed_bases and not self.added_bases:
            raise errors.SchemaError(f'cannot DROP EXTENDING enum')

        all_bases = []
        for bases, pos in self.added_bases:
            if pos:
                raise errors.SchemaError(
                    f'cannot add another enum as supertype '
                    f'use EXTENDING without position qualification')
            all_bases.extend(bases)

        if len(all_bases) > 1:
            raise errors.SchemaError(
                f'cannot set more than one enum as supertype ')

        new_base = all_bases[0]
        new_values = new_base.elements

        schema = self._validate_enum_change(
            scls, enum_values, new_values, schema, context)

        return schema, scls
    else:
        return super().apply(schema, context)
def from_subtypes(cls, schema, subtypes, typemods=None, *, name=None):
    """Construct an array type from exactly one subtype.

    *typemods*, when given, is expected to carry the dimension list as
    its first element.
    """
    if len(subtypes) != 1:
        raise errors.SchemaError(
            f'unexpected number of subtypes, expecting 1: {subtypes!r}')
    stype = subtypes[0]

    if isinstance(stype, Array):
        raise errors.UnsupportedFeatureError(
            f'nested arrays are not supported')

    if typemods:
        dimensions = typemods[0]
    else:
        dimensions = []

    # NOTE(review): since the guard above already rejects Array
    # subtypes, this branch looks unreachable when ``cls`` is Array
    # itself — presumably retained for subclasses; verify.
    if isinstance(stype, cls):
        # There is no array of arrays, only multi-dimensional arrays.
        element_type = stype.element_type
        if not dimensions:
            dimensions.append(-1)
        dimensions += stype.dimensions
    else:
        element_type = stype
        # NOTE(review): this discards any dimensions taken from
        # *typemods* above — confirm that is intentional.
        dimensions = []

    return cls.create(schema, element_type=element_type,
                      dimensions=dimensions, name=name)
def compile_OperatorCall(
        expr: irast.OperatorCall, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR operator call to a backend expression.

    ``std::IF`` and ``std::??`` with singleton operands get direct SQL
    forms (CASE / COALESCE); other set-returning operators are rejected
    in this simple-expression mode; everything else compiles as a
    generic operator call.
    """
    if (str(expr.func_shortname) == 'std::IF'
            and expr.args[0].cardinality.is_single()
            and expr.args[2].cardinality.is_single()):
        # Singleton IF maps onto a SQL CASE expression.
        # Argument order is (if_expr, condition, else_expr).
        if_expr, condition, else_expr = (a.expr for a in expr.args)
        return pgast.CaseExpr(
            args=[
                pgast.CaseWhen(
                    expr=dispatch.compile(condition, ctx=ctx),
                    result=dispatch.compile(if_expr, ctx=ctx))
            ],
            defresult=dispatch.compile(else_expr, ctx=ctx))
    elif (str(expr.func_shortname) == 'std::??'
            and expr.args[0].cardinality.is_single()
            and expr.args[1].cardinality.is_single()):
        # Singleton coalescing maps onto SQL COALESCE.
        l_expr, r_expr = (a.expr for a in expr.args)
        return pgast.CoalesceExpr(
            args=[
                dispatch.compile(l_expr, ctx=ctx),
                dispatch.compile(r_expr, ctx=ctx),
            ],
        )
    elif expr.typemod is ql_ft.TypeModifier.SetOfType:
        raise errors.UnsupportedFeatureError(
            f'set returning operator {expr.func_shortname!r} is not supported '
            f'in simple expressions')

    args = _compile_call_args(expr, ctx=ctx)
    return compile_operator(expr, args, ctx=ctx)
def ensure_schema_collection(schema, coll_type, parent_cmd, *,
                             src_context=None, context):
    """Ensure *coll_type* (a collection type) exists in the schema.

    Adds a create-delta for the collection to *parent_cmd* if it is not
    already in the schema or pending creation, and cancels any pending
    deletion of it.
    """
    if not coll_type.is_collection():
        raise ValueError(
            f'{coll_type.get_displayname(schema)} is not a collection')

    if coll_type.contains_array_of_tuples(schema):
        raise errors.UnsupportedFeatureError(
            'arrays of tuples are not supported at the schema level',
            context=src_context,
        )

    delta_root = context.top().op

    # Only schedule creation once: neither already present in the
    # schema nor already queued in this delta.
    if (schema.get_by_id(coll_type.id, None) is None
            and coll_type.id not in delta_root.new_types):
        parent_cmd.add(coll_type.as_create_delta(schema))
        delta_root.new_types.add(coll_type.id)

    if coll_type.id in delta_root.deleted_types:
        # Revert the deletion decision.
        del_cmd = delta_root.deleted_types.pop(coll_type.id)
        delta_root.discard(del_cmd)
def _ql_typeexpr_to_type(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> List[s_types.Type]:
    """Resolve a QL type expression into a flat list of schema types.

    Unlike the plain variant, union operands are validated to be object
    types before being concatenated.
    """
    if isinstance(ql_t, qlast.TypeOf):
        with ctx.new() as subctx:
            # Use an empty scope tree, to avoid polluting things pointlessly
            subctx.path_scope = irast.ScopeTreeNode()
            subctx.expr_exposed = context.Exposure.UNEXPOSED
            ir_set = dispatch.compile(ql_t.expr, ctx=subctx)
            stype = setgen.get_set_type(ir_set, ctx=subctx)
        return [stype]
    elif isinstance(ql_t, qlast.TypeOp):
        if ql_t.op == '|':
            # We need to validate that type ops are applied only to
            # object types. So we check the base case here, when the
            # left or right operand is a single type, because if it's
            # a longer list, then we know that it was already composed
            # of "|" or "&", or it is the result of inference by
            # "typeof" and is a list of object types anyway.
            left = _ql_typeexpr_to_type(ql_t.left, ctx=ctx)
            right = _ql_typeexpr_to_type(ql_t.right, ctx=ctx)
            if len(left) == 1 and not left[0].is_object_type():
                raise errors.UnsupportedFeatureError(
                    f'cannot use type operator {ql_t.op!r} with non-object '
                    f'type {left[0].get_displayname(ctx.env.schema)}',
                    context=ql_t.left.context)
            if len(right) == 1 and not right[0].is_object_type():
                raise errors.UnsupportedFeatureError(
                    f'cannot use type operator {ql_t.op!r} with non-object '
                    f'type {right[0].get_displayname(ctx.env.schema)}',
                    context=ql_t.right.context)
            return left + right

        raise errors.UnsupportedFeatureError(
            f'type operator {ql_t.op!r} is not implemented',
            context=ql_t.context)
    elif isinstance(ql_t, qlast.TypeName):
        return [_ql_typename_to_type(ql_t, ctx=ctx)]
    else:
        raise errors.EdgeQLSyntaxError("Unexpected type expression",
                                       context=ql_t.context)
def compile_GroupQuery(
        expr: qlast.Base, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile a GROUP statement (currently disabled).

    The unconditional raise below disables the feature; everything after
    it is unreachable and preserved as the intended implementation.
    """
    raise errors.UnsupportedFeatureError(
        "'GROUP' statement is not currently implemented",
        context=expr.context)

    # NOTE: unreachable while the raise above stands.
    with ctx.subquery() as ictx:
        stmt = irast.GroupStmt()
        init_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

        # Synthesize a path id for the group binding itself.
        typename = s_name.Name(
            module='__group__', name=ctx.aliases.get('Group'))
        obj = ctx.env.get_track_schema_object('std::BaseObject')
        stmt.group_path_id = pathctx.get_path_id(
            obj, typename=typename, ctx=ictx)

        pathctx.register_set_in_scope(stmt.group_path_id, ctx=ictx)

        # Compile the grouping subject in its own fenced scope.
        with ictx.newscope(fenced=True) as subjctx:
            subject_set = setgen.scoped_set(
                dispatch.compile(expr.subject, ctx=subjctx), ctx=subjctx)

            alias = expr.subject_alias or subject_set.path_id.target_name_hint
            stmt.subject = stmtctx.declare_inline_view(
                subject_set, alias, ctx=ictx)

            with subjctx.new() as grpctx:
                stmt.groupby = compile_groupby_clause(
                    expr.groupby, ctx=grpctx)

        # The result clause compiles as a nested SELECT over the groups.
        with ictx.subquery() as isctx, isctx.newscope(fenced=True) as sctx:
            o_stmt = sctx.stmt = irast.SelectStmt()

            o_stmt.result = compile_result_clause(
                expr.result,
                view_scls=ctx.view_scls,
                view_rptr=ctx.view_rptr,
                result_alias=expr.result_alias,
                view_name=ctx.toplevel_result_view_name,
                ctx=sctx)

            clauses.compile_where_clause(
                o_stmt, expr.where, ctx=sctx)

            o_stmt.orderby = clauses.compile_orderby_clause(
                expr.orderby, ctx=sctx)

            o_stmt.offset = clauses.compile_limit_offset_clause(
                expr.offset, ctx=sctx)

            o_stmt.limit = clauses.compile_limit_offset_clause(
                expr.limit, ctx=sctx)

            stmt.result = setgen.scoped_set(o_stmt, ctx=sctx)

        result = fini_stmt(stmt, expr, ctx=ictx, parent_ctx=ctx)

    return result
def compile_ConfigInsert(
        expr: qlast.ConfigInsert, *, ctx: context.ContextLevel) -> irast.Set:
    """Compile CONFIGURE ... INSERT into an IR ConfigInsert node.

    The inserted shape is compiled as a regular INSERT against the
    ``cfg::`` module, then its subject is wrapped into a ConfigInsert.
    """
    info = _validate_op(expr, ctx=ctx)
    if not expr.system:
        # Only system-level configuration supports INSERT.
        raise errors.UnsupportedFeatureError(
            f'CONFIGURE SESSION INSERT is not supported'
        )

    level = 'SYSTEM' if expr.system else 'SESSION'
    subject = ctx.env.get_track_schema_object(
        f'cfg::{expr.name.name}', default=None)
    if subject is None:
        raise errors.ConfigurationError(
            f'{expr.name.name!r} is not a valid configuration item',
            context=expr.context,
        )

    # Rewrite the config insert as an ordinary INSERT of the cfg object.
    insert_stmt = qlast.InsertQuery(
        subject=qlast.Path(
            steps=[
                qlast.ObjectRef(
                    name=expr.name.name,
                    module='cfg',
                )
            ]
        ),
        shape=expr.shape,
    )

    for el in expr.shape:
        if isinstance(el.compexpr, qlast.InsertQuery):
            _inject_tname(el.compexpr, ctx=ctx)

    with ctx.newscope() as subctx:
        subctx.expr_exposed = True
        subctx.modaliases = ctx.modaliases.copy()
        # Resolve unqualified names against the cfg module.
        subctx.modaliases[None] = 'cfg'
        subctx.special_computables_in_mutation_shape |= {'_tname'}
        insert_ir = dispatch.compile(insert_stmt, ctx=subctx)
        insert_ir_set = setgen.ensure_set(insert_ir, ctx=subctx)
        assert isinstance(insert_ir_set.expr, irast.InsertStmt)
        insert_subject = insert_ir_set.expr.subject

        _validate_config_object(insert_subject, level=level, ctx=subctx)

    return setgen.ensure_set(
        irast.ConfigInsert(
            name=info.param_name,
            cardinality=info.cardinality,
            system=expr.system,
            requires_restart=info.requires_restart,
            backend_setting=info.backend_setting,
            expr=insert_subject,
            context=expr.context,
        ),
        ctx=ctx,
    )
def type_op_ast_to_type_shell(
    node: qlast.TypeOp,
    *,
    module: Optional[str] = None,
    modaliases: Mapping[Optional[str], str],
    schema: s_schema.Schema,
) -> s_types.TypeExprShell:
    """Convert a ``|`` type-operator AST node into a union type shell.

    Nested unions on either side are flattened so the resulting shell
    carries a single flat tuple of components.
    """
    from . import types as s_types

    if node.op != '|':
        raise errors.UnsupportedFeatureError(
            f'unsupported type expression operator: {node.op}',
            context=node.context,
        )

    if module is None:
        module = modaliases.get(None)

    if module is None:
        raise errors.InternalServerError(
            'cannot determine module for derived compound type',
            context=node.context,
        )

    left = ast_to_type_shell(
        node.left,
        module=module,
        modaliases=modaliases,
        schema=schema,
    )
    right = ast_to_type_shell(
        node.right,
        module=module,
        modaliases=modaliases,
        schema=schema,
    )

    def _flat(shell):
        # A union shell contributes its components; anything else is a
        # single component.
        if isinstance(shell, s_types.UnionTypeShell):
            return shell.components
        return (shell,)

    return s_types.UnionTypeShell(
        components=_flat(left) + _flat(right),
        module=module,
    )
def _cmd_tree_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> sd.Command:
    """Build the command tree, rejecting user-defined extension packages.

    Extension package definitions are only permitted while populating
    the standard library (stdmode) or under the test harness (testmode).
    """
    privileged = context.stdmode or context.testmode
    if not privileged:
        raise errors.UnsupportedFeatureError(
            'user-defined extension packages are not supported yet',
            context=astnode.context)

    return super()._cmd_tree_from_ast(schema, astnode, context)
def _cmd_tree_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> sd.Command:
    """Build the command tree for a scalar type definition.

    Locates the CreateScalarType command (possibly nested in a group)
    and, when the definition has an anonymous-enum base, converts it
    into ``enum_values`` and marks the type final.
    """
    cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    if isinstance(cmd, sd.CommandGroup):
        # The superclass may wrap the create command in a group; find it.
        for subcmd in cmd.get_subcommands():
            if isinstance(subcmd, cls):
                create_cmd: sd.Command = subcmd
                break
        else:
            raise errors.InternalServerError(
                'scalar alias definition did not return CreateScalarType'
            )
    else:
        create_cmd = cmd

    if isinstance(astnode, qlast.CreateScalarType):
        bases = [
            s_utils.ast_to_type_shell(
                b,
                modaliases=context.modaliases,
                schema=schema,
            )
            for b in astnode.bases
        ]

        if any(isinstance(br, AnonymousEnumTypeShell) for br in bases):
            # This is an enumerated type.
            if len(bases) > 1:
                assert isinstance(astnode, qlast.BasesMixin)
                raise errors.SchemaError(
                    f'invalid scalar type definition, enumeration must be'
                    f' the only supertype specified',
                    context=astnode.bases[0].context,
                )

            # Enums cannot carry defaults.
            deflt = create_cmd.get_attribute_set_cmd('default')
            if deflt is not None:
                raise errors.UnsupportedFeatureError(
                    f'enumerated types do not support defaults',
                    context=deflt.source_context,
                )

            shell = bases[0]
            assert isinstance(shell, AnonymousEnumTypeShell)
            create_cmd.set_attribute_value('enum_values', shell.elements)
            create_cmd.set_attribute_value('is_final', True)

    return cmd
def _compile_call_args(
    expr: irast.Call, *,
    ctx: context.CompilerContextLevel) -> List[pgast.BaseExpr]:
    """Compile call arguments for the simple-expression compiler.

    Global arguments (for function calls) are compiled first, then the
    regular arguments; non-strict calls with potentially-empty singleton
    arguments are rejected.
    """
    args = []
    if isinstance(expr, irast.FunctionCall) and expr.global_args:
        args += [dispatch.compile(arg, ctx=ctx) for arg in expr.global_args]
    args += [dispatch.compile(a.expr, ctx=ctx) for a in expr.args]
    # NOTE(review): when global_args are present, `args` is longer than
    # `expr.args`, so this zip pairs the compiled global args with the
    # first IR args/typemods — confirm the offset is intentional.
    for ref, ir_arg, typemod in zip(args, expr.args, expr.params_typemods):
        if (not expr.impl_is_strict
                and ir_arg.cardinality.can_be_zero()
                and ref.nullable
                and typemod == ql_ft.TypeModifier.SingletonType):
            raise errors.UnsupportedFeatureError(
                'operations on potentially empty arguments not supported in '
                'simple expressions')
    return args
def from_subtypes(cls, schema, subtypes, typemods=None, *, name=None):
    """Construct an array type from exactly one non-array subtype."""
    if len(subtypes) != 1:
        raise errors.SchemaError(
            f'unexpected number of subtypes, expecting 1: {subtypes!r}')

    (stype,) = subtypes

    if isinstance(stype, Array):
        raise errors.UnsupportedFeatureError(
            f'nested arrays are not supported')

    # One-dimensional unbounded array.
    return cls.create(
        schema, element_type=stype, dimensions=[-1], name=name)
def _ql_typeexpr_to_ir_typeref(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> typing.List[irast.TypeRef]:
    """Lower a QL type expression to a list of IR type refs."""
    # FIXME: currently this only handles type union
    if not isinstance(ql_t, qlast.TypeOp):
        return [_ql_typeref_to_ir_typeref(ql_t, ctx=ctx)]

    if ql_t.op != '|':
        raise errors.UnsupportedFeatureError(
            f'type operator {ql_t.op!r} is not implemented',
            context=ql_t.context)

    # A union flattens into the concatenation of both operand lists.
    return (_ql_typeexpr_to_ir_typeref(ql_t.left, ctx=ctx)
            + _ql_typeexpr_to_ir_typeref(ql_t.right, ctx=ctx))
def _init_scalars(self, scalars, enums):
    """Initialize declared scalar types.

    Records enum values on enum scalars, applies field setters, and
    parses subject constraints (rejected for enums).
    """
    for scalar, scalardecl in scalars.items():
        enum_values = enums.get(scalar)
        if enum_values:
            # Enums are closed: record their values and seal the type.
            self._schema = scalar.update(self._schema, {
                'enum_values': enum_values,
                'is_final': True,
            })

        if scalardecl.fields:
            self._parse_field_setters(scalar, scalardecl.fields)

        if scalardecl.constraints:
            if enum_values:
                raise errors.UnsupportedFeatureError(
                    f'constraints cannot be defined on an enumerated type',
                    context=scalardecl.constraints[0].context,
                )
            self._parse_subject_constraints(scalar, scalardecl)
def _cmd_tree_from_ast(cls, schema, astnode, context):
    """Build the command tree for a scalar type definition.

    Detects an anonymous-enum base and converts it into ``enum_values``;
    wraps scalar ``default`` values in a single-element list.
    """
    cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    bases = cmd.get_attribute_value('bases')
    is_enum = False
    # NOTE(review): reaches into the private `_ids` of the bases
    # collection — presumably an ObjectList-like container; verify.
    if len(bases) == 1 and isinstance(bases._ids[0], AnonymousEnumTypeRef):
        # A single anonymous-enum base turns this scalar into an enum.
        elements = bases._ids[0].elements
        cmd.set_attribute_value('enum_values', elements)
        cmd.set_attribute_value('is_final', True)
        is_enum = True

    for sub in cmd.get_subcommands(type=sd.AlterObjectProperty):
        if sub.property == 'default':
            if is_enum:
                raise errors.UnsupportedFeatureError(
                    f'enumerated types do not support defaults')
            else:
                # Scalar defaults are stored as single-element lists.
                sub.new_value = [sub.new_value]

    return cmd
def _alter_begin(
    self,
    schema: s_schema.Schema,
    context: sd.CommandContext,
) -> s_schema.Schema:
    """Begin an ALTER of a global.

    Normalizes the interaction between the global's expression and its
    ``cardinality``/``required``/``target`` attributes before the
    generic alter machinery runs.
    """
    if not context.canonical:
        old_expr = self.scls.get_expr(schema)
        has_expr = self.has_attribute_value('expr')
        # 'expr' was set, but to an empty value: RESET EXPRESSION.
        clears_expr = has_expr and not self.get_attribute_value('expr')

        # Force reconsideration of the expression if cardinality
        # or required is changed.
        if ((self.has_attribute_value('cardinality')
                or self.has_attribute_value('required'))
                and not has_expr
                and old_expr):
            self.set_attribute_value(
                'expr', s_expr.Expression.not_compiled(old_expr))

        # Produce an error when setting a type on something with
        # an expression
        if (self.has_attribute_value('target')
                and ((self.scls.get_expr(schema) or has_expr)
                     and not clears_expr)):
            raise errors.UnsupportedFeatureError(
                "cannot specify a type and an expression for a global",
                context=self.source_context,
            )

        if clears_expr and old_expr:
            # If the expression was explicitly set to None,
            # that means that `RESET EXPRESSION` was executed
            # and this is no longer a computable.
            computed_fields = self.scls.get_computed_fields(schema)
            if ('required' in computed_fields
                    and not self.has_attribute_value('required')):
                self.set_attribute_value('required', None)
            if ('cardinality' in computed_fields
                    and not self.has_attribute_value('cardinality')):
                self.set_attribute_value('cardinality', None)

    return super()._alter_begin(schema, context)
def compile_TypeCast(
        expr: irast.TypeCast, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR type cast into a backend expression.

    Casts are realized either as a plain SQL cast or as a call to a
    cast-implementing function; a required-cardinality modifier wraps
    the result in a null check.
    """
    pg_expr = dispatch.compile(expr.expr, ctx=ctx)

    if expr.sql_cast:
        # Use explicit SQL cast.
        pg_type = pg_types.pg_type_from_ir_typeref(expr.to_type)
        res: pgast.BaseExpr = pgast.TypeCast(
            arg=pg_expr,
            type_name=pgast.TypeName(name=pg_type))
    elif expr.sql_function or expr.sql_expr:
        # Cast implemented as a function.
        if expr.sql_expr:
            func_name = common.get_cast_backend_name(
                expr.cast_name, aspect='function')
        else:
            assert expr.sql_function
            func_name = tuple(expr.sql_function.split('.'))

        res = pgast.FuncCall(
            name=func_name,
            args=[pg_expr],
        )
    else:
        raise errors.UnsupportedFeatureError('cast not supported')

    if expr.cardinality_mod is qlast.CardinalityModifier.Required:
        # <required T> casts raise on NULL input at runtime.
        res = pgast.FuncCall(
            name=('edgedb', 'raise_on_null'),
            args=[
                res,
                pgast.StringConstant(val='invalid_parameter_value',),
                pgast.StringConstant(val='invalid null value in cast',),
            ])

    return res
def _compile_set_in_singleton_mode(
        node: irast.Set, *,
        ctx: context.CompilerContextLevel) -> pgast.BaseExpr:
    """Compile an IR set in singleton (simple-expression) mode.

    Expressions compile recursively; pointer references compile to
    column references on the subject table; deep paths are rejected.
    """
    if isinstance(node, irast.EmptySet):
        return pgast.NullConstant()
    elif node.expr is not None:
        return dispatch.compile(node.expr, ctx=ctx)
    else:
        if node.rptr:
            ptrref = node.rptr.ptrref
            source = node.rptr.source

            if isinstance(ptrref, irast.TupleIndirectionPointerRef):
                # Tuple element access compiles to an attribute getter
                # on the compiled tuple value.
                tuple_val = dispatch.compile(source, ctx=ctx)
                set_expr = astutils.tuple_getattr(
                    tuple_val,
                    source.typeref,
                    ptrref.shortname.name,
                )
                return set_expr

            # Only single-step paths are representable here.
            if ptrref.source_ptr is None and source.rptr is not None:
                raise errors.UnsupportedFeatureError(
                    'unexpectedly long path in simple expr')

            ptr_stor_info = pg_types.get_ptrref_storage_info(
                ptrref, resolve_type=False)

            colref = pgast.ColumnRef(
                name=[ptr_stor_info.column_name],
                nullable=node.rptr.dir_cardinality.can_be_zero())
        else:
            # A bare object reference compiles to its table column;
            # object-type paths reference the `id` column.
            name = [common.edgedb_name_to_pg_name(str(node.typeref.id))]
            if node.path_id.is_objtype_path():
                name.append('id')

            colref = pgast.ColumnRef(name=name)

        return colref
def _cmd_tree_from_ast(
    cls,
    schema: s_schema.Schema,
    astnode: qlast.DDLOperation,
    context: sd.CommandContext,
) -> sd.Command:
    """Build the command tree for a scalar type definition (typed).

    Locates the CreateScalarType command (possibly nested in a group),
    converts an anonymous-enum base into ``enum_values``, and wraps
    scalar ``default`` values in a single-element list.
    """
    cmd = super()._cmd_tree_from_ast(schema, astnode, context)

    if isinstance(cmd, sd.CommandGroup):
        # The superclass may wrap the create command in a group; find it.
        for subcmd in cmd.get_subcommands():
            if isinstance(subcmd, cls):
                create_cmd: sd.Command = subcmd
                break
        else:
            raise errors.InternalServerError(
                'scalar alias definition did not return CreateScalarType')
    else:
        create_cmd = cmd

    bases = create_cmd.get_attribute_value('bases')
    is_enum = False
    if len(bases) == 1 and isinstance(bases._ids[0], AnonymousEnumTypeRef):
        # type ignore below because this class elements is set
        # directly on __dict__
        elements = bases._ids[0].elements  # type: ignore
        create_cmd.set_attribute_value('enum_values', elements)
        create_cmd.set_attribute_value('is_final', True)
        is_enum = True

    for sub in create_cmd.get_subcommands(type=sd.AlterObjectProperty):
        if sub.property == 'default':
            if is_enum:
                raise errors.UnsupportedFeatureError(
                    f'enumerated types do not support defaults')
            else:
                # Scalar defaults are stored as single-element lists.
                sub.new_value = [sub.new_value]

    assert isinstance(cmd, (CreateScalarType, sd.CommandGroup))
    return cmd
def _ql_typeexpr_to_ir_typeref(
        ql_t: qlast.TypeExpr, *,
        ctx: context.ContextLevel) -> typing.List[irast.TypeRef]:
    """Lower a QL type expression to a list of IR type refs.

    ``typeof`` expressions are compiled to infer their type; unions are
    flattened by concatenation; everything else is a direct lookup.
    """
    if isinstance(ql_t, qlast.TypeOf):
        # Compile the inner expression in a throwaway fenced scope to
        # infer its type without affecting the surrounding query.
        with ctx.newscope(fenced=True, temporary=True) as subctx:
            ir_set = setgen.ensure_set(
                dispatch.compile(ql_t.expr, ctx=subctx), ctx=subctx)
            stype = setgen.get_set_type(ir_set, ctx=subctx)
        return [irtyputils.type_to_typeref(subctx.env.schema, stype)]
    elif isinstance(ql_t, qlast.TypeOp):
        if ql_t.op == '|':
            # A union flattens into the concatenation of both sides.
            return (_ql_typeexpr_to_ir_typeref(ql_t.left, ctx=ctx)
                    + _ql_typeexpr_to_ir_typeref(ql_t.right, ctx=ctx))

        raise errors.UnsupportedFeatureError(
            f'type operator {ql_t.op!r} is not implemented',
            context=ql_t.context)
    else:
        return [_ql_typeref_to_ir_typeref(ql_t, ctx=ctx)]
def _create_begin(self, schema, context):
    """Begin creation of a function, validating its definition.

    Checks, in order: duplicate signature; user-mode restrictions
    (generics, FROM SQL FUNCTION, non-EdgeQL bodies); polymorphic
    return consistency; overload compatibility (named-only params,
    return typemods, session_only, FROM SQL FUNCTION agreement);
    SET OF parameters; and parameter default values.
    """
    from edb.ir import utils as irutils

    fullname = self.classname
    shortname = sn.shortname_from_fullname(fullname)
    schema, cp = self._get_param_desc_from_delta(schema, context, self)
    # Human-readable signature used in all diagnostics below.
    signature = f'{shortname}({", ".join(p.as_str(schema) for p in cp)})'

    func = schema.get(fullname, None)
    if func:
        raise errors.DuplicateFunctionDefinitionError(
            f'cannot create the `{signature}` function: '
            f'a function with the same signature '
            f'is already defined',
            context=self.source_context)

    schema = super()._create_begin(schema, context)

    params: FuncParameterList = self.scls.get_params(schema)

    language = self.scls.get_language(schema)
    return_type = self.scls.get_return_type(schema)
    return_typemod = self.scls.get_return_typemod(schema)
    from_function = self.scls.get_from_function(schema)
    has_polymorphic = params.has_polymorphic(schema)
    polymorphic_return_type = return_type.is_polymorphic(schema)
    named_only = params.find_named_only(schema)
    session_only = self.scls.get_session_only(schema)

    # Certain syntax is only allowed in "EdgeDB developer" mode,
    # i.e. when populating std library, etc.
    if not context.stdmode and not context.testmode:
        if has_polymorphic or polymorphic_return_type:
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create `{signature}` function: '
                f'generic types are not supported in '
                f'user-defined functions',
                context=self.source_context)
        elif from_function:
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create `{signature}` function: '
                f'"FROM SQL FUNCTION" is not supported in '
                f'user-defined functions',
                context=self.source_context)
        elif language != qlast.Language.EdgeQL:
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create `{signature}` function: '
                f'"FROM {language}" is not supported in '
                f'user-defined functions',
                context=self.source_context)

    # A polymorphic return type must be derivable from the parameters.
    if polymorphic_return_type and not has_polymorphic:
        raise errors.InvalidFunctionDefinitionError(
            f'cannot create `{signature}` function: '
            f'function returns a generic type but has no '
            f'generic parameters',
            context=self.source_context)

    # Validate compatibility with existing overloads of the same name.
    overloaded_funcs = schema.get_functions(shortname, ())
    has_from_function = from_function

    for func in overloaded_funcs:
        func_params = func.get_params(schema)
        func_named_only = func_params.find_named_only(schema)
        func_from_function = func.get_from_function(schema)

        if func_named_only.keys() != named_only.keys():
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create `{signature}` function: '
                f'overloading another function with different '
                f'named only parameters: '
                f'"{func.get_shortname(schema)}'
                f'{func_params.as_str(schema)}"',
                context=self.source_context)

        if ((has_polymorphic or func_params.has_polymorphic(schema)) and
                (func.get_return_typemod(schema) != return_typemod)):
            func_return_typemod = func.get_return_typemod(schema)
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create the polymorphic `{signature} -> '
                f'{return_typemod.to_edgeql()} '
                f'{return_type.get_displayname(schema)}` '
                f'function: overloading another function with different '
                f'return type {func_return_typemod.to_edgeql()} '
                f'{func.get_return_type(schema).get_displayname(schema)}',
                context=self.source_context)

        if session_only != func.get_session_only(schema):
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create `{signature}` function: '
                f'overloading another function with different '
                f'`session_only` flag',
                context=self.source_context)

        if func_from_function:
            has_from_function = func_from_function

    if has_from_function:
        # FROM SQL FUNCTION overloads must all map to the same SQL
        # function, since dispatch happens in SQL.
        if (from_function != has_from_function or
                any(f.get_from_function(schema) != has_from_function
                    for f in overloaded_funcs)):
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create the `{signature}` function: '
                f'overloading "FROM SQL FUNCTION" functions is '
                f'allowed only when all functions point to the same '
                f'SQL function',
                context=self.source_context)

    if (language == qlast.Language.EdgeQL and
            any(p.get_typemod(schema) is ft.TypeModifier.SET_OF
                for p in params.objects(schema))):
        raise errors.UnsupportedFeatureError(
            f'cannot create the `{signature}` function: '
            f'SET OF parameters in user-defined EdgeQL functions are '
            f'not supported',
            context=self.source_context)

    # check that params of type 'anytype' don't have defaults
    for p in params.objects(schema):
        p_default = p.get_default(schema)
        if p_default is None:
            continue

        p_type = p.get_type(schema)

        try:
            ir_default = p.get_ir_default(schema=schema)
        except Exception as ex:
            raise errors.InvalidFunctionDefinitionError(
                f'cannot create the `{signature}` function: '
                f'invalid default value {p_default.text!r} of parameter '
                f'{p.get_displayname(schema)!r}: {ex}',
                context=self.source_context)

        check_default_type = True
        if p_type.is_polymorphic(schema):
            # Polymorphic parameters may only default to the empty set.
            if irutils.is_empty(ir_default.expr):
                check_default_type = False
            else:
                raise errors.InvalidFunctionDefinitionError(
                    f'cannot create the `{signature}` function: '
                    f'polymorphic parameter of type '
                    f'{p_type.get_displayname(schema)} cannot '
                    f'have a non-empty default value',
                    context=self.source_context)
        elif (p.get_typemod(schema) is ft.TypeModifier.OPTIONAL and
                irutils.is_empty(ir_default.expr)):
            # An empty default for an OPTIONAL parameter is always valid.
            check_default_type = False

        if check_default_type:
            default_type = ir_default.stype
            if not default_type.assignment_castable_to(p_type, schema):
                raise errors.InvalidFunctionDefinitionError(
                    f'cannot create the `{signature}` function: '
                    f'invalid declaration of parameter '
                    f'{p.get_displayname(schema)!r}: '
                    f'unexpected type of the default expression: '
                    f'{default_type.get_displayname(schema)}, expected '
                    f'{p_type.get_displayname(schema)}',
                    context=self.source_context)

    return schema