def declare_view_from_schema(
        viewcls: s_types.Type, *,
        ctx: context.ContextLevel) -> s_types.Type:
    """Compile a schema-declared view (alias) and register it in *ctx*.

    Results are memoized in ``ctx.env.schema_view_cache``, so each schema
    view is compiled at most once per compilation environment.
    """
    vc = ctx.env.schema_view_cache.get(viewcls)
    if vc is not None:
        # Already compiled for this environment.
        return vc

    # Compile the view expression in a fully detached context so it does
    # not pick up anchors, aliases or scope from the current query.
    with ctx.detached() as subctx:
        subctx.expr_exposed = False
        view_expr = viewcls.get_expr(ctx.env.schema)
        assert view_expr is not None
        view_ql = qlparser.parse(view_expr.text)
        viewcls_name = viewcls.get_name(ctx.env.schema)
        view_set = declare_view(view_ql, alias=viewcls_name,
                                fully_detached=True, ctx=subctx)
        # The view path id _itself_ should not be in the nested namespace.
        view_set.path_id = view_set.path_id.replace_namespace(
            ctx.path_id_namespace)
        vc = subctx.aliased_views[viewcls_name]
        assert vc is not None
        ctx.env.schema_view_cache[viewcls] = vc
        # Propagate the artifacts produced by the detached compilation
        # back into the calling context.
        ctx.source_map.update(subctx.source_map)
        ctx.aliased_views[viewcls_name] = subctx.aliased_views[viewcls_name]
        ctx.view_nodes[vc.get_name(ctx.env.schema)] = vc
        ctx.view_sets[vc] = subctx.view_sets[vc]

    return vc
def tuple_indirection_set(path_tip: irast.Set, *,
                          source: s_types.Type,
                          ptr_name: str,
                          source_context: Optional[
                              parsing.ParserContext] = None,
                          ctx: context.ContextLevel) -> irast.Set:
    """Return a new Set representing the ``path_tip.ptr_name`` element
    of the tuple *source*, linked back to *path_tip* through a
    TupleIndirectionPointer rptr.
    """
    assert isinstance(source, s_types.Tuple)

    schema = ctx.env.schema
    normalized_name = source.normalize_index(schema, ptr_name)
    element_type = source.get_subtype(schema, ptr_name)

    element_path_id = pathctx.get_tuple_indirection_path_id(
        path_tip.path_id, normalized_name, element_type, ctx=ctx)

    result = new_set(stype=element_type, path_id=element_path_id, ctx=ctx)
    result.rptr = irast.TupleIndirectionPointer(
        source=path_tip,
        target=result,
        ptrref=element_path_id.rptr(),
        direction=element_path_id.rptr_dir(),
    )
    return result
def new_set(
    *,
    stype: s_types.Type,
    ctx: context.ContextLevel,
    ircls: Type[irast.Set] = irast.Set,
    **kwargs: Any,
) -> irast.Set:
    """Create a new ir.Set instance with given attributes.

    Absolutely all ir.Set instances must be created using this
    constructor.
    """
    # If *stype* is an object type with access policy filters and query
    # rewrites are enabled, compile the policy filter once per type and
    # record the rewritten set in ctx.type_rewrites.
    if (stype not in ctx.type_rewrites
            and isinstance(stype, s_objtypes.ObjectType)
            and ctx.env.options.apply_query_rewrites
            and (filters := stype.get_access_policy_filters(ctx.env.schema))):
        # Build "SELECT <stype> FILTER <policy1> AND <policy2> ...".
        qry = qlast.SelectQuery(
            result=qlast.Path(
                steps=[s_utils.name_to_ast_ref(stype.get_name(ctx.env.schema))]
            ),
        )
        for f in filters:
            assert isinstance(f.qlast, qlast.Expr)
            qry.where = astutils.extend_binop(qry.where, f.qlast)

        with ctx.detached() as subctx:
            subctx.expr_exposed = False
            # This is a global rewrite operation that is done once
            # per type, and so we don't really care if we're in a
            # temporary scope or not.
            subctx.path_scope = subctx.env.path_scope.root
            subctx.in_temp_scope = False
            # Put a placeholder to prevent recursion.
            subctx.type_rewrites[stype] = irast.Set()
            filtered_set = dispatch.compile(qry, ctx=subctx)
            assert isinstance(filtered_set, irast.Set)
            subctx.type_rewrites[stype] = filtered_set
            # NOTE(review): this chunk ends here without constructing or
            # returning the ir.Set itself — the remainder of the function
            # appears to be outside the visible extract.
def _find_cast(orig_stype: s_types.Type,
               new_stype: s_types.Type, *,
               srcctx: Optional[parsing.ParserContext],
               ctx: context.ContextLevel) -> Optional[s_casts.Cast]:
    """Locate the cast object implementing ``orig_stype`` -> ``new_stype``.

    Returns None when no usable cast exists; raises QueryError when the
    match is ambiguous.
    """
    schema = ctx.env.schema
    casts = schema.get_casts_to_type(new_stype)
    if not casts and not new_stype.is_collection():
        # No direct cast to the target: fall back to casts targeting
        # one of its ancestors, nearest first.
        for ancestor in new_stype.get_ancestors(schema).objects(schema):
            casts = schema.get_casts_to_type(ancestor)
            if casts:
                break
        else:
            return None

    # Overload resolution over the candidate casts using dummy
    # empty-set arguments of the appropriate types.
    dummy_args = [
        (orig_stype, irast.EmptySet()),
        (new_stype, irast.EmptySet()),
    ]
    matched = polyres.find_callable(
        (CastCallableWrapper(c) for c in casts),
        args=dummy_args, kwargs={}, ctx=ctx)

    if not matched:
        return None
    if len(matched) > 1:
        raise errors.QueryError(
            f'cannot unambiguously cast '
            f'{orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}',
            context=srcctx)
    return cast(CastCallableWrapper, matched[0].func)._cast
def schema_type_to_python_type(stype: s_types.Type,
                               schema: s_schema.Schema) -> type:
    """Map a schema type to an equivalent Python type.

    Scalars are mapped to builtin Python scalar types; object types are
    mapped to generated dataclasses.  Anything else is not representable.
    """
    if stype.is_scalar():
        return scalar_type_to_python_type(stype, schema)
    if stype.is_object_type():
        return object_type_to_python_type(stype, schema)
    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def _cast_array_literal(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type,
        *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> irast.Set:
    """Cast an array *literal* by casting each element expression.

    When a direct array->target cast exists, the elements are first
    re-assembled into an intermediate array<target> and the direct cast
    is applied on top; otherwise a plain SQL cast wraps the result.
    """
    assert isinstance(ir_set.expr, irast.Array)

    orig_typeref = typegen.type_to_typeref(orig_stype, env=ctx.env)
    new_typeref = typegen.type_to_typeref(new_stype, env=ctx.env)
    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    if direct_cast is None:
        # No direct cast: the target must itself be an array; cast
        # element-by-element to the target's element type.
        if not new_stype.is_array():
            raise errors.QueryError(
                f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
                f'to {new_stype.get_displayname(ctx.env.schema)!r}',
                context=srcctx) from None
        assert isinstance(new_stype, s_types.Array)
        el_type = new_stype.get_subtypes(ctx.env.schema)[0]
        intermediate_stype = orig_stype
    else:
        # Direct cast exists: first build array<new_stype>, then apply
        # the direct cast to it.
        el_type = new_stype
        ctx.env.schema, intermediate_stype = s_types.Array.from_subtypes(
            ctx.env.schema, [el_type])

    intermediate_typeref = typegen.type_to_typeref(
        intermediate_stype, env=ctx.env)

    casted_els = []
    for el in ir_set.expr.elements:
        el = compile_cast(el, el_type,
                          cardinality_mod=qlast.CardinalityModifier.Required,
                          ctx=ctx, srcctx=srcctx)
        casted_els.append(el)

    new_array = setgen.ensure_set(
        irast.Array(elements=casted_els, typeref=intermediate_typeref),
        ctx=ctx)

    if direct_cast is not None:
        return _cast_to_ir(
            new_array, direct_cast, intermediate_stype, new_stype, ctx=ctx)
    else:
        cast_ir = irast.TypeCast(
            expr=new_array,
            from_type=orig_typeref,
            to_type=new_typeref,
            sql_cast=True,
            sql_expr=False,
        )
        return setgen.ensure_set(cast_ir, ctx=ctx)
def apply_intersection(left: s_types.Type, right: s_types.Type, *,
                       ctx: context.ContextLevel) -> TypeIntersectionResult:
    """Compute an intersection of two types: *left* and *right*.

    In theory, this should handle all combinations of unions and
    intersections recursively, but currently this handles only the
    common case of intersecting a regular type or a union type with
    a regular type.

    Returns:
        A :class:`~TypeIntersectionResult` named tuple containing the
        result intersection type, whether the type system considers
        the intersection empty and whether *left* is related to *right*
        (i.e either is a subtype of another).
    """
    if left.issubclass(ctx.env.schema, right):
        # The intersection type is a proper *superclass*
        # of the argument, then this is, effectively, a NOP.
        return TypeIntersectionResult(stype=left)

    is_subtype = False
    empty_intersection = False
    union = left.get_union_of(ctx.env.schema)
    if union:
        # If the argument type is a union type, then we
        # narrow it by the intersection type.
        narrowed_union = []
        for component_type in union.objects(ctx.env.schema):
            if component_type.issubclass(ctx.env.schema, right):
                narrowed_union.append(component_type)
            elif right.issubclass(ctx.env.schema, component_type):
                narrowed_union.append(right)

        if len(narrowed_union) == 0:
            # No union component is related to *right*: fall back to a
            # formal intersection type; the intersection is empty unless
            # some descendant of *right* is also a subclass of *left*.
            int_type = get_intersection_type((left, right), ctx=ctx)
            is_subtype = int_type.issubclass(ctx.env.schema, left)
            assert isinstance(right, s_obj.InheritingObject)
            empty_intersection = not any(
                c.issubclass(ctx.env.schema, left)
                for c in right.descendants(ctx.env.schema))
        elif len(narrowed_union) == 1:
            # Narrowed to a single component — use it directly.
            int_type = narrowed_union[0]
            is_subtype = int_type.issubclass(ctx.env.schema, left)
        else:
            int_type = get_union_type(narrowed_union, ctx=ctx)
    else:
        # Plain (non-union) *left*: the intersection is non-empty only
        # if *right* is a subclass of *left*.
        is_subtype = right.issubclass(ctx.env.schema, left)
        empty_intersection = not is_subtype
        int_type = get_intersection_type((left, right), ctx=ctx)

    return TypeIntersectionResult(
        stype=int_type,
        is_empty=empty_intersection,
        is_subtype=is_subtype,
    )
def _get_cast_distance(
    arg: irast.Set,
    arg_type: s_types.Type,
    param_type: s_types.Type,
) -> int:
    # Closure over the enclosing resolver: reads ``basic_matching_only``,
    # ``in_polymorphic_func``, ``is_abstract``, ``schema`` and ``ctx``
    # from the enclosing scope, and threads ``resolved_poly_base_type``
    # across calls for consistent polymorphic resolution.
    #
    # Returns -1 for "no match", 0 for an exact match, and a positive
    # implicit-cast distance otherwise (MAX_TYPE_DISTANCE is used to
    # de-prioritize matches against abstract parameter types).
    nonlocal resolved_poly_base_type

    if basic_matching_only:
        return 0

    if in_polymorphic_func:
        # Compiling a body of a polymorphic function.

        if arg_type.is_polymorphic(schema):
            if param_type.is_polymorphic(schema):
                if arg_type.test_polymorphic(schema, param_type):
                    return 0
                else:
                    return -1
            else:
                if arg_type.resolve_polymorphic(schema, param_type):
                    return 0
                else:
                    return -1

    if param_type.is_polymorphic(schema):
        if not arg_type.test_polymorphic(schema, param_type):
            return -1

        resolved = param_type.resolve_polymorphic(schema, arg_type)
        if resolved is None:
            return -1

        if resolved_poly_base_type is None:
            # First polymorphic parameter seen — anchor the resolution.
            resolved_poly_base_type = resolved

        if resolved_poly_base_type == resolved:
            return s_types.MAX_TYPE_DISTANCE if is_abstract else 0

        # Different resolutions for the same polymorphic base: try to
        # reconcile them via a common implicitly-castable type.
        ctx.env.schema, ct = (
            resolved_poly_base_type.find_common_implicitly_castable_type(
                resolved,
                ctx.env.schema,
            )
        )

        if ct is not None:
            # If we found a common implicitly castable type, we
            # refine our resolved_poly_base_type to be that as the
            # more general case.
            resolved_poly_base_type = ct
            return s_types.MAX_TYPE_DISTANCE if is_abstract else 0
        else:
            return -1

    if arg_type.issubclass(schema, param_type):
        return 0

    return arg_type.get_implicit_cast_distance(param_type, schema)
def scalar_type_to_python_type(
    stype: s_types.Type,
    schema: s_schema.Schema,
) -> type:
    """Return the Python type corresponding to a scalar schema type."""
    # Scan the base-type map for the first base that *stype* derives from.
    resolved = next(
        (py_type for base_name, py_type in typemap.items()
         if stype.issubclass(
             schema, schema.get(base_name, type=s_obj.InheritingObject))),
        None,
    )
    if resolved is not None:
        return resolved

    # Enums have no typemap entry; they surface as plain strings.
    if stype.is_enum(schema):
        return str

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def declare_view_from_schema(
        viewcls: s_types.Type, *,
        ctx: context.ContextLevel) -> s_types.Type:
    """Compile a schema-declared view (alias) and register it in *ctx*.

    Results are memoized in ``ctx.env.schema_view_cache`` (unless the
    current context is a temporary scope), so each schema view is
    compiled at most once per compilation environment.
    """
    vc = ctx.env.schema_view_cache.get(viewcls)
    if vc is not None:
        return vc

    # Compile the view expression in a fully detached context so it does
    # not pick up anchors, aliases or scope from the current query.
    with ctx.detached() as subctx:
        subctx.expr_exposed = False
        view_expr = viewcls.get_expr(ctx.env.schema)
        assert view_expr is not None
        view_ql = qlparser.parse(view_expr.text)
        viewcls_name = viewcls.get_name(ctx.env.schema)
        assert isinstance(view_ql, qlast.Expr), 'expected qlast.Expr'
        view_set = declare_view(view_ql, alias=viewcls_name,
                                fully_detached=True, ctx=subctx)
        # The view path id _itself_ should not be in the nested namespace.
        view_set.path_id = view_set.path_id.replace_namespace(
            ctx.path_id_namespace)
        vc = subctx.aliased_views[viewcls_name]
        assert vc is not None
        # Temporary scopes must not pollute the environment-wide cache.
        if not ctx.in_temp_scope:
            ctx.env.schema_view_cache[viewcls] = vc
        # Propagate the artifacts produced by the detached compilation
        # back into the calling context.
        ctx.source_map.update(subctx.source_map)
        ctx.aliased_views[viewcls_name] = subctx.aliased_views[viewcls_name]
        ctx.view_nodes[vc.get_name(ctx.env.schema)] = vc
        ctx.view_sets[vc] = subctx.view_sets[vc]

        # XXX: The current cardinality inference machine does not look
        # into unreferenced expression parts, which includes computables
        # that may be declared on an alias that another alias is referencing,
        # leaving Unknown cardinalities in place. To fix this, copy
        # cardinalities for computed pointers from the alias object in the
        # schema.
        view_type = setgen.get_set_type(view_set, ctx=subctx)
        if isinstance(view_type, s_objtypes.ObjectType):
            assert isinstance(viewcls, s_objtypes.ObjectType)
            _fixup_cardinalities(
                view_type,
                viewcls,
                ctx=ctx,
            )

    return vc
def derive_ptrcls(
        view_rptr: context.ViewRPtr, *,
        target_scls: s_types.Type,
        transparent: bool = False,
        ctx: context.ContextLevel) -> s_pointers.Pointer:
    """Derive (and store on *view_rptr*) the pointer class for a view rptr.

    If *view_rptr* has no pointer class yet, one is derived from its base
    pointer (defaulting to ``std::link``/``std::property``); otherwise the
    existing pointer class is re-derived for the new target.
    """
    if view_rptr.ptrcls is None:
        if view_rptr.base_ptrcls is None:
            # No base pointer to inherit path-id transparency from.
            transparent = False

            # Pick the generic base: std::link for object targets,
            # std::property otherwise.
            if target_scls.is_object_type():
                base = ctx.env.get_track_schema_object(
                    sn.QualName('std', 'link'), expr=None)
                view_rptr.base_ptrcls = cast(s_links.Link, base)
            else:
                base = ctx.env.get_track_schema_object(
                    sn.QualName('std', 'property'), expr=None)
                view_rptr.base_ptrcls = cast(s_props.Property, base)

        derived_name = schemactx.derive_view_name(
            view_rptr.base_ptrcls,
            derived_name_base=view_rptr.ptrcls_name,
            derived_name_quals=(
                str(view_rptr.source.get_name(ctx.env.schema)),
            ),
            ctx=ctx)

        attrs = {}
        if transparent and not view_rptr.ptrcls_is_alias:
            # Keep the original path id name so the derived pointer is
            # path-id-transparent.
            attrs['path_id_name'] = view_rptr.base_ptrcls.get_name(
                ctx.env.schema)

        view_rptr.ptrcls = schemactx.derive_ptr(
            view_rptr.base_ptrcls, view_rptr.source, target_scls,
            derived_name=derived_name,
            is_insert=view_rptr.is_insert,
            is_update=view_rptr.is_update,
            attrs=attrs,
            ctx=ctx
        )

    else:
        attrs = {}
        if transparent and not view_rptr.ptrcls_is_alias:
            attrs['path_id_name'] = view_rptr.ptrcls.get_name(ctx.env.schema)

        view_rptr.ptrcls = schemactx.derive_ptr(
            view_rptr.ptrcls, view_rptr.source, target_scls,
            derived_name_quals=(
                str(view_rptr.source.get_name(ctx.env.schema)),
            ),
            is_insert=view_rptr.is_insert,
            is_update=view_rptr.is_update,
            attrs=attrs,
            ctx=ctx
        )

    return view_rptr.ptrcls
def _cast_array_literal(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type, *,
        srcctx: parsing.ParserContext,
        ctx: context.ContextLevel) -> irast.Base:
    """Cast an array *literal* by casting each element expression."""
    orig_typeref = irtyputils.type_to_typeref(ctx.env.schema, orig_stype)
    new_typeref = irtyputils.type_to_typeref(ctx.env.schema, new_stype)

    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    if direct_cast is None:
        # No direct cast: the target must be an array; cast each element
        # to the target's element type.
        if not new_stype.is_array():
            raise errors.QueryError(
                f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
                f'to {new_stype.get_displayname(ctx.env.schema)!r}',
                context=srcctx) from None
        el_type = new_stype.get_subtypes(ctx.env.schema)[0]
    else:
        # Direct cast exists: cast elements to new_stype, then apply the
        # direct cast on the re-assembled array.
        el_type = new_stype

    casted_els = []
    for el in ir_set.expr.elements:
        el = compile_cast(el, el_type, ctx=ctx, srcctx=srcctx)
        casted_els.append(el)

    # NOTE(review): the rebuilt array reuses orig_typeref even though its
    # elements were cast to el_type — presumably the outer TypeCast /
    # direct cast makes this immaterial downstream; confirm against sqlgen.
    new_array = setgen.ensure_set(
        irast.Array(elements=casted_els, typeref=orig_typeref),
        ctx=ctx)

    if direct_cast is not None:
        return _cast_to_ir(
            new_array, direct_cast, orig_stype, new_stype, ctx=ctx)
    else:
        cast_ir = irast.TypeCast(
            expr=new_array,
            from_type=orig_typeref,
            to_type=new_typeref,
            sql_cast=True,
        )
        return setgen.ensure_set(cast_ir, ctx=ctx)
def _add_annotation(self, t: s_types.Type):
    """Append a type-name annotation record for *t* to the annotation
    buffer: marker byte, type id, then a length-prefixed UTF-8 name.
    """
    buf = self.anno_buffer
    buf.append(CTYPE_ANNO_TYPENAME)
    buf.append(t.id.bytes)

    name_bytes = t.get_displayname(self.schema).encode('utf-8')
    buf.append(_uint32_packer(len(name_bytes)))
    buf.append(name_bytes)
def has_implicit_tname(
        stype: s_types.Type, *,
        is_mutation: bool,
        ctx: context.ContextLevel) -> bool:
    """Whether shapes on *stype* should include the implicit type-name
    pointer: only for object types, never in mutation shapes, and only
    when the context enables it.
    """
    if is_mutation:
        return False
    if not stype.is_object_type():
        return False
    return ctx.implicit_tname_in_shapes
def tuple_indirection_set(path_tip: irast.Set, *,
                          source: s_types.Type,
                          ptr_name: str,
                          source_context: parsing.ParserContext,
                          ctx: context.ContextLevel) -> irast.Set:
    """Return a Set wrapping a TupleIndirection expression selecting the
    ``ptr_name`` element of the tuple *source* from *path_tip*.
    """
    assert isinstance(source, s_types.Tuple)

    schema = ctx.env.schema
    normalized_name = source.normalize_index(schema, ptr_name)
    element_type = source.get_subtype(schema, ptr_name)

    indirection_path_id = pathctx.get_tuple_indirection_path_id(
        path_tip.path_id, normalized_name, element_type, ctx=ctx)

    indirection = irast.TupleIndirection(
        expr=path_tip,
        name=normalized_name,
        path_id=indirection_path_id,
        context=source_context,
    )
    return expression_set(indirection, ctx=ctx)
def is_type_compatible(
    type_a: s_types.Type,
    type_b: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> bool:
    """Check whether *type_b* is compatible with *type_a*.

    Compatibility means the material type of *type_b* is a subclass of
    the material type of *type_a*; for tuples the element names must
    also match.
    """
    schema = ctx.env.schema
    material_a = type_a.material_type(schema)
    material_b = type_b.material_type(schema)

    if not material_b.issubclass(schema, material_a):
        return False

    if (isinstance(material_a, s_types.Tuple)
            and isinstance(material_b, s_types.Tuple)):
        # For tuples, we also check that the element names match.
        return (material_a.get_element_names(schema)
                == material_b.get_element_names(schema))

    return True
def _find_cast(
        orig_stype: s_types.Type,
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> Optional[s_casts.Cast]:
    """Locate the cast object implementing ``orig_stype`` -> ``new_stype``.

    Returns None when no usable cast exists (including when the types
    are directly related by subtyping); raises QueryError when the match
    is ambiguous.
    """
    # Don't try to pick up casts when there is a direct subtyping
    # relationship.
    if (orig_stype.issubclass(ctx.env.schema, new_stype)
            or new_stype.issubclass(ctx.env.schema, orig_stype)):
        return None

    casts = ctx.env.schema.get_casts_to_type(new_stype)
    if not casts and isinstance(new_stype, s_types.InheritingType):
        # No direct cast: fall back to casts targeting an ancestor.
        ancestors = new_stype.get_ancestors(ctx.env.schema)
        for t in ancestors.objects(ctx.env.schema):
            casts = ctx.env.schema.get_casts_to_type(t)
            if casts:
                break
        else:
            return None

    # Resolve overloads among candidate casts using dummy empty-set
    # arguments of the appropriate types.
    dummy_set = irast.EmptySet()  # type: ignore
    args = [
        (orig_stype, dummy_set),
        (new_stype, dummy_set),
    ]

    matched = polyres.find_callable(
        (CastCallableWrapper(c) for c in casts),
        args=args,
        kwargs={},
        ctx=ctx)

    if len(matched) == 1:
        return cast(CastCallableWrapper, matched[0].func)._cast
    elif len(matched) > 1:
        raise errors.QueryError(
            f'cannot unambiguously cast '
            f'{orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}',
            context=srcctx)
    else:
        return None
def type_to_ql_typeref(t: s_types.Type, *,
                       _name=None,
                       schema: s_schema.Schema) -> qlast.TypeName:
    """Convert a schema type into an EdgeQL AST type reference.

    ``_name`` carries the element name when recursing into named tuple
    subtypes.
    """
    if t.is_any():
        result = qlast.TypeName(name=_name, maintype=qlast.AnyType())
    elif t.is_anytuple():
        result = qlast.TypeName(name=_name, maintype=qlast.AnyTuple())
    elif not isinstance(t, s_abc.Collection):
        # Plain named type: reference it by module::name.
        result = qlast.TypeName(
            name=_name,
            maintype=qlast.ObjectRef(
                module=t.get_name(schema).module,
                name=t.get_name(schema).name
            )
        )
    elif isinstance(t, s_abc.Tuple) and t.named:
        # Named tuple: recurse with each element's name attached.
        result = qlast.TypeName(
            name=_name,
            maintype=qlast.ObjectRef(
                name=t.schema_name
            ),
            subtypes=[
                type_to_ql_typeref(st, _name=sn, schema=schema)
                for sn, st in t.iter_subtypes(schema)
            ]
        )
    else:
        # Other collections (arrays, unnamed tuples): positional subtypes.
        result = qlast.TypeName(
            name=_name,
            maintype=qlast.ObjectRef(
                name=t.schema_name
            ),
            subtypes=[
                type_to_ql_typeref(st, schema=schema)
                for st in t.get_subtypes(schema)
            ]
        )

    return result
def type_to_typeref(
    t: s_types.Type,
    env: context.Environment,
) -> irast.TypeRef:
    """Return the IR TypeRef for *t*, using the environment's cache.

    Update/delete expression types also include their descendants in
    the resulting typeref.
    """
    expr_type = t.get_expr_type(env.schema)
    wants_descendants = (
        expr_type is s_types.ExprType.Update
        or expr_type is s_types.ExprType.Delete
    )

    return irtyputils.type_to_typeref(
        env.schema,
        t,
        include_descendants=wants_descendants,
        cache=env.type_ref_cache,
    )
def scalar_type_to_python_type(stype: s_types.Type,
                               schema: s_schema.Schema) -> type:
    """Return the Python type corresponding to a scalar schema type."""
    # Mapping of std scalar base types to their Python counterparts.
    typemap = {
        'std::str': str,
        'std::anyint': int,
        'std::anyfloat': float,
        'std::decimal': decimal.Decimal,
        'std::bool': bool,
        'std::json': str,
        'std::uuid': uuid.UUID,
    }

    resolved = next(
        (py_type for base_name, py_type in typemap.items()
         if stype.issubclass(schema, schema.get(base_name))),
        None,
    )
    if resolved is not None:
        return resolved

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def eta_expand(
    path: qlast.Path,
    stype: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """η-expansion of an AST path.

    Only collections that (may) contain object types are expanded; for
    our purposes it suffices that objects are exposed to the output, so
    everything else is returned unchanged.
    """
    if ALWAYS_EXPAND or stype.contains_object(ctx.env.schema):
        if isinstance(stype, s_types.Array):
            return eta_expand_array(path, stype, ctx=ctx)
        if isinstance(stype, s_types.Tuple):
            return eta_expand_tuple(path, stype, ctx=ctx)

    # This isn't strictly right from a "fully η expanding" perspective,
    # but nothing here needs expansion.
    return path
def scalar_type_to_python_type(stype: s_types.Type,
                               schema: s_schema.Schema) -> type:
    """Return the Python type corresponding to a scalar schema type."""
    # Mapping of std scalar base types to their Python counterparts.
    typemap = {
        'std::str': str,
        'std::anyint': int,
        'std::anyfloat': float,
        'std::decimal': decimal.Decimal,
        'std::bigint': decimal.Decimal,
        'std::bool': bool,
        'std::json': str,
        'std::uuid': uuidgen.UUID,
    }

    def _derives_from(base_name: str) -> bool:
        base = schema.get(base_name)
        assert isinstance(base, s_inh.InheritingObject)
        return stype.issubclass(schema, base)

    for schema_name, py_type in typemap.items():
        if _derives_from(schema_name):
            return py_type

    raise UnsupportedExpressionError(
        f'{stype.get_displayname(schema)} is not representable in Python')
def derive_view(
        stype: s_types.Type, *,
        derived_name: typing.Optional[sn.SchemaName] = None,
        derived_name_quals: typing.Optional[typing.Sequence[str]] = (),
        derived_name_base: typing.Optional[str] = None,
        preserve_shape: bool = False,
        preserve_path_id: bool = False,
        is_insert: bool = False,
        is_update: bool = False,
        inheritance_merge: bool = True,
        attrs: typing.Optional[dict] = None,
        ctx: context.ContextLevel) -> s_types.Type:
    """Derive a view subtype of *stype* and register it in the context.

    The view kind (Insert/Update/Select) is recorded in the derived
    type's ``view_type`` attribute.
    """
    if derived_name is None:
        derived_name = derive_view_name(
            stype=stype, derived_name_quals=derived_name_quals,
            derived_name_base=derived_name_base, ctx=ctx)

    if is_insert:
        vtype = s_types.ViewType.Insert
    elif is_update:
        vtype = s_types.ViewType.Update
    else:
        vtype = s_types.ViewType.Select

    if attrs is None:
        attrs = {}
    else:
        # Copy so the caller's dict is not mutated.
        attrs = dict(attrs)

    attrs['view_type'] = vtype

    derived: s_types.Type

    if isinstance(stype, s_abc.Collection):
        ctx.env.schema, derived = stype.derive_subtype(
            ctx.env.schema, name=derived_name)

    elif isinstance(stype, s_inh.InheritingObject):
        ctx.env.schema, derived = stype.derive_subtype(
            ctx.env.schema,
            name=derived_name,
            inheritance_merge=inheritance_merge,
            refdict_whitelist={'pointers'},
            mark_derived=True,
            preserve_path_id=preserve_path_id,
            attrs=attrs,
        )

        if (not stype.generic(ctx.env.schema)
                and isinstance(derived, s_sources.Source)):
            scls_pointers = stype.get_pointers(ctx.env.schema)
            derived_own_pointers = derived.get_pointers(ctx.env.schema)

            for pn, ptr in derived_own_pointers.items(ctx.env.schema):
                # This is a view of a view. Make sure query-level
                # computable expressions for pointers are carried over.
                src_ptr = scls_pointers.get(ctx.env.schema, pn)
                computable_data = ctx.source_map.get(src_ptr)
                if computable_data is not None:
                    ctx.source_map[ptr] = computable_data

                # Re-queue cardinality inference for derived pointers
                # whose source pointer is still pending.
                if src_ptr in ctx.pending_cardinality:
                    ctx.pointer_derivation_map[src_ptr].append(ptr)
                    stmtctx.pend_pointer_cardinality_inference(
                        ptrcls=ptr, ctx=ctx)

    ctx.view_nodes[derived.get_name(ctx.env.schema)] = derived

    if preserve_shape and stype in ctx.env.view_shapes:
        # Carry the source type's shape over to the derived view.
        ctx.env.view_shapes[derived] = ctx.env.view_shapes[stype]

    return derived
def object_type_to_python_type(
        objtype: s_types.Type,
        schema: s_schema.Schema, *,
        base_class: typing.Optional[type] = None,
        _memo: typing.Optional[typing.Mapping[s_types.Type, type]] = None
) -> type:
    """Generate a frozen dataclass mirroring an object type's pointers.

    Object-typed pointers recurse (with *_memo* breaking cycles and
    ``_subclasses`` collecting generated child classes); scalar pointers
    map via scalar_type_to_python_type.  Multi pointers become frozensets.
    """
    if _memo is None:
        _memo = {}

    fields = []
    subclasses = []

    for pn, p in objtype.get_pointers(schema).items(schema):
        # System pointers are not represented.
        if pn in ('id', '__type__'):
            continue

        ptype = p.get_target(schema)

        if ptype.is_object_type():
            pytype = _memo.get(ptype)
            if pytype is None:
                pytype = object_type_to_python_type(
                    ptype, schema, base_class=base_class, _memo=_memo)
                _memo[ptype] = pytype

                # Also generate classes for all subtypes, parented on
                # the class we just built.
                for subtype in ptype.children(schema):
                    subclasses.append(
                        object_type_to_python_type(
                            subtype, schema,
                            base_class=pytype, _memo=_memo))
        else:
            pytype = scalar_type_to_python_type(ptype, schema)

        is_multi = p.get_cardinality(schema) is qltypes.Cardinality.MANY
        if is_multi:
            pytype = typing.FrozenSet[pytype]

        default = p.get_default(schema)
        if default is None:
            if p.get_required(schema):
                # Required pointer with no default: field is mandatory.
                default = dataclasses.MISSING
        else:
            # Evaluate the schema-declared default expression to a
            # concrete Python value.
            default = ql_compiler.evaluate_to_python_val(
                default.text, schema=schema)
            if is_multi and not isinstance(default, frozenset):
                default = frozenset((default, ))

        constraints = p.get_constraints(schema).objects(schema)
        exclusive = schema.get('std::exclusive')
        # Exclusive scalar pointers participate in equality/hashing.
        unique = (
            not ptype.is_object_type()
            and any(c.issubclass(schema, exclusive) for c in constraints)
        )
        field = dataclasses.field(
            compare=unique,
            hash=unique,
            repr=True,
            default=default,
        )
        fields.append((pn, pytype, field))

    return dataclasses.make_dataclass(
        objtype.get_name(schema).name,
        fields=fields,
        bases=(base_class, ) if base_class is not None else (),
        frozen=True,
        namespace={'_subclasses': subclasses},
    )
def derive_view(
    stype: s_types.Type,
    *,
    derived_name: Optional[sn.QualName] = None,
    derived_name_quals: Optional[Sequence[str]] = (),
    preserve_shape: bool = False,
    preserve_path_id: bool = False,
    exprtype: s_types.ExprType = s_types.ExprType.Select,
    inheritance_merge: bool = True,
    attrs: Optional[Dict[str, Any]] = None,
    ctx: context.ContextLevel,
) -> s_types.Type:
    """Derive a view subtype of *stype* for the given expression kind.

    NOTE(review): the visible extract ends after the pointer-derivation
    loop; the function presumably continues (and returns ``derived``)
    outside this chunk.
    """
    if derived_name is None:
        assert isinstance(stype, s_obj.DerivableObject)
        derived_name = derive_view_name(
            stype=stype, derived_name_quals=derived_name_quals, ctx=ctx)

    if attrs is None:
        attrs = {}
    else:
        # Copy so the caller's dict is not mutated.
        attrs = dict(attrs)

    attrs['expr_type'] = exprtype

    derived: s_types.Type

    if isinstance(stype, s_types.Collection):
        ctx.env.schema, derived = stype.derive_subtype(
            ctx.env.schema,
            name=derived_name,
            attrs=attrs,
        )

    elif isinstance(stype, (s_objtypes.ObjectType, s_scalars.ScalarType)):
        existing = ctx.env.schema.get(
            derived_name, default=None, type=type(stype))
        if existing is not None:
            if ctx.recompiling_schema_alias:
                # When recompiling schema alias, we, essentially
                # re-derive the already-existing objects exactly.
                derived = existing
            else:
                raise AssertionError(
                    f'{type(stype).get_schema_class_displayname()}'
                    f' {derived_name!r} already exists',
                )
        else:
            ctx.env.schema, derived = stype.derive_subtype(
                ctx.env.schema,
                name=derived_name,
                inheritance_merge=inheritance_merge,
                inheritance_refdicts={'pointers'},
                mark_derived=True,
                transient=True,
                preserve_path_id=preserve_path_id,
                attrs=attrs,
            )

    if (stype.is_view(ctx.env.schema)
            # XXX: Previously, the main check here was just for
            # (not stype.generic(...)). generic isn't really the
            # right way to figure out if something is a view, since
            # some aliases will be generic. On changing it to is_view
            # instead, though, two GROUP BY tests that grouped
            # on the result of a group broke
            # (test_edgeql_group_by_group_by_03{a,b}).
            #
            # It's probably a bug that this matters in that case, and
            # it is an accident that group bindings are named in such
            # a way that they count as being generic, but for now
            # preserve that behavior.
            and not (stype.generic(ctx.env.schema)
                     and (view_ir := ctx.view_sets.get(stype))
                     and (scope_info := ctx.path_scope_map.get(view_ir))
                     and scope_info.binding_kind)
            and isinstance(derived, s_objtypes.ObjectType)):
        assert isinstance(stype, s_objtypes.ObjectType)
        scls_pointers = stype.get_pointers(ctx.env.schema)
        derived_own_pointers = derived.get_pointers(ctx.env.schema)

        for pn, ptr in derived_own_pointers.items(ctx.env.schema):
            # This is a view of a view. Make sure query-level
            # computable expressions for pointers are carried over.
            src_ptr = scls_pointers.get(ctx.env.schema, pn)
            computable_data = (
                ctx.source_map.get(src_ptr) if src_ptr else None)
            if computable_data is not None:
                ctx.source_map[ptr] = computable_data

            if src_ptr in ctx.env.pointer_specified_info:
                ctx.env.pointer_derivation_map[src_ptr].append(ptr)
def _cast_array(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> irast.Set:
    """Cast an arbitrary array expression.

    Fast path: a registered from-cast handles the whole array.  Slow
    path: decompose into
    ``array_agg((<el_type>(enumerate(array_unpack(x)).1)) ORDER BY .0)``
    so each element is cast individually while preserving order.
    """
    assert isinstance(orig_stype, s_types.Array)

    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    if direct_cast is None:
        if not new_stype.is_array():
            raise errors.QueryError(
                f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
                f'to {new_stype.get_displayname(ctx.env.schema)!r}',
                context=srcctx)
        assert isinstance(new_stype, s_types.Array)
        el_type = new_stype.get_subtypes(ctx.env.schema)[0]
    else:
        el_type = new_stype

    orig_el_type = orig_stype.get_subtypes(ctx.env.schema)[0]

    el_cast = _find_cast(orig_el_type, el_type, srcctx=srcctx, ctx=ctx)

    if el_cast is not None and el_cast.get_from_cast(ctx.env.schema):
        # Simple cast
        return _cast_to_ir(ir_set, el_cast, orig_stype, new_stype, ctx=ctx)
    else:
        # Pin the source set so the decomposed expression below does
        # not multiply it.
        pathctx.register_set_in_scope(ir_set, ctx=ctx)

        with ctx.new() as subctx:
            subctx.anchors = subctx.anchors.copy()
            source_alias = subctx.aliases.get('a')
            subctx.anchors[source_alias] = ir_set

            # array_unpack(<source>)
            unpacked = qlast.FunctionCall(
                func=('__std__', 'array_unpack'),
                args=[
                    qlast.Path(
                        steps=[qlast.ObjectRef(name=source_alias)],
                    ),
                ],
            )

            # enumerate(...) — retains the original element order in .0.
            enumerated = setgen.ensure_set(
                dispatch.compile(
                    qlast.FunctionCall(
                        func=('__std__', 'enumerate'),
                        args=[unpacked],
                    ),
                    ctx=subctx,
                ),
                ctx=subctx,
            )

            enumerated_alias = subctx.aliases.get('e')
            subctx.anchors[enumerated_alias] = enumerated
            enumerated_ref = qlast.Path(
                steps=[qlast.ObjectRef(name=enumerated_alias)],
            )

            # array_agg(<el_type>(e.1) ORDER BY e.0)
            elements = qlast.FunctionCall(
                func=('__std__', 'array_agg'),
                args=[
                    qlast.SelectQuery(
                        result=qlast.TypeCast(
                            expr=qlast.Path(
                                steps=[
                                    enumerated_ref,
                                    qlast.Ptr(
                                        ptr=qlast.ObjectRef(
                                            name='1',
                                            direction='>',
                                        ),
                                    ),
                                ],
                            ),
                            type=typegen.type_to_ql_typeref(
                                el_type,
                                ctx=subctx,
                            ),
                            cardinality_mod=qlast.CardinalityModifier.Required,
                        ),
                        orderby=[
                            qlast.SortExpr(
                                path=qlast.Path(
                                    steps=[
                                        enumerated_ref,
                                        qlast.Ptr(
                                            ptr=qlast.ObjectRef(
                                                name='0',
                                                direction='>',
                                            ),
                                        ),
                                    ],
                                ),
                                direction=qlast.SortOrder.Asc,
                            ),
                        ],
                    ),
                ],
            )

            array_ir = dispatch.compile(elements, ctx=subctx)
            assert isinstance(array_ir, irast.Set)

            if direct_cast is not None:
                # Element casting produced array<el_type>; finish with
                # the direct cast to the requested target.
                ctx.env.schema, array_stype = s_types.Array.from_subtypes(
                    ctx.env.schema, [el_type])
                return _cast_to_ir(
                    array_ir, direct_cast, array_stype, new_stype, ctx=ctx)
            else:
                return array_ir
def compile_cast(
        ir_expr: Union[irast.Set, irast.Expr],
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel,
        cardinality_mod: Optional[qlast.CardinalityModifier] = None
) -> irast.Set:
    """Compile a cast of *ir_expr* to *new_stype*.

    Central dispatch: handles empty sets/arrays, rejects object-to-object
    casts, and routes array/tuple/json special cases to their helpers
    before falling back to the generic ``_compile_cast``.
    """
    if isinstance(ir_expr, irast.EmptySet):
        # For the common case of casting an empty set, we simply
        # generate a new EmptySet node of the requested type.
        return setgen.new_empty_set(
            stype=new_stype,
            alias=ir_expr.path_id.target_name_hint.name,
            ctx=ctx,
            srcctx=ir_expr.context)

    elif irutils.is_untyped_empty_array_expr(ir_expr):
        # Ditto for empty arrays.
        new_typeref = typegen.type_to_typeref(new_stype, ctx.env)
        return setgen.ensure_set(
            irast.Array(elements=[], typeref=new_typeref), ctx=ctx)

    ir_set = setgen.ensure_set(ir_expr, ctx=ctx)
    orig_stype = setgen.get_set_type(ir_set, ctx=ctx)

    if (orig_stype == new_stype
            and cardinality_mod is not qlast.CardinalityModifier.Required):
        # Identity cast (unless a REQUIRED modifier must be enforced).
        return ir_set
    elif orig_stype.is_object_type() and new_stype.is_object_type():
        # Object types cannot be cast between themselves,
        # as cast is a _constructor_ operation, and the only
        # valid way to construct an object is to INSERT it.
        raise errors.QueryError(
            f'cannot cast object type '
            f'{orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}, use '
            f'`...[IS {new_stype.get_displayname(ctx.env.schema)}]` instead',
            context=srcctx)

    if isinstance(ir_set.expr, irast.Array):
        return _cast_array_literal(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.is_tuple(ctx.env.schema):
        return _cast_tuple(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    elif orig_stype.issubclass(ctx.env.schema, new_stype):
        # The new type is a supertype of the old type,
        # and is always a wider domain, so we simply reassign
        # the stype.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype,
            cardinality_mod=cardinality_mod, ctx=ctx)

    elif new_stype.issubclass(ctx.env.schema, orig_stype):
        # The new type is a subtype, so may potentially have
        # a more restrictive domain, generate a cast call.
        return _inheritance_cast_to_ir(
            ir_set, orig_stype, new_stype,
            cardinality_mod=cardinality_mod, ctx=ctx)

    elif orig_stype.is_array():
        return _cast_array(
            ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    else:
        json_t = ctx.env.get_track_schema_type('std::json')

        if (new_stype.issubclass(ctx.env.schema, json_t)
                and ir_set.path_id.is_objtype_path()):
            # JSON casts of objects are special: we want the full shape
            # and not just an identity.
            with ctx.new() as subctx:
                subctx.implicit_id_in_shapes = False
                subctx.implicit_tid_in_shapes = False
                viewgen.compile_view_shapes(ir_set, ctx=subctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and new_stype.is_enum(ctx.env.schema)):
            # Casts from json to enums need some special handling
            # here, where we have access to the enum type. Just turn
            # it into json->str and str->enum.
            str_typ = ctx.env.get_track_schema_type('std::str')
            str_ir = compile_cast(ir_expr, str_typ, srcctx=srcctx, ctx=ctx)
            return compile_cast(
                str_ir, new_stype,
                cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and isinstance(new_stype, s_types.Array)
                and not new_stype.get_subtypes(ctx.env.schema)[0].issubclass(
                    ctx.env.schema, json_t)):
            # Turn casts from json->array<T> into json->array<json>
            # and array<json>->array<T>.
            ctx.env.schema, json_array_typ = s_types.Array.from_subtypes(
                ctx.env.schema, [json_t])
            json_array_ir = compile_cast(
                ir_expr, json_array_typ, srcctx=srcctx, ctx=ctx)
            return compile_cast(
                json_array_ir, new_stype,
                cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)

        elif (orig_stype.issubclass(ctx.env.schema, json_t)
                and isinstance(new_stype, s_types.Tuple)):
            return _cast_json_to_tuple(
                ir_set, orig_stype, new_stype, srcctx=srcctx, ctx=ctx)

    # Generic case: resolve and apply a registered cast.
    return _compile_cast(
        ir_expr, orig_stype, new_stype,
        cardinality_mod=cardinality_mod, srcctx=srcctx, ctx=ctx)
def _cast_tuple(
        ir_set: irast.Set,
        orig_stype: s_types.Type,
        new_stype: s_types.Type, *,
        srcctx: Optional[parsing.ParserContext],
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a cast of a tuple-typed set *ir_set* to *new_stype*.

    Two cases are handled:

    * If a direct cast from the tuple type to *new_stype* is registered,
      each tuple element is cast to *new_stype* individually and the
      direct cast is applied on top of the rebuilt tuple.
    * Otherwise *new_stype* must itself be a tuple of the same arity,
      and the cast is performed element-wise, producing a new tuple.

    Raises:
        errors.QueryError: if no direct cast exists and *new_stype* is
            not a tuple, or the tuple arities differ.
    """
    assert isinstance(orig_stype, s_types.Tuple)

    # Make sure the source tuple expression is pinned in the scope,
    # so that we don't generate a cross-product of it by evaluating
    # the tuple indirections.
    pathctx.register_set_in_scope(ir_set, ctx=ctx)

    direct_cast = _find_cast(orig_stype, new_stype, srcctx=srcctx, ctx=ctx)
    orig_subtypes = dict(orig_stype.iter_subtypes(ctx.env.schema))

    if direct_cast is not None:
        # Direct casting to non-tuple involves casting each tuple
        # element and also keeping the cast around the whole tuple.
        # This is to trigger the downstream logic of casting
        # objects (in elements of the tuple).
        elements = []
        for n in orig_subtypes:
            val = setgen.tuple_indirection_set(
                ir_set,
                source=orig_stype,
                ptr_name=n,
                ctx=ctx,
            )
            # Element cast.  (Note: previously an unused
            # `val_type = setgen.get_set_type(...)` was computed here;
            # it was dead code and has been removed.)
            val = compile_cast(val, new_stype, ctx=ctx, srcctx=srcctx)
            elements.append(irast.TupleElement(name=n, val=val))

        new_tuple = setgen.new_tuple_set(
            elements,
            named=orig_stype.is_named(ctx.env.schema),
            ctx=ctx,
        )

        return _cast_to_ir(
            new_tuple, direct_cast, orig_stype, new_stype, ctx=ctx)

    if not new_stype.is_tuple(ctx.env.schema):
        raise errors.QueryError(
            f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}',
            context=srcctx)

    assert isinstance(new_stype, s_types.Tuple)
    new_subtypes = list(new_stype.iter_subtypes(ctx.env.schema))
    if len(orig_subtypes) != len(new_subtypes):
        raise errors.QueryError(
            f'cannot cast {orig_stype.get_displayname(ctx.env.schema)!r} '
            f'to {new_stype.get_displayname(ctx.env.schema)!r}: '
            f'the number of elements is not the same',
            context=srcctx)

    # For tuple-to-tuple casts we generate a new tuple
    # to simplify things on sqlgen side.
    # NOTE: elements are paired positionally; this relies on
    # iter_subtypes yielding elements in declaration order.
    elements = []
    for i, n in enumerate(orig_subtypes):
        val = setgen.tuple_indirection_set(
            ir_set,
            source=orig_stype,
            ptr_name=n,
            ctx=ctx,
        )
        val_type = setgen.get_set_type(val, ctx=ctx)
        new_el_name, new_st = new_subtypes[i]
        if val_type != new_st:
            # Element cast
            val = compile_cast(val, new_st, ctx=ctx, srcctx=srcctx)
        elements.append(irast.TupleElement(name=new_el_name, val=val))

    return setgen.new_tuple_set(
        elements,
        named=new_stype.is_named(ctx.env.schema),
        ctx=ctx,
    )
def type_intersection_set(
    source_set: irast.Set,
    stype: s_types.Type,
    *,
    optional: bool,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Return an intersection of *source_set* with type *stype*.

    If the intersection is a no-op (the computed type equals the
    argument type), *source_set* is returned unchanged.  Otherwise a
    new set is created whose path id is the source path extended with
    a TypeIntersectionLink pointer.
    """
    arg_type = get_set_type(source_set, ctx=ctx)

    result = schemactx.apply_intersection(arg_type, stype, ctx=ctx)
    if result.stype == arg_type:
        # The intersection does not narrow the type; nothing to do.
        return source_set

    poly_set = new_set(stype=result.stype, ctx=ctx)
    rptr = source_set.rptr
    rptr_specialization = []

    if rptr is not None and rptr.ptrref.union_components:
        # This is a type intersection of a union pointer, most likely
        # a reverse link path specification.  If so, test the union
        # components against the type expression and record which
        # components match.  This information will be used later
        # when evaluating the path cardinality, as well as to
        # route link property references accordingly.
        for component in rptr.ptrref.union_components:
            component_endpoint_ref = component.dir_target
            # Resolving the typeref may update the schema, hence the
            # (schema, value) assignment pattern.
            ctx.env.schema, component_endpoint = (
                irtyputils.ir_typeref_to_type(
                    ctx.env.schema, component_endpoint_ref))
            if component_endpoint.issubclass(ctx.env.schema, stype):
                # Component target is a subtype of the intersection
                # type: the component matches as-is.
                assert isinstance(component, irast.PointerRef)
                rptr_specialization.append(component)
            elif stype.issubclass(ctx.env.schema, component_endpoint):
                # The intersection type is narrower than the component
                # target: specialize to the pointer defined on stype.
                assert isinstance(stype, s_objtypes.ObjectType)
                narrow_ptr = stype.getptr(
                    ctx.env.schema,
                    component.shortname.get_local_name(),
                )
                rptr_specialization.append(
                    irtyputils.ptrref_from_ptrcls(
                        schema=ctx.env.schema,
                        ptrcls=narrow_ptr,
                        direction=rptr.direction,
                        cache=ctx.env.ptr_ref_cache,
                        typeref_cache=ctx.env.type_ref_cache,
                    ),
                )

    ptrcls = irast.TypeIntersectionLink(
        arg_type,
        result.stype,
        optional=optional,
        is_empty=result.is_empty,
        is_subtype=result.is_subtype,
        rptr_specialization=rptr_specialization,
        # The type intersection cannot increase the cardinality
        # of the input set, so semantically, the cardinality
        # of the type intersection "link" is, at most, ONE.
        cardinality=qltypes.SchemaCardinality.One,
    )

    ptrref = irtyputils.ptrref_from_ptrcls(
        schema=ctx.env.schema,
        ptrcls=ptrcls,
        cache=ctx.env.ptr_ref_cache,
        typeref_cache=ctx.env.type_ref_cache,
    )

    # Extend the source path with the intersection pointer so that the
    # narrowed set has its own distinct path id.
    poly_set.path_id = source_set.path_id.extend(ptrref=ptrref)

    ptr = irast.TypeIntersectionPointer(
        source=source_set,
        target=poly_set,
        ptrref=ptrref,
        direction=poly_set.path_id.rptr_dir(),
        optional=optional,
    )

    poly_set.rptr = ptr

    return poly_set
def derive_view(
    stype: s_types.Type,
    *,
    derived_name: Optional[sn.QualName] = None,
    derived_name_quals: Optional[Sequence[str]] = (),
    derived_name_base: Optional[str] = None,
    preserve_shape: bool = False,
    preserve_path_id: bool = False,
    is_insert: bool = False,
    is_update: bool = False,
    is_delete: bool = False,
    inheritance_merge: bool = True,
    attrs: Optional[Dict[str, Any]] = None,
    ctx: context.ContextLevel,
) -> s_types.Type:
    """Derive a transient view subtype of *stype* and register it in ctx.

    The derived type's ``expr_type`` attribute is set from the
    is_insert/is_update/is_delete flags (Select by default).  The new
    type is recorded in ``ctx.view_nodes`` and
    ``ctx.env.created_schema_objects``; if *preserve_shape* is set and
    *stype* has a recorded shape, the shape is carried over.

    Raises:
        AssertionError: if *derived_name* already exists in the schema
            and we are not recompiling a schema alias.
        TypeError: if *stype* is neither a collection nor a derivable
            inheriting object.
    """
    if derived_name is None:
        assert isinstance(stype, s_obj.DerivableObject)
        derived_name = derive_view_name(
            stype=stype,
            derived_name_quals=derived_name_quals,
            derived_name_base=derived_name_base,
            ctx=ctx)

    # Map the mutation flags to the view's expression type.
    if is_insert:
        exprtype = s_types.ExprType.Insert
    elif is_update:
        exprtype = s_types.ExprType.Update
    elif is_delete:
        exprtype = s_types.ExprType.Delete
    else:
        exprtype = s_types.ExprType.Select

    # Copy attrs so the caller's dict is not mutated.
    if attrs is None:
        attrs = {}
    else:
        attrs = dict(attrs)

    attrs['expr_type'] = exprtype

    derived: s_types.Type

    if isinstance(stype, s_abc.Collection):
        # derive_subtype returns an updated schema; keep it current.
        ctx.env.schema, derived = stype.derive_subtype(
            ctx.env.schema,
            name=derived_name,
            attrs=attrs,
        )

    elif isinstance(stype, s_obj.DerivableInheritingObject):
        existing = ctx.env.schema.get(
            derived_name, default=None, type=type(stype))
        if existing is not None:
            if ctx.recompiling_schema_alias:
                # When recompiling schema alias, we, essentially
                # re-derive the already-existing objects exactly.
                derived = existing
            else:
                raise AssertionError(
                    f'{type(stype).get_schema_class_displayname()}'
                    f' {derived_name!r} already exists',
                )
        else:
            ctx.env.schema, derived = stype.derive_subtype(
                ctx.env.schema,
                name=derived_name,
                inheritance_merge=inheritance_merge,
                inheritance_refdicts={'pointers'},
                mark_derived=True,
                transient=True,
                preserve_path_id=preserve_path_id,
                attrs=attrs,
            )

            # NOTE(review): the collapsed source loses indentation; this
            # carry-over block is placed in the fresh-derivation branch —
            # confirm against the original layout.
            if (not stype.generic(ctx.env.schema)
                    and isinstance(derived, s_sources.Source)):
                scls_pointers = stype.get_pointers(ctx.env.schema)
                derived_own_pointers = derived.get_pointers(ctx.env.schema)

                for pn, ptr in derived_own_pointers.items(ctx.env.schema):
                    # This is a view of a view.  Make sure query-level
                    # computable expressions for pointers are carried over.
                    src_ptr = scls_pointers.get(ctx.env.schema, pn)
                    computable_data = ctx.source_map.get(src_ptr)
                    if computable_data is not None:
                        ctx.source_map[ptr] = computable_data
                    if src_ptr in ctx.env.pointer_specified_info:
                        ctx.env.pointer_derivation_map[src_ptr].append(ptr)

    else:
        raise TypeError("unsupported type in derive_view")

    ctx.view_nodes[derived.get_name(ctx.env.schema)] = derived

    if preserve_shape and stype in ctx.env.view_shapes:
        ctx.env.view_shapes[derived] = ctx.env.view_shapes[stype]

    ctx.env.created_schema_objects.add(derived)

    return derived