def type_to_ql_typeref(t: s_obj.Object, *, _name=None,
                       ctx: context.ContextLevel) -> qlast.TypeName:
    """Build a qlast.TypeName AST node describing schema type *t*.

    Collections recurse into their subtypes; for named tuples each
    element's name is propagated into the subtype ref via ``_name``.
    """
    if not isinstance(t, s_abc.Collection):
        # Non-collection type: a plain module-qualified reference.
        tname = t.get_name(ctx.env.schema)
        return qlast.TypeName(
            name=_name,
            maintype=qlast.ObjectRef(module=tname.module, name=tname.name))

    if isinstance(t, s_abc.Tuple) and t.named:
        # Named tuple: each element ref carries its element name.
        subtype_refs = [
            type_to_ql_typeref(el_type, _name=el_name, ctx=ctx)
            for el_name, el_type in t.element_types.items()
        ]
    else:
        # Array or unnamed tuple: subtypes are positional.
        subtype_refs = [
            type_to_ql_typeref(el_type, ctx=ctx)
            for el_type in t.get_subtypes()
        ]

    return qlast.TypeName(
        name=_name,
        maintype=qlast.ObjectRef(name=t.schema_name),
        subtypes=subtype_refs)
def pg_type_from_object(schema: s_schema.Schema,
                        obj: s_obj.Object,
                        topbase: bool = False) -> typing.Tuple[str, ...]:
    """Map a schema object to the PostgreSQL type used to store it.

    Returns a one- or two-element tuple naming the PG type (two
    elements when the type name is schema-qualified).

    Raises:
        ValueError: if *obj* has no PG storage representation.
    """
    if isinstance(obj, s_scalars.ScalarType):
        # Scalars have their own dedicated mapping.
        return pg_type_from_scalar(schema, obj, topbase=topbase)

    if isinstance(obj, s_abc.Tuple) or (obj.is_type() and obj.is_anytuple()):
        # Tuples (including the polymorphic anytuple) become PG records.
        return ('record', )

    if isinstance(obj, s_abc.Array):
        if obj.is_polymorphic(schema):
            return ('anyarray', )
        # Concrete array: element type with '[]' appended to the
        # unqualified part of its name.
        elem = pg_type_from_object(schema, obj.element_type, topbase=topbase)
        if len(elem) == 1:
            return (elem[0] + '[]', )
        return (elem[0], elem[1] + '[]')

    if isinstance(obj, s_objtypes.ObjectType):
        # Object types are represented as UUIDs in PG.
        return ('uuid', )

    if obj.is_type() and obj.is_any():
        return ('anyelement', )

    raise ValueError(f'could not determine PG type for {obj!r}')
def derive_view(
        scls: s_obj.Object,
        source: typing.Optional[s_nodes.Node]=None,
        target: typing.Optional[s_nodes.Node]=None,
        *qualifiers,
        derived_name: typing.Optional[sn.SchemaName]=None,
        derived_name_quals: typing.Optional[typing.Sequence[str]]=(),
        derived_name_base: typing.Optional[str]=None,
        is_insert: bool=False,
        is_update: bool=False,
        add_to_schema: bool=True,
        ctx: context.ContextLevel) -> s_obj.Object:
    """Derive a view class from *scls* and register it in the context.

    The derived object gets a view-specific name, carries over
    query-level computable data for its pointers, is tagged with a
    ViewType, and (optionally) added to the schema and recorded in
    ``ctx.view_nodes``.
    """
    if source is None:
        source = scls
    # Only synthesize a name when targeting a specific module, or when
    # deriving directly from the class itself.
    if derived_name is None and (ctx.derived_target_module
                                 or source is scls):
        derived_name = derive_view_name(
            scls=scls, derived_name_quals=derived_name_quals,
            derived_name_base=derived_name_base, ctx=ctx)
    if scls.generic():
        derived = scls.derive(
            ctx.schema, source, target, *qualifiers,
            name=derived_name, mark_derived=True)
    else:
        # If this is already a derived class, reuse its name,
        # so that the correct storage relations are used in DML.
        if derived_name is None:
            derived_name = scls.name
        derived = scls.derive_copy(
            ctx.schema, source, target, *qualifiers,
            name=derived_name, attrs=dict(bases=[scls]),
            mark_derived=True)
    if isinstance(derived, s_sources.Source):
        for pn, ptr in derived.own_pointers.items():
            # This is a view of a view.  Make sure query-level
            # computable expressions for pointers are carried over.
            src_ptr = scls.pointers[pn]
            computable_data = ctx.source_map.get(src_ptr)
            if computable_data is not None:
                ctx.source_map[ptr] = computable_data
    if isinstance(derived, s_types.Type):
        # Tag the view with the kind of statement it was derived for.
        if is_insert:
            vtype = s_types.ViewType.Insert
        elif is_update:
            vtype = s_types.ViewType.Update
        else:
            vtype = s_types.ViewType.Select
        derived.view_type = vtype
    # Collections are not registered in the schema; skip duplicates.
    if (add_to_schema
            and not isinstance(derived, s_types.Collection)
            and ctx.schema.get(derived.name, None) is None):
        ctx.schema.add(derived)
    if isinstance(derived, s_types.Type):
        ctx.view_nodes[derived.name] = derived
    return derived
def amend_empty_set_type(es: irast.EmptySet, t: s_obj.Object,
                         schema) -> None:
    """Give the empty set *es* a path id rooted at a type derived from *t*.

    A transient subtype of *t*, named after the set's path alias and
    living in the ``__expr__`` pseudo-module, becomes the path id root.
    """
    alias_name = es.path_id[-1].name.name
    # Transient derived class in the __expr__ pseudo-module.
    derived = t.__class__(
        name=s_name.Name(module='__expr__', name=alias_name),
        bases=[t])
    derived.acquire_ancestor_inheritance(schema)
    es.path_id = irast.PathId(derived)
    # NOTE(review): scls is set to the original type, not the derived
    # class used for the path id — presumably intentional; confirm.
    es.scls = t
def type_to_ql_typeref(t: s_obj.Object) -> qlast.TypeName:
    """Render schema type *t* as a qlast.TypeName AST node."""
    if isinstance(t, s_types.Collection):
        # Collections carry their schema name plus recursively
        # rendered subtypes.
        return qlast.TypeName(
            maintype=qlast.ObjectRef(name=t.schema_name),
            subtypes=[type_to_ql_typeref(sub) for sub in t.get_subtypes()])

    # Non-collection: a plain module-qualified reference.
    return qlast.TypeName(
        maintype=qlast.ObjectRef(module=t.name.module, name=t.name.name))
def declare_view_from_schema(
        viewcls: s_obj.Object, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile a schema-defined view class into an IR Set, with caching.

    The view expression is parsed and compiled in a detached subcontext,
    then the resulting set and its associated maps are propagated into
    the current context.
    """
    vc = ctx.env.schema_view_cache.get(viewcls)
    if vc is not None:
        # NOTE(review): on a cache hit the ctx.source_map /
        # aliased_views / view_nodes / view_sets updates below are
        # skipped — presumably they were populated on first compile
        # within this env; confirm.
        return vc
    with ctx.detached() as subctx:
        # Compile the stored expression in isolation from the
        # current query.
        subctx.expr_exposed = False
        view_expr = qlparser.parse(viewcls.get_expr(ctx.env.schema))
        viewcls_name = viewcls.get_name(ctx.env.schema)
        declare_view(view_expr, alias=viewcls_name,
                     fully_detached=True, ctx=subctx)
        vc = subctx.aliased_views[viewcls_name]
        ctx.env.schema_view_cache[viewcls] = vc
        # Propagate compilation results into the calling context.
        ctx.source_map.update(subctx.source_map)
        ctx.aliased_views[viewcls_name] = subctx.aliased_views[viewcls_name]
        ctx.view_nodes[vc.get_name(ctx.env.schema)] = vc
        ctx.view_sets[vc] = subctx.view_sets[vc]
    return vc
def derive_view_name(
        scls: s_obj.Object,
        derived_name_quals: typing.Optional[typing.Sequence[str]]=(),
        derived_name_base: typing.Optional[str]=None, *,
        ctx: context.ContextLevel) -> sn.Name:
    """Compute the schema name for a view derived from *scls*."""
    # Fall back to a fresh 'view' alias when no qualifiers were given.
    quals = derived_name_quals or (ctx.aliases.get('view'),)
    base = derived_name_base or scls.shortname
    # Views land in the configured target module, else '__view__'.
    module = ctx.derived_target_module or '__view__'
    specialized = scls.get_specialized_name(base, *quals)
    return sn.SchemaName(module=module, name=specialized)
def range_from_queryset(
        set_ops: typing.Sequence[typing.Tuple[str, pgast.BaseRelation]],
        stype: s_obj.Object, *,
        env: context.Environment) -> pgast.BaseRangeVar:
    """Produce a range var from a sequence of (set-op, relation) pairs."""
    if len(set_ops) <= 1:
        # Just one class table, so return its sole FROM entry directly.
        return set_ops[0][1].from_clause[0]

    # More than one class table: fold the relations left-to-right into
    # nested UNION/EXCEPT SelectStmts.
    union = pgast.SelectStmt(all=True, larg=set_ops[0][1])
    for set_op, right in set_ops[1:]:
        union.op, union.rarg = set_op, right
        union = pgast.SelectStmt(all=True, larg=union)
    # The outermost wrapper never received an op/rarg; peel it off.
    union = union.larg

    return pgast.RangeSubselect(
        subquery=union,
        alias=pgast.Alias(
            aliasname=env.aliases.get(
                stype.get_shortname(env.schema).name)))
def cast(node: pgast.Base, *,
         source_type: s_obj.Object, target_type: s_obj.Object,
         force: bool = False,
         env: context.Environment) -> pgast.Base:
    """Generate a PG AST expression casting *node* between EdgeQL types.

    Special-cases JSON, datetime/str, and bool/int conversions that PG
    cannot express as a plain ``::`` cast; everything else falls back
    to a plain TypeCast.

    Raises:
        RuntimeError: if no cast between the two types exists.
    """
    # No-op when the types already match (unless forced).
    if source_type.name == target_type.name and not force:
        return node

    schema = env.schema
    real_t = schema.get('std::anyreal')
    int_t = schema.get('std::anyint')
    json_t = schema.get('std::json')
    str_t = schema.get('std::str')
    datetime_t = schema.get('std::datetime')
    bool_t = schema.get('std::bool')

    if isinstance(target_type, s_types.Collection):
        if target_type.schema_name == 'array':
            if source_type.issubclass(json_t):
                # If we are casting a jsonb array to array, we do the
                # following transformation:
                # EdgeQL: <array<T>>MAP_VALUE
                # SQL:
                #      SELECT array_agg(j::T)
                #      FROM jsonb_array_elements(MAP_VALUE) AS j
                #
                # NOTE(review): the inner cast passes the *array's*
                # source_type for element 'j' — confirm this is
                # intentional rather than json_t.
                inner_cast = cast(
                    pgast.ColumnRef(name=['j']),
                    source_type=source_type,
                    target_type=target_type.element_type,
                    env=env)

                return pgast.SelectStmt(
                    target_list=[
                        pgast.ResTarget(val=pgast.FuncCall(
                            name=('array_agg', ),
                            args=[inner_cast]))
                    ],
                    from_clause=[
                        pgast.RangeFunction(functions=[
                            pgast.FuncCall(name=('jsonb_array_elements', ),
                                           args=[node])
                        ], alias=pgast.Alias(aliasname='j'))
                    ])
            else:
                # EdgeQL: <array<int64>>['1', '2']
                # to SQL: ARRAY['1', '2']::int[]
                elem_pgtype = pg_types.pg_type_from_object(
                    schema, target_type.element_type, topbase=True)

                return pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(name=elem_pgtype,
                                             array_bounds=[-1]))
    else:
        # `target_type` is not a collection.
        if (source_type.issubclass(datetime_t)
                and target_type.issubclass(str_t)):
            # Normalize datetime to text conversion to have the same
            # format as one would get by serializing to JSON.
            #
            # EdgeQL: <text><datetime>'2010-10-10';
            # To SQL: trim(to_json('2010-01-01'::timestamptz)::text, '"')
            return pgast.FuncCall(
                name=('trim', ),
                args=[
                    pgast.TypeCast(
                        arg=pgast.FuncCall(name=('to_json', ),
                                           args=[node]),
                        type_name=pgast.TypeName(name=('text', ))),
                    pgast.Constant(val='"')
                ])

        elif source_type.issubclass(bool_t) and target_type.issubclass(int_t):
            # PostgreSQL 9.6 doesn't allow to cast 'boolean' to any integer
            # other than int32:
            #      SELECT 'true'::boolean::bigint;
            #      ERROR:  cannot cast type boolean to bigint
            # So we transform EdgeQL: <int64>BOOL
            # to SQL: BOOL::int::<targetint>
            return pgast.TypeCast(
                arg=pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(name=('int', ))),
                type_name=pgast.TypeName(
                    name=pg_types.pg_type_from_scalar(schema, target_type)))

        elif source_type.issubclass(int_t) and target_type.issubclass(bool_t):
            # PostgreSQL 9.6 doesn't allow to cast any integer other
            # than int32 to 'boolean':
            #      SELECT 1::bigint::boolean;
            #      ERROR:  cannot cast type bigint to boolean
            # So we transform EdgeQL: <boolean>INT
            # to SQL: (INT != 0)
            return astutils.new_binop(node, pgast.Constant(val=0),
                                      op=ast.ops.NE)

        elif source_type.issubclass(json_t):
            if (target_type.issubclass(real_t)
                    or target_type.issubclass(bool_t)):
                # Simply cast to text and then to the target type.
                return cast(
                    cast(node, source_type=source_type,
                         target_type=str_t, env=env),
                    source_type=str_t, target_type=target_type, env=env)

            elif target_type.issubclass(str_t):
                # It's not possible to cast jsonb string to text directly,
                # so we do a trick:
                # EdgeQL: <str>JSONB_VAL
                # SQL: array_to_json(ARRAY[JSONB_VAL])->>0
                return astutils.new_binop(
                    pgast.FuncCall(
                        name=('array_to_json', ),
                        args=[pgast.ArrayExpr(elements=[node])]),
                    pgast.Constant(val=0),
                    op='->>')

            elif target_type.issubclass(json_t):
                return pgast.TypeCast(
                    arg=node,
                    type_name=pgast.TypeName(name=('jsonb', )))
        else:
            # Generic fallback: a plain TypeCast to the PG type
            # derived from the target schema type.
            const_type = pg_types.pg_type_from_object(
                schema, target_type, topbase=True)

            return pgast.TypeCast(
                arg=node,
                type_name=pgast.TypeName(name=const_type))

    raise RuntimeError(
        f'could not cast {source_type.name} to {target_type.name}')
def derive_view(stype: s_obj.Object,
                source: typing.Optional[s_nodes.Node] = None,
                target: typing.Optional[s_nodes.Node] = None,
                *qualifiers,
                derived_name: typing.Optional[sn.SchemaName] = None,
                derived_name_quals: typing.Optional[
                    typing.Sequence[str]] = (),
                derived_name_base: typing.Optional[str] = None,
                is_insert: bool = False,
                is_update: bool = False,
                attrs: typing.Optional[dict] = None,
                ctx: context.ContextLevel) -> s_obj.Object:
    """Derive a view class from *stype*, threading the schema via ctx.env.

    The derived type is tagged with a ViewType attribute, carries over
    query-level computable data for its pointers, and is recorded in
    ``ctx.view_nodes``.
    """
    if source is None:
        source = stype
    # Only synthesize a name when targeting a specific module, or when
    # deriving directly from the class itself.
    if derived_name is None and (ctx.derived_target_module
                                 or source is stype):
        # NOTE(review): this passes stype= by keyword, while the
        # derive_view_name visible elsewhere in this file takes scls
        # as its first parameter — confirm which revision applies.
        derived_name = derive_view_name(
            stype=stype, derived_name_quals=derived_name_quals,
            derived_name_base=derived_name_base, ctx=ctx)
    if isinstance(stype, s_abc.Type):
        # Record the kind of statement the view was derived for.
        if is_insert:
            vtype = s_types.ViewType.Insert
        elif is_update:
            vtype = s_types.ViewType.Update
        else:
            vtype = s_types.ViewType.Select
        # Copy attrs before mutating so the caller's dict is untouched.
        if attrs is None:
            attrs = {}
        else:
            attrs = dict(attrs)
        attrs['view_type'] = vtype
    if isinstance(stype, s_abc.Collection):
        ctx.env.schema, derived = stype.derive_subtype(
            ctx.env.schema, name=derived_name)
    else:
        # Avoid a name collision with the base type by appending a
        # fresh qualifier.
        if stype.get_name(ctx.env.schema) == derived_name:
            qualifiers = list(qualifiers)
            qualifiers.append(ctx.aliases.get('d'))
        ctx.env.schema, derived = stype.derive(
            ctx.env.schema, source, target, *qualifiers,
            name=derived_name, mark_derived=True, attrs=attrs)
    if not stype.generic(ctx.env.schema):
        if isinstance(derived, s_sources.Source):
            scls_pointers = stype.get_pointers(ctx.env.schema)
            derived_own_pointers = derived.get_own_pointers(ctx.env.schema)
            for pn, ptr in derived_own_pointers.items(ctx.env.schema):
                # This is a view of a view.  Make sure query-level
                # computable expressions for pointers are carried over.
                src_ptr = scls_pointers.get(ctx.env.schema, pn)
                computable_data = ctx.source_map.get(src_ptr)
                if computable_data is not None:
                    ctx.source_map[ptr] = computable_data
    if isinstance(derived, s_abc.Type):
        ctx.view_nodes[derived.get_name(ctx.env.schema)] = derived
    return derived