def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple expression as a JSON array.

    When no element is itself a collection, a single ``row_to_array``
    call suffices.  Otherwise every element is extracted individually
    with ``edgedb.row_getattr_by_num`` so that nested collections can be
    recursively serialized.
    """
    contains_collections = any(
        irtyputils.is_collection(subtype) for subtype in styperef.subtypes)

    if not contains_collections:
        # No nested collections, take the fast path.
        return pgast.FuncCall(
            name=('edgedb',) + _get_json_func('row_to_array', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
        )

    elements = []
    # row_getattr_by_num takes 1-based attribute numbers, hence start=1.
    for attno, subtype in enumerate(styperef.subtypes, start=1):
        # A typed NULL literal tells the polymorphic getter which
        # concrete type the attribute has.
        null_cast = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(subtype),
            ),
        )
        element = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(attno)),
                null_cast,
            ],
        )
        if irtyputils.is_collection(subtype):
            element = coll_as_json_object(element, styperef=subtype, env=env)
        elements.append(element)

    return pgast.FuncCall(
        name=_get_json_func('build_array', env=env),
        args=elements,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable,
    )
def unnamed_tuple_as_json_object(expr, *, styperef, env):
    """Serialize an unnamed tuple expression as a JSON array.

    Tuples whose type is in the schema can be addressed positionally via
    attribute indirection; other tuples are unpacked with ``unnest()``
    and an explicit column definition list.
    """
    if styperef.in_schema:
        elements = []
        for idx, subtype in enumerate(styperef.subtypes):
            element = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(idx)], ),
                ],
            )
            if irtyputils.is_collection(subtype):
                element = coll_as_json_object(
                    element, styperef=subtype, env=env)
            elements.append(element)

        return pgast.FuncCall(
            name=_get_json_func('build_array', env=env),
            args=elements,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
        )

    coldeflist = []
    elements = []
    for idx, subtype in enumerate(styperef.subtypes):
        # unnest() yields an anonymous record here, so an explicit
        # column definition list is needed to name and type the columns.
        coldeflist.append(pgast.ColumnDef(
            name=str(idx),
            typename=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(subtype),
            ),
        ))

        element = pgast.ColumnRef(name=[str(idx)])
        if irtyputils.is_collection(subtype):
            element = coll_as_json_object(element, styperef=subtype, env=env)
        elements.append(element)

    array_call = pgast.FuncCall(
        name=_get_json_func('build_array', env=env),
        args=elements,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable,
    )

    return pgast.SelectStmt(
        target_list=[
            pgast.ResTarget(val=array_call, ),
        ],
        from_clause=[
            pgast.RangeFunction(functions=[
                pgast.FuncCall(
                    name=('unnest', ),
                    args=[pgast.ArrayExpr(elements=[expr], )],
                    coldeflist=coldeflist,
                )
            ])
        ],
    )
def compile_Introspect(
        expr: qlast.Introspect, *,
        ctx: context.ContextLevel) -> irast.Set:
    """Compile an INTROSPECT expression into a TypeIntrospection set.

    Raises:
        errors.QueryError: if the target type is a transient view,
            a collection, or a generic type.
    """
    typeref = typegen.ql_typeexpr_to_ir_typeref(expr.type, ctx=ctx)
    # Normalize to the material type, except for object types, which
    # are introspected as-is.
    if typeref.material_type and not irtyputils.is_object(typeref):
        typeref = typeref.material_type
    # Opaque unions are introspected as std::BaseObject.
    if typeref.is_opaque_union:
        typeref = typegen.type_to_typeref(
            typing.cast(
                s_objtypes.ObjectType,
                ctx.env.schema.get('std::BaseObject'),
            ),
            env=ctx.env,
        )

    # Reject type refs that cannot be introspected.
    for predicate, message in (
        (irtyputils.is_view, 'cannot introspect transient type variant'),
        (irtyputils.is_collection, 'cannot introspect collection types'),
        (irtyputils.is_generic, 'cannot introspect generic types'),
    ):
        if predicate(typeref):
            raise errors.QueryError(message, context=expr.type.context)

    return setgen.ensure_set(
        irast.TypeIntrospection(typeref=typeref), ctx=ctx)
def named_tuple_as_json_object(expr, *, styperef, env):
    """Serialize a named tuple expression as a JSON object.

    Element values are fetched positionally via
    ``edgedb.row_getattr_by_num`` and keyed by their declared element
    names; nested collections are serialized recursively.
    """
    keyvals = []
    # row_getattr_by_num takes 1-based attribute numbers, hence start=1.
    for attno, subtype in enumerate(styperef.subtypes, start=1):
        key = pgast.StringConstant(val=subtype.element_name)
        # A typed NULL literal tells the polymorphic getter which
        # concrete type the attribute has.
        null_cast = pgast.TypeCast(
            arg=pgast.NullConstant(),
            type_name=pgast.TypeName(
                name=pgtypes.pg_type_from_ir_typeref(subtype),
            ),
        )
        value = pgast.FuncCall(
            name=('edgedb', 'row_getattr_by_num'),
            args=[
                expr,
                pgast.NumericConstant(val=str(attno)),
                null_cast,
            ],
        )
        if irtyputils.is_collection(subtype):
            value = coll_as_json_object(value, styperef=subtype, env=env)
        # build_object consumes a flat alternating key/value list.
        keyvals.extend((key, value))

    return pgast.FuncCall(
        name=_get_json_func('build_object', env=env),
        args=keyvals,
        null_safe=True,
        ser_safe=True,
        nullable=expr.nullable,
    )
def compile_Introspect(
        expr: qlast.Introspect, *,
        ctx: context.ContextLevel) -> irast.Base:
    """Compile an INTROSPECT expression into a TypeIntrospection node.

    Raises:
        errors.QueryError: if the target type is a view, a collection,
            or a generic type.
    """
    typeref = typegen.ql_typeref_to_ir_typeref(expr.type, ctx=ctx)
    # Normalize to the material type, except for object types, which
    # are introspected as-is.
    if typeref.material_type and not irtyputils.is_object(typeref):
        typeref = typeref.material_type

    # Reject type refs that cannot be introspected.
    for predicate, message in (
        (irtyputils.is_view, 'cannot introspect views'),
        (irtyputils.is_collection, 'cannot introspect collection types'),
        (irtyputils.is_generic, 'cannot introspect generic types'),
    ):
        if predicate(typeref):
            raise errors.QueryError(message, context=expr.type.context)

    return irast.TypeIntrospection(typeref=typeref)
def serialize_expr_to_json(
        expr: pgast.BaseExpr, *,
        styperef: irast.TypeRef,
        nested: bool = False,
        env: context.Environment) -> pgast.BaseExpr:
    """Wrap *expr* in whatever call renders it as a JSON value.

    The wrapping depends on the shape of the expression and its type:
    tuple variables and row expressions are rebuilt as JSON arrays,
    collections and bytes get dedicated serializers, and anything else
    goes through to_json() unless it appears in an already-serialized
    nested context (in which case it is passed through unchanged).
    """
    if isinstance(expr, pgast.TupleVar):
        return tuple_var_as_json_object(expr, styperef=styperef, env=env)

    if isinstance(expr, (pgast.RowExpr, pgast.ImplicitRowExpr)):
        return _build_json(
            'build_array',
            args=expr.args,
            null_safe=True,
            ser_safe=True,
            env=env,
        )

    if irtyputils.is_collection(styperef) and not expr.ser_safe:
        return coll_as_json_object(expr, styperef=styperef, env=env)

    # TODO: We'll probably want to generalize this to other custom JSON
    # casts once they exist.
    if irtyputils.is_bytes(styperef) and not expr.ser_safe:
        cast_name = s_casts.get_cast_fullname_from_names(
            'std', 'std::bytes', 'std::json')
        return pgast.FuncCall(
            name=common.get_cast_backend_name(cast_name, aspect='function'),
            args=[expr],
            null_safe=True,
            ser_safe=True,
        )

    if not nested:
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True,
        )

    return expr
def array_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Render an array expression as a JSON array value.

    Arrays with a tuple element type are unnested and rebuilt
    element-by-element so that tuple fields (and any nested
    collections) can be serialized individually; all other arrays take
    the direct to_json() path.
    """
    el_type = styperef.subtypes[0]
    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args: List[pgast.BaseExpr] = []
        # A tuple with any named element is serialized as a JSON
        # object; a fully unnamed tuple becomes a JSON array.
        is_named = any(st.element_name for st in el_type.subtypes)
        for i, st in enumerate(el_type.subtypes):
            if is_named:
                colname = st.element_name
                # Keys and values are interleaved for build_object.
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = str(i)

            val: pgast.BaseExpr = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                # Nested collections need recursive serialization.
                val = coll_as_json_object(val, styperef=st, env=env)
            json_args.append(val)

            if not irtyputils.is_persistent_tuple(el_type):
                # Column definition list is only allowed for functions
                # returning "record", i.e. an anonymous tuple, which
                # would not be the case for schema-persistent tuple types.
                coldeflist.append(
                    pgast.ColumnDef(
                        name=colname,
                        typename=pgast.TypeName(
                            name=pgtypes.pg_type_from_ir_typeref(st)
                        )
                    )
                )

        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        # SELECT coalesce(json_agg(json_build_*(...)), '[]')
        # FROM ROWS FROM (unnest(expr) AS q(...))
        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=pgast.CoalesceExpr(
                        args=[
                            pgast.FuncCall(
                                name=_get_json_func('agg', env=env),
                                args=[
                                    pgast.FuncCall(
                                        name=json_func,
                                        args=json_args,
                                    )
                                ]
                            ),
                            # Coalesce to '[]' so an empty aggregate
                            # result still yields a JSON array.
                            pgast.StringConstant(val='[]'),
                        ]
                    ),
                    ser_safe=True,
                )
            ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
    else:
        # Non-tuple element types serialize directly via to_json().
        return pgast.FuncCall(
            name=_get_json_func('to', env=env),
            args=[expr],
            null_safe=True,
            ser_safe=True)
def named_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Render a named tuple expression as a JSON object keyed by
    element name.

    Nested collection elements are serialized recursively via
    coll_as_json_object().
    """
    keyvals: List[pgast.BaseExpr] = []
    if irtyputils.is_persistent_tuple(styperef):
        # Persistent tuples can be addressed by element name directly
        # (presumably backed by a concrete composite type — the
        # anonymous-record unnest dance below is not needed).
        for el_type in styperef.subtypes:
            # build_object consumes a flat alternating key/value list.
            keyvals.append(pgast.StringConstant(val=el_type.element_name))
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(
                        name=[el_type.element_name]
                    )
                ]
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        return pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable)
    else:
        # Anonymous record: unpack via unnest() with an explicit column
        # definition list, then rebuild the object from the columns.
        coldeflist = []
        for el_type in styperef.subtypes:
            keyvals.append(pgast.StringConstant(val=el_type.element_name))

            coldeflist.append(pgast.ColumnDef(
                name=el_type.element_name,
                typename=pgast.TypeName(
                    name=pgtypes.pg_type_from_ir_typeref(el_type),
                ),
            ))

            val = pgast.ColumnRef(name=[el_type.element_name])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            keyvals.append(val)

        res = pgast.FuncCall(
            name=_get_json_func('build_object', env=env),
            args=keyvals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable)

        # SELECT json_build_object(...)
        # FROM unnest(ARRAY[expr]) AS (coldeflist)
        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(
                    val=res,
                ),
            ],
            from_clause=[
                pgast.RangeFunction(
                    functions=[
                        pgast.FuncCall(
                            name=('unnest',),
                            args=[
                                pgast.ArrayExpr(
                                    elements=[expr],
                                )
                            ],
                            coldeflist=coldeflist,
                        )
                    ]
                )
            ]
        )
def array_as_json_object(expr, *, styperef, env):
    """Render an array expression as a JSON array value.

    Arrays with a tuple element type are unnested and rebuilt
    element-by-element so that tuple fields (and any nested
    collections) can be serialized individually; all other arrays take
    the direct to_json() path.
    """
    el_type = styperef.subtypes[0]
    if irtyputils.is_tuple(el_type):
        coldeflist = []
        json_args = []
        # A tuple with any named element is serialized as a JSON
        # object; a fully unnamed tuple becomes a JSON array.
        is_named = any(st.element_name for st in el_type.subtypes)
        for i, st in enumerate(el_type.subtypes):
            if is_named:
                # Column names go through env.aliases.get(), presumably
                # to keep them unique within the query — TODO confirm.
                colname = env.aliases.get(st.element_name)
                # Keys and values are interleaved for build_object.
                json_args.append(pgast.StringConstant(val=st.element_name))
            else:
                colname = env.aliases.get(str(i))
            val = pgast.ColumnRef(name=[colname])
            if irtyputils.is_collection(st):
                # Nested collections need recursive serialization.
                val = coll_as_json_object(val, styperef=st, env=env)
            json_args.append(val)
            coldeflist.append(
                pgast.ColumnDef(name=colname,
                                typename=pgast.TypeName(
                                    name=pgtypes.pg_type_from_ir_typeref(st))))
        if is_named:
            json_func = _get_json_func('build_object', env=env)
        else:
            json_func = _get_json_func('build_array', env=env)

        # SELECT coalesce(json_agg(json_build_*(...)), '[]')
        # FROM ROWS FROM (unnest(expr) AS q(coldeflist))
        return pgast.SelectStmt(target_list=[
            pgast.ResTarget(
                val=pgast.CoalesceExpr(args=[
                    pgast.FuncCall(name=_get_json_func('agg', env=env),
                                   args=[
                                       pgast.FuncCall(
                                           name=json_func,
                                           args=json_args,
                                       )
                                   ]),
                    # Coalesce to '[]' so an empty aggregate result
                    # still yields a JSON array.
                    pgast.StringConstant(val='[]'),
                ]),
                ser_safe=True,
            )
        ],
            from_clause=[
                pgast.RangeFunction(
                    alias=pgast.Alias(
                        aliasname=env.aliases.get('q'),
                    ),
                    is_rowsfrom=True,
                    functions=[
                        pgast.FuncCall(
                            name=('unnest', ),
                            args=[expr],
                            coldeflist=coldeflist,
                        )
                    ])
            ])
    else:
        # Non-tuple element types serialize directly via to_json().
        return pgast.FuncCall(name=_get_json_func('to', env=env),
                              args=[expr],
                              null_safe=True,
                              ser_safe=True)
def unnamed_tuple_as_json_object(
    expr: pgast.BaseExpr,
    *,
    styperef: irast.TypeRef,
    env: context.Environment,
) -> pgast.BaseExpr:
    """Render an unnamed tuple expression as a JSON array.

    Nested collection elements are serialized recursively via
    coll_as_json_object().
    """
    vals: List[pgast.BaseExpr] = []
    if irtyputils.is_persistent_tuple(styperef):
        # Persistent tuples can be addressed positionally via attribute
        # indirection (presumably backed by a concrete composite type —
        # the anonymous-record unnest dance below is not needed).
        for el_idx, el_type in enumerate(styperef.subtypes):
            val: pgast.BaseExpr = pgast.Indirection(
                arg=expr,
                indirection=[
                    pgast.ColumnRef(name=[str(el_idx)], ),
                ],
            )
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        return _build_json(
            'build_array',
            args=vals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )
    else:
        # Anonymous record: unpack via unnest() with an explicit column
        # definition list, then rebuild the array from the columns.
        coldeflist = []

        for el_idx, el_type in enumerate(styperef.subtypes):
            coldeflist.append(
                pgast.ColumnDef(
                    name=str(el_idx),
                    typename=pgast.TypeName(
                        name=pgtypes.pg_type_from_ir_typeref(el_type),
                    ),
                ))

            val = pgast.ColumnRef(name=[str(el_idx)])
            if irtyputils.is_collection(el_type):
                val = coll_as_json_object(val, styperef=el_type, env=env)
            vals.append(val)

        res = _build_json(
            'build_array',
            args=vals,
            null_safe=True,
            ser_safe=True,
            nullable=expr.nullable,
            env=env,
        )

        # SELECT json_build_array(...)
        # FROM unnest(ARRAY[expr]) AS (coldeflist)
        return pgast.SelectStmt(
            target_list=[
                pgast.ResTarget(val=res, ),
            ],
            from_clause=[
                pgast.RangeFunction(functions=[
                    pgast.FuncCall(
                        name=('unnest', ),
                        args=[pgast.ArrayExpr(elements=[expr], )],
                        coldeflist=coldeflist,
                    )
                ])
            ])