def _literal(_, expr):
    """Translate an ibis literal expression into a SQLAlchemy clause.

    Dispatches on the literal's ibis dtype / Python value type:
    intervals become raw ``INTERVAL`` text, sequences and sets become
    ``list_value(...)`` casts, struct mappings become a bound-parameter
    struct literal, and anything else falls through to a plain cast.
    """
    dtype = expr.type()
    sqla_type = to_sqla_type(dtype)
    value = expr.op().value

    # Intervals have no bindable representation here; emit literal SQL text.
    if isinstance(dtype, dt.Interval):
        return sa.text(f"INTERVAL '{value} {dtype.resolution}'")

    is_sequence = isinstance(value, collections.abc.Sequence) and not isinstance(
        value, str
    )
    if isinstance(dtype, dt.Set) or is_sequence:
        return sa.cast(sa.func.list_value(*value), sqla_type)

    if isinstance(value, np.ndarray):
        # Unbox numpy scalars so SQLAlchemy sees plain Python values.
        return sa.cast(sa.func.list_value(*value.tolist()), sqla_type)

    if isinstance(value, collections.abc.Mapping):
        if isinstance(dtype, dt.Struct):
            # Build "{'k0': :v0, 'k1': :v1, ...}" with one bindparam per field.
            placeholders = ", ".join(
                f"{key!r}: :v{i}" for i, key in enumerate(value.keys())
            )
            params = (
                sa.bindparam(f"v{i:d}", val) for i, val in enumerate(value.values())
            )
            return sa.text(f"{{{placeholders}}}").bindparams(*params)
        # Mappings only map onto struct dtypes; anything else is unsupported.
        raise NotImplementedError(
            f"Ibis dtype `{dtype}` with mapping type "
            f"`{type(value).__name__}` isn't yet supported with the duckdb "
            "backend"
        )

    return sa.cast(sa.literal(value), sqla_type)
def _struct_field(t, expr):
    """Compile struct-field access using duckdb's ``struct_extract``."""
    op = expr.op()
    struct_arg = t.translate(op.arg)
    # The field name is passed as quoted SQL text, e.g. 'field_name'.
    field_name = sa.text(repr(op.field))
    return sa.func.struct_extract(
        struct_arg,
        field_name,
        type_=to_sqla_type(expr.type()),
    )
def _ibis_to_pg_sa_type(ibis_type):
    """Map an ibis DataType to a Postgres-compatible sqlalchemy type."""
    # Delegate to the generic converter, but force the Postgres type map.
    pg_type_map = PostgreSQLExprTranslator._type_map
    return to_sqla_type(ibis_type, type_map=pg_type_map)
def _array_column(t, expr):
    """Compile an ibis array column into a cast duckdb ``list_value`` call."""
    # The op carries exactly one argument: the sequence of element exprs.
    (elements,) = expr.op().args
    translated = (t.translate(elem) for elem in elements)
    return sa.cast(
        sa.func.list_value(*translated),
        to_sqla_type(expr.type()),
    )