def compile_cast(t, expr, scope, **kwargs):
    """Compile an ibis cast expression into a PySpark column cast.

    Interval targets have no direct Spark ``cast`` equivalent, so they are
    only supported when the argument is a literal, in which case an interval
    is constructed directly from the literal value. Every other target dtype
    is mapped to its Spark type and applied via ``Column.cast``.

    Raises
    ------
    com.UnsupportedArgumentError
        If casting a non-literal expression to an interval type.
    """
    op = expr.op()
    target = op.to

    # Interval casts: literal-only fast path, otherwise unsupported.
    if isinstance(target, dtypes.Interval):
        arg_op = op.arg.op()
        if not isinstance(arg_op, ops.Literal):
            raise com.UnsupportedArgumentError(
                'Casting to intervals is only supported for literals '
                'in the PySpark backend. {} not allowed.'.format(type(op.arg))
            )
        return interval(arg_op.value, target.unit)

    # Arrays need the dedicated element-type-aware conversion.
    if isinstance(target, dtypes.Array):
        spark_type = ibis_array_dtype_to_spark_dtype(target)
    else:
        spark_type = ibis_dtype_to_spark_dtype(target)

    return t.translate(op.arg, scope).cast(spark_type)
def compile_elementwise_udf(t, expr, scope, **kwargs):
    """Compile an ibis elementwise UDF node into a Spark pandas UDF call.

    Wraps the node's Python function in a scalar ``pandas_udf`` with the
    Spark type mapped from the node's declared output type, then applies it
    to the translated argument columns.

    Note: ``**kwargs`` is accepted (and ignored) for signature consistency
    with the other translators in this module (e.g. ``compile_cast``), so a
    dispatcher passing extra keyword context does not raise ``TypeError``.
    """
    op = expr.op()
    # Map the ibis output dtype to the Spark return type required by pandas_udf.
    spark_output_type = ibis_dtype_to_spark_dtype(op._output_type)
    spark_udf = pandas_udf(op.func, spark_output_type, PandasUDFType.SCALAR)
    # Translate each ibis argument expression into a Spark column lazily.
    func_args = (t.translate(arg, scope) for arg in op.func_args)
    return spark_udf(*func_args)