def compile(self, expr, timecontext=None, params=None, *args, **kwargs):
    """Translate an ibis expression into a PySpark DataFrame object.

    Parameters
    ----------
    expr : ibis.expr.types.Expr
        The expression to compile.
    timecontext : Optional[TimeContext]
        Time bounds for the compilation; canonicalized and localized to
        the Spark session timezone before use.
    params : Optional[Mapping[ibis.expr.types.Expr, object]]
        Values for unbound parameters appearing in ``expr``.
    """
    scope = Scope()
    if timecontext is not None:
        tz = self._session.conf.get('spark.sql.session.timeZone')
        # Spark interprets tz-naive timestamps in the session timezone,
        # so a tz-naive context must be localized the same way for the
        # bounds to line up with the data.
        timecontext = localize_context(canonicalize_context(timecontext), tz)
    # Seed the scope with any supplied parameter bindings.
    if params is not None:
        bindings = {key.op(): value for key, value in params.items()}
        scope = Scope(bindings, timecontext)
    return self.translator.translate(expr, scope=scope, timecontext=timecontext)
def main_execute(
    expr,
    params=None,
    scope=None,
    timecontext: Optional[TimeContext] = None,
    aggcontext=None,
    **kwargs,
):
    """Execute an expression against data that are bound to it.

    If no data are bound, raise an Exception.

    Parameters
    ----------
    expr : ibis.expr.types.Expr
        The expression to execute
    params : Mapping[ibis.expr.types.Expr, object]
        The data that an unbound parameter in `expr` maps to
    scope : Mapping[ibis.expr.operations.Node, object]
        Additional scope, mapping ibis operations to data
    timecontext : Optional[TimeContext]
        timecontext needed for execution
    aggcontext : Optional[ibis.backends.pandas.aggcontext.AggregationContext]
        An object indicating how to compute aggregations. For example,
        a rolling mean needs to be computed differently than the mean
        of a column.
    kwargs : Dict[str, object]
        Additional arguments that can potentially be used by individual
        node execution

    Returns
    -------
    result : Union[
        pandas.Series, pandas.DataFrame, ibis.backends.pandas.core.simple_types
    ]

    Raises
    ------
    ValueError
        * If no data are bound to the input expression
    """
    if scope is None:
        scope = Scope()
    # Time strings in the context, if any, are normalized to datetimes
    # up front so downstream code sees a canonical form.
    if timecontext is not None:
        timecontext = canonicalize_context(timecontext)
    if params is None:
        params = {}
    # TODO: make expressions hashable so that these .op() calls can be
    # removed everywhere.
    keyed_params = {
        (key.op() if hasattr(key, 'op') else key): value
        for key, value in params.items()
    }
    scope = scope.merge_scope(Scope(keyed_params, timecontext))
    return execute_with_scope(
        expr,
        scope,
        timecontext=timecontext,
        aggcontext=aggcontext,
        **kwargs,
    )
def compile(self, expr, timecontext=None, params=None, *args, **kwargs):
    """Compile an ibis expression to a PySpark DataFrame object.

    Parameters
    ----------
    expr : ibis.expr.types.Expr
        The expression to compile.
    timecontext : Optional[TimeContext]
        Time bounds for the compilation; canonicalized and localized to
        the Spark session timezone before use.
    params : Optional[Mapping[ibis.expr.types.Expr, object]]
        Values for unbound parameters appearing in ``expr``.
    """
    if timecontext is not None:
        session_timezone = self._session.conf.get(
            'spark.sql.session.timeZone'
        )
        # Since Spark uses the session timezone for tz-naive timestamps,
        # localize a tz-naive context here to match that behavior.
        # (Previously the context was only canonicalized, leaving its
        # bounds misaligned with how Spark reads the data.)
        timecontext = localize_context(
            canonicalize_context(timecontext), session_timezone
        )
    # Insert params in scope
    if params is None:
        scope = Scope()
    else:
        scope = Scope(
            {param.op(): raw_value for param, raw_value in params.items()},
            timecontext,
        )
    return self.translator.translate(
        expr, scope=scope, timecontext=timecontext
    )