Example #1
0
 def _spark(
     cls,
     execution_engine: "SparkDFExecutionEngine",
     metric_domain_kwargs: Dict,
     metric_value_kwargs: Dict,
     metrics: Dict[Tuple, Any],
     runtime_configuration: Dict,
 ):
     """Return the Spark aggregate expression for a table row count.

     Args:
         execution_engine: the Spark execution engine running this metric.
             NOTE(review): the original annotation said "SqlAlchemyExecutionEngine",
             which contradicts the pyspark `F.count` expression built here —
             presumed copy-paste error from the SQLAlchemy variant; confirm
             against the sibling `_sqlalchemy` implementation.
         metric_domain_kwargs: domain kwargs identifying the table; passed
             through unchanged as the second tuple element.
         metric_value_kwargs: value kwargs (unused here).
         metrics: previously computed metric dependencies (unused here).
         runtime_configuration: runtime configuration (unused here).

     Returns:
         A 3-tuple of (aggregate Column, domain kwargs, accessor kwargs):
         `F.count(F.lit(1))` counts every row (the Spark analogue of
         `COUNT(1)`), and no accessor kwargs are needed.
     """
     return F.count(F.lit(1)), metric_domain_kwargs, {}
Example #2
0
 def _spark(cls, column, **kwargs):
     """Return a boolean Column that is True for rows whose `column` value
     is unique within the DataFrame.

     Partitions all rows by `column` and counts the rows in each partition;
     a value occurring exactly once yields a partition count of 1.
     """
     value_partition = Window.partitionBy(column)
     occurrence_count = F.count(F.lit(1)).over(value_partition)
     return occurrence_count <= 1
Example #3
0
 def _spark(cls, column_list, **kwargs):
     """Return a boolean Column that is True for rows whose combination of
     values across `column_list` is unique within the DataFrame.

     Rows are partitioned by a struct of all the listed columns, so the
     per-partition count is the number of rows sharing that exact value
     combination; unique combinations have a count of 1.
     """
     names = column_list.columns
     combo_window = Window.partitionBy(F.struct(*names))
     return F.count(F.lit(1)).over(combo_window) <= 1