def trigger(self, processingTime=None):
    """Set the trigger for the stream query.

    If this is not set it will run the query as fast as possible, which is
    equivalent to setting the trigger to ``processingTime='0 seconds'``.

    .. note:: Experimental.

    :param processingTime: a processing time interval as a string,
        e.g. '5 seconds', '1 minute'.
    :return: this writer, to allow call chaining.
    :raises ValueError: if ``processingTime`` is missing, not a string,
        or a blank string.

    >>> # trigger the query for execution every 5 seconds
    >>> writer = sdf.writeStream.trigger(processingTime='5 seconds')
    """
    # Imported lazily to avoid a circular import at module load time.
    from pyspark.sql.streaming import ProcessingTime

    trigger = None
    if processingTime is not None:
        # isinstance (rather than an exact type() comparison) also accepts
        # str subclasses; blank/whitespace-only intervals are rejected.
        if not isinstance(processingTime, str) or len(processingTime.strip()) == 0:
            raise ValueError(
                'The processing time must be a non empty string. Got: %s' % processingTime)
        trigger = ProcessingTime(processingTime)
    if trigger is None:
        # processingTime is currently the only supported trigger, so calling
        # this method without it is an error.
        raise ValueError(
            'A trigger was not provided. Supported triggers: processingTime.')
    # Replace the underlying Java writer with one carrying the trigger.
    self._jwrite = self._jwrite.trigger(
        trigger._to_java_trigger(self._spark))
    return self
def trigger(self, processingTime=None):
    """Set the trigger for the stream query.

    If this is not set it will run the query as fast as possible, which is
    equivalent to setting the trigger to ``processingTime='0 seconds'``.

    :param processingTime: a processing time interval as a string,
        e.g. '5 seconds', '1 minute'.
    :return: this writer, to allow call chaining.
    :raises ValueError: if ``processingTime`` is missing, not a string,
        or a blank string.

    >>> # trigger the query for execution every 5 seconds
    >>> writer = sdf.write.trigger(processingTime='5 seconds')
    """
    # Imported lazily to avoid a circular import at module load time.
    from pyspark.sql.streaming import ProcessingTime

    trigger = None
    if processingTime is not None:
        # isinstance (rather than an exact type() comparison) also accepts
        # str subclasses; blank/whitespace-only intervals are rejected.
        if not isinstance(processingTime, str) or len(processingTime.strip()) == 0:
            raise ValueError(
                'The processing time must be a non empty string. Got: %s' % processingTime)
        trigger = ProcessingTime(processingTime)
    if trigger is None:
        # processingTime is currently the only supported trigger, so calling
        # this method without it is an error.
        raise ValueError('A trigger was not provided. Supported triggers: processingTime.')
    # Replace the underlying Java writer with one carrying the trigger.
    self._jwrite = self._jwrite.trigger(trigger._to_java_trigger(self._sqlContext))
    return self