def run_experiment(args):
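    """Run the pipeline selected by experiment.PARAMS.experiment_type."""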

    if experiment.PARAMS.experiment_type == 'batch':
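        # Batch pipeline: run inference over a data sample and write results to the file sink.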

        batch_process.run_pipeline(
            inference_type=experiment.PARAMS.inference_type,
            sample_size=experiment.PARAMS.batch_size,
            sink_location=experiment.PARAMS.sink_dir,
            runner=experiment.PARAMS.runner,
            args=args)

    elif experiment.PARAMS.experiment_type == 'batch-predict':
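        # Batch-predict variant of the batch pipeline (note: it takes no inference_type).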

        batch_process.run_pipeline_with_batch_predict(
            sample_size=experiment.PARAMS.batch_sample_size,
            sink_location=experiment.PARAMS.sink_dir,
            runner=experiment.PARAMS.runner,
            args=args)

    elif experiment.PARAMS.experiment_type == 'stream':
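        # Streaming pipeline: consume messages from Pub/Sub and write predictions to BigQuery.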

        stream_process.run_pipeline(
            inference_type=experiment.PARAMS.inference_type,
            project=experiment.PARAMS.project_id,
            pubsub_topic=experiment.PARAMS.pubsub_topic,
            pubsub_subscription=experiment.PARAMS.pubsub_subscription,
            bq_dataset=experiment.PARAMS.bq_dataset,
            bq_table=experiment.PARAMS.bq_table,
            runner=experiment.PARAMS.runner,
            args=args)

    elif experiment.PARAMS.experiment_type == 'stream-m-batches':
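        # Micro-batch variant of the streaming pipeline: messages are grouped into windows of window_size.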

        stream_process.run_pipeline_with_micro_batches(
            inference_type=experiment.PARAMS.inference_type,
            project=experiment.PARAMS.project_id,
            pubsub_topic=experiment.PARAMS.pubsub_topic,
            pubsub_subscription=experiment.PARAMS.pubsub_subscription,
            bq_dataset=experiment.PARAMS.bq_dataset,
            bq_table=experiment.PARAMS.bq_table,
            window_size=experiment.PARAMS.window_size,
            runner=experiment.PARAMS.runner,
            args=args)
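For context, here is a minimal sketch of an entry point that could drive this
dispatcher. It assumes experiment.PARAMS is a namespace populated from
command-line flags; the flag set and the PARAMS wiring below are illustrative,
not the project's actual CLI:

import argparse

import experiment  # assumed module exposing the PARAMS namespace used above


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--experiment_type', default='batch',
                        choices=['batch', 'batch-predict', 'stream', 'stream-m-batches'])
    parser.add_argument('--runner', default='DirectRunner')
    # Flags for the remaining PARAMS fields (inference_type, sink_dir,
    # pubsub_topic, bq_dataset, ...) are omitted here for brevity.
    known_args, pipeline_args = parser.parse_known_args()
    experiment.PARAMS = known_args  # hypothetical wiring
    run_experiment(pipeline_args)   # unknown flags pass through to the pipeline


if __name__ == '__main__':
    main()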
Example 3
from datetime import datetime


def run_experiment(args):
    time_start = datetime.utcnow()

    if EXPERIMENT == 'batch':
        batch_process.run_pipeline(inference_type=INFERENCE_TYPE,
                                   sample_size=SAMPLE_SIZE,
                                   sink_location=sink_location,
                                   runner=RUNNER,
                                   args=args)

    elif EXPERIMENT == 'batch-predict':
        batch_process.run_pipeline_with_batch_predict(
                               sample_size=SAMPLE_SIZE,
                               sink_location=sink_location,
                               runner=RUNNER,
                               args=args)

    elif EXPERIMENT == 'stream':
        stream_process.run_pipeline(inference_type=INFERENCE_TYPE,
                                    pubsub_topic=pubsub_topic,
                                    runner='DataflowRunner',
                                    args=args)

    elif EXPERIMENT == 'stream-m-batches':
        stream_process.run_pipeline_with_micro_batches(inference_type=INFERENCE_TYPE,
                                    pubsub_topic=pubsub_topic,
                                    runner='DataflowRunner',
                                    args=args)

    time_end = datetime.utcnow()
    print(".......................................")
    print("Job finished at {}".format(time_end.strftime("%H:%M:%S")))
    print("")
    time_elapsed = time_end - time_start
    print("Job elapsed time: {} seconds".format(time_elapsed.total_seconds()))