    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the prestoJob member of the given job."""

        presto_job = messages.PrestoJob(
            continueOnFailure=args.continue_on_failure,
            queryFileUri=files_by_type['file'],
            loggingConfig=logging_config)

        if args.queries:
            presto_job.queryList = messages.QueryList(queries=args.queries)
        if args.query_output_format:
            presto_job.outputFormat = args.query_output_format
        if args.client_tags:
            presto_job.clientTags = args.client_tags

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            # Sort properties so that tests comparing messages do not fail on ordering.
            presto_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties,
                messages.PrestoJob.PropertiesValue,
                sort_items=True)

        job.prestoJob = presto_job
    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the sparkRJob member of the given job."""
        spark_r_job = messages.SparkRJob(args=args.job_args or [],
                                         archiveUris=files_by_type['archives'],
                                         fileUris=files_by_type['files'],
                                         mainRFileUri=files_by_type['r_file'],
                                         loggingConfig=logging_config)

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            spark_r_job.properties = encoding.DictToMessage(
                job_properties, messages.SparkRJob.PropertiesValue)

        job.sparkRJob = spark_r_job
    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the hadoopJob member of the given job."""
        hadoop_job = messages.HadoopJob(
            args=args.job_args or [],
            archiveUris=files_by_type['archives'],
            fileUris=files_by_type['files'],
            jarFileUris=files_by_type['jars'],
            mainClass=args.main_class,
            mainJarFileUri=files_by_type['main_jar'],
            loggingConfig=logging_config)

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            # Sort properties so that tests comparing messages do not fail on ordering.
            hadoop_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties,
                messages.HadoopJob.PropertiesValue,
                sort_items=True)

        job.hadoopJob = hadoop_job
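
# For reference, the hadoopJob variant above only reads the 'main_jar', 'jars',
# 'files', and 'archives' keys, so a caller is assumed to hand it a dict of
# already-staged URIs shaped roughly like this hypothetical sketch (the bucket
# and file names are illustrative, not from the source):
files_by_type = {
    'main_jar': 'gs://example-bucket/wordcount.jar',  # or None when --class supplies mainClass
    'jars': ['gs://example-bucket/lib/extra.jar'],
    'files': ['gs://example-bucket/conf/site.xml'],
    'archives': ['gs://example-bucket/deps.tar.gz'],
}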
    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the pysparkJob member of the given job."""

        pyspark_job = messages.PySparkJob(
            args=args.job_args or [],
            archiveUris=files_by_type['archives'],
            fileUris=files_by_type['files'],
            jarFileUris=files_by_type['jars'],
            pythonFileUris=files_by_type['py_files'],
            mainPythonFileUri=files_by_type['py_file'],
            loggingConfig=logging_config)

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            # Sort properties so that tests comparing messages do not fail on ordering.
            pyspark_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties,
                messages.PySparkJob.PropertiesValue,
                sort_items=True)

        job.pysparkJob = pyspark_job
    def ConfigureJob(messages, job, files_by_type, args):
        """Populates the hiveJob member of the given job."""

        hive_job = messages.HiveJob(continueOnFailure=args.continue_on_failure,
                                    jarFileUris=files_by_type['jars'],
                                    queryFileUri=files_by_type['file'])

        if args.queries:
            hive_job.queryList = messages.QueryList(queries=args.queries)
        if args.params:
            hive_job.scriptVariables = encoding.DictToAdditionalPropertyMessage(
                args.params, messages.HiveJob.ScriptVariablesValue)

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            # Sort properties so that tests comparing messages do not fail on ordering.
            hive_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties,
                messages.HiveJob.PropertiesValue,
                sort_items=True)

        job.hiveJob = hive_job
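
# The --params flag feeds hive_job.scriptVariables through the same apitools
# helper used for properties. A minimal hedged sketch, assuming the generated
# Dataproc messages module is in scope as `messages` (values are illustrative):
from apitools.base.py import encoding

params = {'table': 'logs', 'year': '2024'}
script_vars = encoding.DictToAdditionalPropertyMessage(
    params, messages.HiveJob.ScriptVariablesValue)
# script_vars.additionalProperties now holds one key/value entry per dict item,
# which is the wire shape the API expects for map-typed fields.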
Example #6
    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the sparkSqlJob member of the given job."""

        spark_sql_job = messages.SparkSqlJob(
            jarFileUris=files_by_type['jars'],
            queryFileUri=files_by_type['file'],
            loggingConfig=logging_config)

        if args.queries:
            spark_sql_job.queryList = messages.QueryList(queries=args.queries)
        if args.params:
            spark_sql_job.scriptVariables = encoding.DictToAdditionalPropertyMessage(
                args.params, messages.SparkSqlJob.ScriptVariablesValue)

        job_properties = job_util.BuildJobProperties(args.properties,
                                                     args.properties_file)
        if job_properties:
            # Sort properties so that tests comparing messages do not fail on ordering.
            spark_sql_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties,
                messages.SparkSqlJob.PropertiesValue,
                sort_items=True)

        job.sparkSqlJob = spark_sql_job
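
# A minimal, hypothetical driver for the sparkSqlJob variant above. It assumes
# the gcloud SDK and its generated Dataproc client are installed, and that
# ConfigureJob is reachable as a plain callable (in the SDK these snippets are
# static methods on per-job-type classes). Flag values and URIs are
# illustrative only, not taken from the source.
import argparse

from googlecloudsdk.api_lib.util import apis

messages = apis.GetMessagesModule('dataproc', 'v1')
job = messages.Job()

files_by_type = {'jars': [], 'file': 'gs://example-bucket/query.sql'}
logging_config = None  # could be a messages.LoggingConfig built from --driver-log-levels
args = argparse.Namespace(
    queries=None,                                # use the query file, not inline queries
    params=None,
    properties={'spark.executor.memory': '2g'},  # merged by job_util.BuildJobProperties
    properties_file=None)

ConfigureJob(messages, job, files_by_type, logging_config, args)
print(job.sparkSqlJob.queryFileUri)  # gs://example-bucket/query.sql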