def default_job_server(self, portable_options):
    """Return a process-wide Dockerized job server, creating it on first use.

    The server is memoized on ``self._dockerized_job_server`` so that
    repeated calls share one StopOnExitJobServer instance.
    """
    # TODO Provide a way to specify a container Docker URL
    # https://issues.apache.org/jira/browse/BEAM-6328
    cached = self._dockerized_job_server
    if not cached:
        cached = job_server.StopOnExitJobServer(
            job_server.DockerizedJobServer())
        self._dockerized_job_server = cached
    return cached
def default_job_server(self, options):
    """Select a Spark job server from the pipeline options.

    When ``spark_submit_uber_jar`` is set, submit via the REST endpoint
    given by ``spark_rest_url`` (which must then be present); otherwise
    fall back to the bundled Spark job-server jar.

    Raises:
      ValueError: if spark_submit_uber_jar is set without spark_rest_url.
    """
    spark_options = options.view_as(pipeline_options.SparkRunnerOptions)
    if not spark_options.spark_submit_uber_jar:
        return job_server.StopOnExitJobServer(SparkJarJobServer(options))
    if not spark_options.spark_rest_url:
        raise ValueError('Option spark_rest_url must be set.')
    return spark_uber_jar_job_server.SparkUberJarJobServer(
        spark_options.spark_rest_url, options)
def default_job_server(self, options):
    """Select a Flink job server from the pipeline options.

    A '[local]' master (or a pre-3.6 interpreter, which cannot use the
    uber-jar path) gets the bundled Flink job-server jar; any other
    master is targeted directly via the uber-jar job server.
    """
    master = options.view_as(FlinkRunnerOptions).flink_master
    needs_jar_server = master == '[local]' or sys.version_info < (3, 6)
    if needs_jar_server:
        # TODO(BEAM-8396): Also default to LOOPBACK for [local].
        return job_server.StopOnExitJobServer(FlinkJarJobServer(options))
    return flink_uber_jar_job_server.FlinkUberJarJobServer(master)
def default_job_server(self, options):
    """Select a Flink job server from the pipeline options.

    For a '[local]' master (or a pre-3.6 interpreter, which cannot use
    the uber-jar path) this runs the bundled Flink job-server jar, and
    additionally defaults the SDK environment to LOOPBACK for local
    execution.  Otherwise the pipeline is submitted directly to the
    remote master via the uber-jar job server.
    """
    flink_master = options.view_as(FlinkRunnerOptions).flink_master
    if flink_master == '[local]' or sys.version_info < (3, 6):
        portable_options = options.view_as(pipeline_options.PortableOptions)
        if flink_master == '[local]' and not portable_options.environment_type:
            # BUG FIX: this line previously used `==` (a no-op comparison);
            # the enclosing `if` only exists to default local runs to the
            # LOOPBACK environment, so this must be an assignment.
            portable_options.environment_type = 'LOOPBACK'
        return job_server.StopOnExitJobServer(FlinkJarJobServer(options))
    else:
        return flink_uber_jar_job_server.FlinkUberJarJobServer(flink_master)
def default_job_server(self, options):
    """Select a Flink job server from the pipeline options.

    Normalizes the configured master to carry an http scheme and writes
    it back into the options.  Magic host names (and pre-3.6
    interpreters) use the bundled Flink job-server jar; otherwise the
    uber-jar server submits straight to the remote master.
    """
    flink_options = options.view_as(FlinkRunnerOptions)
    flink_master = self.add_http_scheme(flink_options.flink_master)
    flink_options.flink_master = flink_master
    if flink_master not in MAGIC_HOST_NAMES and sys.version_info >= (3, 6):
        # The master stored in the options has to be reset to '[auto]';
        # otherwise we would attempt to submit the pipeline remotely on
        # the Flink JobMaster, which would _fail_.
        # DO NOT CHANGE the following line, unless you have tested this.
        flink_options.flink_master = '[auto]'
        return flink_uber_jar_job_server.FlinkUberJarJobServer(flink_master)
    return job_server.StopOnExitJobServer(FlinkJarJobServer(options))
def default_job_server(self, options):
    """Select a Spark job server from the pipeline options.

    When ``spark_submit_uber_jar`` is set, submission goes through the
    REST endpoint in ``spark_rest_url``; this path requires Python 3.6+.
    Otherwise the bundled Spark job-server jar is used.

    Raises:
      ValueError: if the uber-jar path is requested on Python < 3.6, or
        if spark_rest_url is unset.
    """
    spark_options = options.view_as(pipeline_options.SparkRunnerOptions)
    if not spark_options.spark_submit_uber_jar:
        return job_server.StopOnExitJobServer(SparkJarJobServer(options))
    if sys.version_info < (3, 6):
        raise ValueError(
            'spark_submit_uber_jar requires Python 3.6+, current version %s'
            % sys.version)
    if not spark_options.spark_rest_url:
        raise ValueError('Option spark_rest_url must be set.')
    return spark_uber_jar_job_server.SparkUberJarJobServer(
        spark_options.spark_rest_url, options)
def default_job_server(self, options):
    """Select a Spark job server from the pipeline options.

    The uber-jar path submits via ``spark_rest_url``.  The default Java
    job-server path is cached per configuration so equivalent pipelines
    share one server process.

    Raises:
      ValueError: if spark_submit_uber_jar is set without spark_rest_url.
    """
    spark_options = options.view_as(pipeline_options.SparkRunnerOptions)
    if spark_options.spark_submit_uber_jar:
        if not spark_options.spark_rest_url:
            raise ValueError('Option spark_rest_url must be set.')
        return spark_uber_jar_job_server.SparkUberJarJobServer(
            spark_options.spark_rest_url, options)
    # Use Java job server by default.  Only SparkRunnerOptions and
    # JobServerOptions affect job server configuration, so concatenate
    # those to form the cache key.
    job_server_options = options.view_as(pipeline_options.JobServerOptions)
    cache_key = '%s%s' % (spark_options, job_server_options)
    if cache_key not in JOB_SERVER_CACHE:
        JOB_SERVER_CACHE[cache_key] = job_server.StopOnExitJobServer(
            SparkJarJobServer(options))
    return JOB_SERVER_CACHE[cache_key]
def default_job_server(self, options):
    """Select a Flink job server from the pipeline options.

    Normalizes the master to carry an http scheme and writes it back.
    When ``flink_submit_uber_jar`` is set and the master is a real host
    (not a magic name), the uber-jar server submits directly to it;
    this path requires Python 3.6+.  All other cases run the bundled
    Flink job-server jar.

    Raises:
      ValueError: if the uber-jar path is requested on Python < 3.6.
    """
    flink_options = options.view_as(pipeline_options.FlinkRunnerOptions)
    flink_master = self.add_http_scheme(flink_options.flink_master)
    flink_options.flink_master = flink_master
    use_uber_jar = (
        flink_options.flink_submit_uber_jar
        and flink_master not in MAGIC_HOST_NAMES)
    if not use_uber_jar:
        return job_server.StopOnExitJobServer(FlinkJarJobServer(options))
    if sys.version_info < (3, 6):
        raise ValueError(
            'flink_submit_uber_jar requires Python 3.6+, current version %s'
            % sys.version)
    # The master stored in the options has to be reset to '[auto]';
    # otherwise we would attempt to submit the pipeline remotely on the
    # Flink JobMaster, which would _fail_.
    # DO NOT CHANGE the following line, unless you have tested this.
    flink_options.flink_master = '[auto]'
    return flink_uber_jar_job_server.FlinkUberJarJobServer(
        flink_master, options)
def default_job_server(self, options):
    """Return a job server that runs the bundled Spark job-server jar."""
    # TODO(BEAM-8139) submit a Spark jar to a cluster
    jar_server = SparkJarJobServer(options)
    return job_server.StopOnExitJobServer(jar_server)
def default_job_server(self, options):
    """Return a job server that runs the bundled Flink job-server jar."""
    jar_server = FlinkJarJobServer(options)
    return job_server.StopOnExitJobServer(jar_server)