def start(self, env):
    """Start the Flink history server and persist its process id.

    Installs/refreshes the Flink package, re-renders configuration, launches
    the history server via its control script, then greps the running JVM
    (identified by its main class) out of `ps` and writes the pid to
    ``self.pid_file`` so Ambari can later check/stop the process.
    """
    import params
    install_flink()
    self.configure(env)

    # Launch the daemon through Flink's own control script.
    start_cmd = params.bin_dir + '/historyserver.sh start'
    Execute(start_cmd)

    # The control script does not emit a pid file itself, so capture the
    # history-server JVM pid by matching its main class in the process list.
    pid_capture_cmd = (
        "echo `ps aux|grep 'org.apache.flink.runtime.webmonitor.history'"
        " | grep -v grep | awk '{print $2}'` > " + self.pid_file)
    Execute(pid_capture_cmd)
def start(self, env):
    """Start a Flink task manager and persist its process id.

    Installs/refreshes the Flink package, re-renders configuration, launches
    the task manager via its control script, then greps the running JVM
    (identified by its main class) out of `ps` and writes the pid to
    ``self.pid_file`` so Ambari can later check/stop the process.
    """
    import params
    install_flink()
    self.configure(env)

    # Launch the daemon through Flink's own control script.
    start_cmd = params.bin_dir + '/taskmanager.sh start'
    Execute(start_cmd)

    # The control script does not emit a pid file itself, so capture the
    # task-executor JVM pid by matching its main class in the process list.
    pid_capture_cmd = (
        "echo `ps aux|grep 'org.apache.flink.runtime.taskexecutor'"
        " | grep -v grep | awk '{print $2}'` > " + self.pid_file)
    Execute(pid_capture_cmd)
def start(self, env):
    """Start a long-running Flink YARN session and record its application id.

    Steps:
      1. Install/refresh the Flink package and render configuration.
      2. Ensure the Flink HDFS directories (install dir, checkpoints,
         recovery) exist with mode 755, owned by the Flink user.
      3. kinit as the Flink principal when Kerberos is enabled.
      4. Launch a detached ``yarn-session.sh`` sized from params
         (containers, slots, JM/TM memory, queue, app name).
      5. Scrape the YARN application id for the session into the pid file
         so Ambari status/stop can find it.

    Fix vs. original: octal literals written as ``0o755`` instead of the
    Python-2-only ``0755`` form — ``0o755`` is valid on Python 2.6+ and
    required on Python 3 (PEP 3127), with identical value.
    """
    import params
    import status_params
    install_flink()
    self.configure(env)

    # Create the HDFS directories Flink needs; actions are batched and
    # flushed by the trailing HdfsResource(None, action="execute") call.
    params.HdfsResource(
        params.flink_dir,
        type="directory",
        action="create_on_execute",
        owner=params.flink_user,
        mode=0o755)
    params.HdfsResource(
        params.flink_checkpoints_dir,
        type="directory",
        action="create_on_execute",
        owner=params.flink_user,
        mode=0o755)
    params.HdfsResource(
        params.flink_recovery_dir,
        type="directory",
        action="create_on_execute",
        owner=params.flink_user,
        mode=0o755)
    params.HdfsResource(None, action="execute")

    # Obtain a Kerberos ticket for the Flink user before talking to YARN.
    if params.security_enabled:
        flink_kinit_cmd = format(
            "{kinit_path_local} -kt {flink_kerberos_keytab} {flink_kerberos_principal}; ")
        Execute(flink_kinit_cmd, user=params.flink_user)

    # Diagnostic echoes (kept for log visibility during startup).
    Execute('echo bin dir ' + params.bin_dir)
    Execute('echo pid file ' + status_params.flink_pid_file)

    # Build the detached (-d) yarn-session command from configured sizing.
    cmd = format(
        "export HADOOP_CONF_DIR={hadoop_conf_dir}; {bin_dir}/yarn-session.sh -n {flink_numcontainers} -s {flink_numberoftaskslots} -jm {flink_jobmanager_memory} -tm {flink_container_memory} -qu {flink_queue} -nm {flink_appname} -d")
    if params.flink_streaming:
        cmd = cmd + ' -st '
    Execute(cmd + format(" >> {flink_log_file}"), user=params.flink_user)

    # Store the YARN application id (not a Unix pid) in the pid file:
    # first application whose row matches the configured app name.
    Execute(
        params.hadoop_bin_dir + "/yarn application -list 2>/dev/null | awk '/" +
        params.flink_appname + "/ {print $1}' | head -n1 > " +
        status_params.flink_pid_file,
        user=params.flink_user)
def install(self, env):
    """Install the Flink package, then render its configuration files."""
    # Installation must precede configuration so the target directories exist.
    install_flink()
    self.configure(env)
def install(self, env):
    """Perform a first-time Flink install, then render its configuration.

    Both the installer and the configure step are told this is the initial
    setup (``first=True`` / second positional argument ``True``).
    """
    install_flink(first=True)
    self.configure(env, True)