def install(self, env):
    """Install this component's packages and the Hadoop distribution."""
    import params
    env.set_params(params)
    self.install_packages(env)
    install_hadoop()
def start(self, env):
    """Start the JournalNode daemon.

    Fix: ``env.set_params(params)`` is now called before
    ``install_hadoop()`` so params are bound first, matching the ordering
    used by the other component ``start()`` implementations in this file.
    """
    import params
    env.set_params(params)
    install_hadoop()
    self.configure(env)
    service(action="start", name="journalnode", user=params.hdfs_user,
            create_pid_dir=True, create_log_dir=True)
def start(self, env):
    """Start the DataNode.

    Installs and configures Hadoop, makes sure the ramdisk-backed data
    directory exists with the expected ownership, then launches the daemon.
    """
    import params
    env.set_params(params)
    install_hadoop()
    self.configure(env)
    # Ramdisk HDFS data dir must exist (0751, owned by the HDFS user)
    # before the daemon starts.
    ramdisk_data_dir = params.ramdisk + '/hdfs/data'
    Directory(
        ramdisk_data_dir,
        create_parents=True,
        mode=0o751,
        owner=params.hdfs_user,
        group=params.user_group,
    )
    datanode(action="start")
def start(self, env):
    """Install and configure Hadoop, then start the NameNode daemon."""
    import params
    env.set_params(params)
    install_hadoop()
    self.configure(env)
    namenode(
        action="start",
        hdfs_binary=self.get_hdfs_binary(),
        upgrade_suspended=params.upgrade_suspended,
        env=env,
    )
def start_static(env):
    """Start the ZKFC daemon, initializing the HA znode when required."""
    import params
    install_hadoop()
    env.set_params(params)
    ZkfcSlave.configure_static(env)
    Directory(
        params.hadoop_pid_dir_prefix,
        mode=0o755,
        owner=params.hdfs_user,
        group=params.user_group,
    )
    # Format the znode for this HA setup. The Ambari UI HA Wizard prompts
    # the user to run this command manually, so only formatting when the
    # active namenode hostname is set guarantees it runs solely in the
    # Blueprints (automated) case.
    if params.dfs_ha_enabled and len(params.dfs_ha_namenode_active) > 0:
        if not initialize_ha_zookeeper(params):
            raise Fail("Could not initialize HA state in zookeeper")
    utils.service(action="start", name="zkfc", user=params.hdfs_user,
                  create_pid_dir=True, create_log_dir=True)
def start(self, env):
    """Start the Observer NameNode.

    Fix: ``install_hadoop()`` now runs before ``self.configure(env)`` so
    the Hadoop bits are present before configuration files are laid down,
    matching the ordering used by the other component ``start()`` methods.
    """
    import params
    env.set_params(params)
    install_hadoop()
    self.configure(env)
    observer_namenode(action="start")
def pre_upgrade_restart(self, env, upgrade_type=None):
    """Hook run before a rolling/express upgrade restart.

    Only ensures the Hadoop distribution is installed; ``env`` and
    ``upgrade_type`` are accepted for interface compatibility but unused.
    """
    install_hadoop()
def start(self, env):
    """Instance entry point: install Hadoop, then delegate to the static
    start implementation."""
    install_hadoop()
    self.start_static(env)
def start(self, env):
    """Start the DFS Router daemon as the HDFS user."""
    import params
    env.set_params(params)
    install_hadoop()
    router_cmd = '%s --daemon start dfsrouter' % self.get_hdfs_binary()
    Execute(router_cmd, user=params.hdfs_user)
def start(self, env):
    """Start the Hadoop KMS daemon.

    Fix: the original built the command as
    ``params.hadoop_bin_dir + 'hadoop'``; bin-dir params conventionally
    carry no trailing slash, which would yield a broken path like
    ``.../binhadoop``. ``os.path.join`` inserts the separator only when
    it is missing, so this is also correct if the param already ends
    with a slash.
    """
    import os
    import params
    env.set_params(params)
    install_hadoop()
    hadoop_cmd = os.path.join(params.hadoop_bin_dir, 'hadoop')
    Execute(hadoop_cmd + ' --daemon start kms', user=params.hdfs_user)
def start(self, env):
    """Install and configure Hadoop, then start the NFS Gateway."""
    import params
    env.set_params(params)
    install_hadoop()
    self.configure(env)
    nfsgateway(action="start")
def start(self, env, upgrade_type=None):
    """Ensure Hadoop is installed before this component starts.

    ``upgrade_type`` is accepted for interface compatibility but unused.
    """
    import params
    env.set_params(params)
    install_hadoop()