def start(self, env, upgrade_type=None):
  """Configure this host and start the YARN ResourceManager daemon.

  Args:
    env: Ambari execution environment; receives the module params.
    upgrade_type: optional upgrade kind passed through by the framework
      (unused here beyond the standard signature).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger YARN plugin related calls: only wire up the plugin when a
  # Ranger admin is present AND this stack supports the YARN plugin.
  ranger_plugin_wanted = params.has_ranger_admin and params.is_supported_yarn_ranger
  if ranger_plugin_wanted:
    setup_ranger_yarn()

  service('resourcemanager', action='start')
def start(self, env, rolling_restart=False):
  """Configure this host, then start the YARN ResourceManager.

  Args:
    env: Ambari execution environment; receives the module params.
    rolling_restart: framework flag for rolling restarts (unused here).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger Yarn Plugin related calls. Nested guards are equivalent to the
  # short-circuiting `and` of both flags.
  if params.has_ranger_admin:
    if params.is_supported_yarn_ranger:
      setup_ranger_yarn()

  service('resourcemanager', action='start')
def start(self, env, upgrade_type=None):
  """Start the ResourceManager, first ensuring ATS DFS directories exist.

  Args:
    env: Ambari execution environment; receives the module params.
    upgrade_type: optional upgrade kind from the framework (unused here).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger Yarn Plugin related calls
  if params.has_ranger_admin and params.is_supported_yarn_ranger:
    setup_ranger_yarn()

  # Wait for active-dir and done-dir to be created by ATS if needed.
  if params.has_ats:
    store_dir, active_dir = params.entity_groupfs_store_dir, params.entity_groupfs_active_dir
    Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
    self.wait_for_dfs_directories_created(store_dir, active_dir)

  service('resourcemanager', action='start')
def start(self, env, upgrade_type=None):
  """Start the YARN ResourceManager service.

  Configures the host, optionally wires up the Ranger YARN plugin, waits
  for the ATS timeline directories in DFS when ATS is deployed, then
  starts the resourcemanager daemon.

  Args:
    env: Ambari execution environment; receives the module params.
    upgrade_type: optional upgrade kind passed by the framework
      (not used by this implementation).
  """
  import params
  env.set_params(params)
  self.configure(env) # FOR SECURITY
  if params.has_ranger_admin and params.is_supported_yarn_ranger:
    setup_ranger_yarn() #Ranger Yarn Plugin related calls
  # wait for active-dir and done-dir to be created by ATS if needed
  if params.has_ats:
    Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
    self.wait_for_dfs_directories_created(params.entity_groupfs_store_dir, params.entity_groupfs_active_dir)
  service('resourcemanager', action='start')
def start(self, env, rolling_restart=False):
  """Start the ResourceManager, staging Tez artifacts appropriate to the stack.

  On HDP >= 2.2 the tez tarball is copied into HDFS; on older stacks the
  tez jars are installed locally instead.

  Args:
    env: Ambari execution environment; receives the module params.
    rolling_restart: framework flag for rolling restarts (unused here).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger Yarn Plugin related calls
  if params.has_ranger_admin and params.is_supported_yarn_ranger:
    setup_ranger_yarn()

  if Script.is_hdp_stack_greater_or_equal("2.2"):
    # Stage the tez tarball in HDFS and flush the pending HdfsResource ops.
    resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user)
    if resource_created:
      params.HdfsResource(None, action="execute")
  else:
    install_tez_jars()

  service('resourcemanager', action='start')
def start(self, env, rolling_restart=False):
  """Start the YARN ResourceManager service.

  Configures the host, optionally enables the Ranger YARN plugin, stages
  Tez artifacts (local jar install vs. HDFS tarball copy, depending on
  stack version), then starts the resourcemanager daemon.

  Args:
    env: Ambari execution environment; receives the module params.
    rolling_restart: framework flag for rolling restarts (not used here).
  """
  import params
  env.set_params(params)
  self.configure(env) # FOR SECURITY
  if params.is_supported_yarn_ranger:
    setup_ranger_yarn() #Ranger Yarn Plugin related calls
  # NOTE(review): install_tez_jars() runs only when the stack version is
  # exactly 2.1 (compare_versions == 0); stacks older than 2.1 fall into the
  # copy_tarballs_to_hdfs branch, which the comment below says requires
  # >= 2.2 — confirm this is the intended behavior for pre-2.1 stacks.
  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
    install_tez_jars()
  else:
    # will work only for stack versions >=2.2
    copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
  service('resourcemanager', action='start')
def start(self, env, rolling_restart=False):
  """Start the ResourceManager, uploading the tez tarball on HDP >= 2.2.

  Args:
    env: Ambari execution environment; receives the module params.
    rolling_restart: framework flag for rolling restarts (unused here).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger Yarn Plugin related calls
  if params.is_supported_yarn_ranger:
    setup_ranger_yarn()

  if Script.is_hdp_stack_greater_or_equal("2.2"):
    # Works only for stack versions >= 2.2: render the destination path and
    # declare the tez tarball upload, then flush pending HdfsResource ops.
    tez_destination = InlineTemplate(params.tez_tar_destination).get_content()
    params.HdfsResource(tez_destination,
                        type="file",
                        action="create_on_execute",
                        source=params.tez_tar_source,
                        group=params.user_group,
                        owner=params.hdfs_user)
    params.HdfsResource(None, action="execute")
  else:
    install_tez_jars()

  service('resourcemanager', action='start')
def start(self, env, upgrade_type=None):
  """Start the YARN ResourceManager service.

  Configures the host, optionally enables the Ranger YARN plugin, waits
  for the ATS timeline directories in DFS when ATS is deployed, copies the
  yarn tarball to HDFS on stacks that support it, then starts the
  resourcemanager daemon.

  Args:
    env: Ambari execution environment; receives the module params.
    upgrade_type: optional upgrade kind passed by the framework
      (not used by this implementation).
  """
  import params
  env.set_params(params)
  self.configure(env)  # FOR SECURITY

  # Ranger Yarn Plugin related calls
  if params.enable_ranger_yarn and params.is_supported_yarn_ranger:
    setup_ranger_yarn()

  # Wait for active-dir and done-dir to be created by ATS if needed.
  if params.has_ats:
    Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
    self.wait_for_dfs_directories_created(params.entity_groupfs_store_dir, params.entity_groupfs_active_dir)

  if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
    # BUG FIX: the original wrote `copy_to_hdfs(...) or resource_created`,
    # reading `resource_created` before any assignment in this function —
    # an UnboundLocalError whenever this branch ran. Only one tarball is
    # copied here, so the accumulator pattern is unnecessary.
    resource_created = copy_to_hdfs(
      "yarn",
      params.user_group,
      params.hdfs_user,
      skip=params.sysprep_skip_copy_tarballs_hdfs)
    if resource_created:
      # Flush the queued HdfsResource operation.
      params.HdfsResource(None, action="execute")

  service('resourcemanager', action='start')