def actionexecute(self, env): Logger.info("Host checks started.") config = Script.get_config() tmp_dir = Script.get_tmp_dir() report_file_handler_dict = {} #print "CONFIG: " + str(config) check_execute_list = config['commandParams']['check_execute_list'] if check_execute_list == '*BEFORE_CLEANUP_HOST_CHECKS*': check_execute_list = BEFORE_CLEANUP_HOST_CHECKS structured_output = {} Logger.info("Check execute list: " + str(check_execute_list)) # check each of the commands; if an unknown exception wasn't handled # by the functions, then produce a generic exit_code : 1 if CHECK_JAVA_HOME in check_execute_list: try: java_home_check_structured_output = self.execute_java_home_available_check( config) structured_output[ CHECK_JAVA_HOME] = java_home_check_structured_output except Exception, exception: Logger.exception( "There was an unexpected error while checking for the Java home location: " + str(exception)) structured_output[CHECK_JAVA_HOME] = { "exit_code": 1, "message": str(exception) }
def actionexecute(self, env): config = Script.get_config() structured_output = {} try: repo_info_json = config['hostLevelParams']['repo_info'] repo_info_dict = json.loads(repo_info_json) for item in repo_info_dict["repositories"]: base_url = item["base_url"] repo_name = item["repo_name"] repo_id = item["repo_id"] repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template'] repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template'] template = repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else repo_ubuntu ubuntu_components = [repo_name] + self.UBUNTU_REPO_COMPONENTS_POSTFIX Repository(repo_id, action = "create", base_url = base_url, mirror_list = None, repo_file_name = repo_name, repo_template = template, components = ubuntu_components, # ubuntu specific ) structured_output["repo_update"] = {"exit_code" : 0, "message": format("Repository files successfully updated!")} except Exception, exception: Logger.logger.exception("ERROR: There was an unexpected error while updating repositories") raise Fail("Failed to update repo files!")
def get_pid_file():
  """
  Build and return the pid file used to report the status of the HAWQ
  Master, Standby or Segment on this host.

  The hawq-&lt;component&gt;.pid file under hawq_pid_dir is regenerated from the
  pid recorded in the component data directory's postmaster pid file.
  """
  config = Script.get_config()
  # Master and standby share the "master" layout; everything else is a segment.
  component = "master" if config['componentName'] in ["HAWQMASTER", "HAWQSTANDBY"] else "segment"
  hawq_pid_file = os.path.join(hawq_constants.hawq_pid_dir, "hawq-{0}.pid".format(component))
  File(hawq_pid_file, action='delete')
  utils.create_dir_as_hawq_user(hawq_constants.hawq_pid_dir)

  # hawq-site content from the Ambari server is not available when the
  # command type is STATUS_COMMAND, so read hawq_master_directory /
  # hawq_segment_directory straight from the local hawq-site file.
  data_dir = common.get_local_hawq_site_property("hawq_{0}_directory".format(component))
  postmaster_pid_file = os.path.join(data_dir, hawq_constants.postmaster_pid_filename)

  pid = ""
  if os.path.exists(postmaster_pid_file):
    with open(postmaster_pid_file, 'r') as fh:
      pid = fh.readline().strip()
  if not pid:
    raise Fail("Failed to fetch pid from {0}".format(postmaster_pid_file))

  File(hawq_pid_file, content=pid, owner=hawq_constants.hawq_user, group=hawq_constants.hawq_user)
  return hawq_pid_file
def actionexecute(self, env): config = Script.get_config() structured_output = {} try: repo_info = config['repositoryFile'] for item in repo_info["repositories"]: base_url = item["baseUrl"] repo_name = item["repoName"] repo_id = item["repoId"] distribution = item["distribution"] if "distribution" in item else None components = item["components"] if "components" in item else None repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template'] repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template'] template = repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else repo_ubuntu ubuntu_components = [distribution if distribution else repo_name] + \ [components.replace(",", " ") if components else self.UBUNTU_REPO_COMPONENTS_POSTFIX] Repository(repo_id, action = "create", base_url = base_url, mirror_list = None, repo_file_name = repo_name, repo_template = template, components = ubuntu_components, # ubuntu specific ) structured_output["repo_update"] = {"exit_code" : 0, "message": format("Repository files successfully updated!")} except Exception, exception: Logger.logger.exception("ERROR: There was an unexpected error while updating repositories") raise Fail("Failed to update repo files!")
def actionexecute(self, env): config = Script.get_config() tmp_dir = Script.get_tmp_dir() report_file_handler_dict = {} #print "CONFIG: " + str(config) check_execute_list = config['commandParams']['check_execute_list'] structured_output = {} # check each of the commands; if an unknown exception wasn't handled # by the functions, then produce a generic exit_code : 1 if CHECK_JAVA_HOME in check_execute_list: try: java_home_check_structured_output = self.execute_java_home_available_check( config) structured_output[ CHECK_JAVA_HOME] = java_home_check_structured_output except Exception, exception: print "There was an unexpected error while checking for the Java home location: " + str( exception) structured_output[CHECK_JAVA_HOME] = { "exit_code": 1, "message": str(exception) }
def get_pid_file():
  """
  Fetches the pid file used to report the status of the HAWQ Master,
  Standby or Segment running on this host.
  """
  config = Script.get_config()
  name = config['componentName']
  if name in ["HAWQMASTER", "HAWQSTANDBY"]:
    component = "master"
  else:
    component = "segment"
  pid_file_path = os.path.join(hawqconstants.hawq_pid_dir, "hawq-{0}.pid".format(component))
  File(pid_file_path, action='delete')
  utils.create_dir_as_hawq_user(hawqconstants.hawq_pid_dir)

  # Read hawq_master_directory / hawq_segment_directory from the local
  # hawq-site file: server-supplied configs are absent for STATUS_COMMANDs.
  site_property = "hawq_{0}_directory".format(component)
  postmaster_pid_file = os.path.join(common.get_local_hawq_site_property(site_property), hawqconstants.postmaster_pid_filename)

  pid = ""
  if os.path.exists(postmaster_pid_file):
    with open(postmaster_pid_file, 'r') as src:
      pid = src.readline().strip()
  if not pid:
    raise Fail("Failed to fetch pid from {0}".format(postmaster_pid_file))

  File(pid_file_path, content=pid, owner=hawqconstants.hawq_user, group=hawqconstants.hawq_user)
  return pid_file_path
def start(self, env): #解析 service 的配置参数 config = Script.get_config() AMBARI_USER =config['configurations']['ambari-server-env']['AMBARI_USER'] AMBARI_USER_PWD =config['configurations']['ambari-server-env']['AMBARI_USER_PASSWORD'] AMBARI_SERVER_HOST =config['configurations']['ambari-server-env']['AMBARI_SERVER_HOST'] AMBARI_WEB_LISTEN_PORT = config['configurations']['ambari-server-env']['AMBARI_WEB_LISTEN_PORT'] print "Ambari User:"******" \nAmbari user password: "******"\nServer: " +AMBARI_SERVER_HOST + "\nLinsten port " + str(AMBARI_WEB_LISTEN_PORT) cmd = "mkdir -p /var/run/guoqingyao" os.system(cmd) print "start the service"
def actionexecute(self, env): config = Script.get_config() params = config['commandParams'] validation_passed = self.check_users(params) and self.check_directories(params) if validation_passed: print 'All configurations validated!' else: self.fail_with_error('Configurations validation failed!')
def actionexecute(self, env):
  """Remove every installed stack version lower than the command's version."""
  config = Script.get_config()
  structured_output = {}
  current_version = config['commandParams']['version']
  # Resolve the stack-select tool package for this stack.
  self.stack_tool_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)
  for obsolete_version in self.get_lower_versions(current_version):
    self.remove_stack_version(structured_output, obsolete_version)
def actionexecute(self, env):
  """Clear the package manager's repository cache and report the outcome."""
  config = Script.get_config()
  structured_output = {}
  Logger.info("Clearing repository cache")
  # The clear-cache command requires elevated privileges.
  code, output = shell.call(self.get_clearcache_cmd(), sudo = True)
  if code == 0:
    structured_output["clear_repocache"] = {"exit_code" : 0, "message": format("Repository cache successfully cleared!")}
  else:
    structured_output["clear_repocache"] = {"exit_code": code, "message": "Failed to clear repository cache! {0}".format(str(output))}
  self.put_structured_out(structured_output)
def actionexecute(self, env): config = Script.get_config() packages_to_remove = config['roleParams']['package_list'].split(',') structured_output = {'success': [], 'failure': []} for package_name in packages_to_remove: try: Package(package_name, action='remove', ignore_dependencies=True) Logger.info('Removed {0}'.format(package_name)) structured_output['success'].append(package_name) except Exception, e: Logger.exception('Failed to remove {0}: {1}'.format( package_name, str(e))) structured_output['failure'].append(package_name)
def actionexecute(self, env): config = Script.get_config() tmp_dir = Script.get_tmp_dir() #print "CONFIG: " + str(config) check_execute_list = config['commandParams']['check_execute_list'] structured_output = {} # check each of the commands; if an unknown exception wasn't handled # by the functions, then produce a generic exit_code : 1 if CHECK_JAVA_HOME in check_execute_list: try : java_home_check_structured_output = self.execute_java_home_available_check(config) structured_output[CHECK_JAVA_HOME] = java_home_check_structured_output except Exception, exception: print "There was an unexpected error while checking for the Java home location: " + str(exception) structured_output[CHECK_JAVA_HOME] = {"exit_code" : 1, "message": str(exception)}
def actionexecute(self, env): config = Script.get_config() structured_output = {} try: repo_info_json = config['hostLevelParams']['repo_info'] repo_info_dict = json.loads(repo_info_json) for item in repo_info_dict["repositories"]: base_url = item["base_url"] repo_name = item["repo_name"] repo_id = item["repo_id"] repo_rhel_suse = config['configurations']['cluster-env'][ 'repo_suse_rhel_template'] repo_ubuntu = config['configurations']['cluster-env'][ 'repo_ubuntu_template'] template = repo_rhel_suse if OSCheck.is_suse_family( ) or OSCheck.is_redhat_family() else repo_ubuntu ubuntu_components = [repo_name ] + self.UBUNTU_REPO_COMPONENTS_POSTFIX Repository( repo_id, action="create", base_url=base_url, mirror_list=None, repo_file_name=repo_name, repo_template=template, components=ubuntu_components, # ubuntu specific ) structured_output["repo_update"] = { "exit_code": 0, "message": format("Repository files successfully updated!") } except Exception, exception: Logger.logger.exception( "ERROR: There was an unexpected error while updating repositories" ) raise Fail("Failed to update repo files!")
def start(self, env): # analysis service config config = Script.get_config() AMBARI_USER =config['configurations']['ambari-server-env']['AMBARI_USER'] AMBARI_USER_PWD =config['configurations']['ambari-server-env']['AMBARI_USER_PASSWORD'] AMBARI_SERVER_HOST =config['configurations']['ambari-server-env']['AMBARI_SERVER_HOST'] AMBARI_WEB_LISTEN_PORT = config['configurations']['ambari-server-env']['AMBARI_WEB_LISTEN_PORT'] print "Ambari User:"******" \nAmbari user password: "******"\nServer: " +AMBARI_SERVER_HOST + "\nLinsten port " + str(AMBARI_WEB_LISTEN_PORT) cmd = "mkdir -p /var/run/guoqingyao" os.system(cmd) print "start the service" def stop(self, env): cmd ="rm -rf /var/run/guoqingyao" os.system(cmd) print "stop the service" def status(self, env): cmd = "echo 'check one time' > /tmp/my.log" os.system(cmd) cmd = "ls /var/run/guoqingyao" result = os.system(cmd) if result != 0: print "The component is not runing"
"""
import os
import functools

from resource_management import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions import conf_select

# The stack-select helper module differs between HDP and PHD stacks;
# fall back to phd_select when hdp_select is unavailable.
try:
    from resource_management.libraries.functions import hdp_select as hadoop_select
except ImportError:
    from resource_management.libraries.functions import phd_select as hadoop_select

# Command configuration shared by all lookups in this params module.
config = Script.get_config()


def __get_component_host(component):
    """
    Returns the first host where the given component is deployed,
    None if the component is not deployed
    """
    component_host = None
    if component in config['clusterHostInfo'] and len(
            config['clusterHostInfo'][component]) > 0:
        component_host = config['clusterHostInfo'][component][0]
    return component_host


# Host this command is executing on.
hostname = config['hostname']
from resource_management import Script

# Chorus service configuration, read once for this params module.
config = Script.get_config()['configurations']

# Whether the user accepted the license terms ('yes' enables installation).
TERMS_ACCEPTED = config['chorus-env']['chorus.termsaccepted'] == 'yes'
# 'generate' means no fixed salt was supplied; an empty value triggers generation.
SECURITY_SALT = '' if config['chorus-env'][
    'chorus.security.salt'] == 'generate' else config['chorus-env'][
        'chorus.security.salt']
INSTALLER_PATH = config['chorus-env']['chorus.installation.installerpath']
INSTALLATION_DIRECTORY = config['chorus-env']['chorus.installation.directory']
DATA_DIRECTORY = config['chorus-env']['chorus.installation.datadirectory']
SERVER_PORT = config['chorus-env']['chorus.server.port']
SERVER_TIMEOUT = config['chorus-env']['chorus.server.timeout']
DEFAULT_PREVIEW_ROW_LIMIT = config['chorus-env'][
    'chorus.server.defaultpreviewrowlimit']
EXECUTION_TIMEOUT = config['chorus-env']['chorus.server.executiontimeout']
LOG_LEVEL = config['chorus-env']['chorus.server.loglevel']
MAIL_ENABLED = config['chorus-env']['chorus.server.mailenabled']

# JVM tuning values.
minimum_memory = config['chorus-tuning']['chorus.minimum_memory']
maximum_memory = config['chorus-tuning']['chorus.maximum_memory']
# BUG FIX: the two assignments below previously overwrote maximum_memory
# instead of defining their own variables, so the young-heap and perm-size
# settings were lost and maximum_memory ended up holding chorus.max_perm_size.
young_heap_size = config['chorus-tuning']['chorus.young_heap_size']
max_perm_size = config['chorus-tuning']['chorus.max_perm_size']
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys

from resource_management import format_hdp_stack_version, Script
from resource_management.libraries.functions import format
from resource_management.libraries.functions.default import default
import status_params

# server configurations
config = Script.get_config()

# security enabled
security_enabled = status_params.security_enabled

stack_name = default("/hostLevelParams/stack_name", None)

# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)

# hdp version
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# METADATA_HOME_DIR from the environment wins; otherwise fall back to the
# hdp-select current-atlas-server symlink.
metadata_home = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server'
metadata_bin = format("{metadata_home}/bin")