def get_ambari_server_port(cls):
    """Return the Ambari server port, caching the value on the class.

    When SSL is enabled (per ``get_ambari_server_ssl``), the port is read
    from ``client.api.ssl.port`` in the Ambari server properties file;
    otherwise the default '8080' is used.
    """
    if not cls._ambari_server_port:
        if cls.get_ambari_server_ssl():
            cls._ambari_server_port = util.getPropertyValueFromFile(
                cls.ambari_server_properties, "client.api.ssl.port")
        else:
            cls._ambari_server_port = '8080'
    return cls._ambari_server_port
def updateJobProperties(cls, propFile, properties=None, haEnabled=False, debug=False):
    """Update an Oozie job properties file in place.

    Fills in cluster values (nameNode, jobTracker), hcatalog metastore
    settings when the file is an hcatalog workflow, and Kerberos
    principals when the cluster is secure, then rewrites *propFile*.

    :param propFile: path to the job properties file to update
    :param properties: optional dict of extra/override properties;
        nameNode/jobTracker are only filled in when absent
    :param haEnabled: when True, resolve the namenode principal against
        the active namenode instead of the HTTP address
    :param debug: when True, log the resulting file contents
    """
    fileSystemName = Hadoop.getFSDefaultValue()
    jobTrackerIP = MAPRED.getJobtrackerAddress()
    jobTracker = jobTrackerIP[0] + ":" + jobTrackerIP[1]
    if not properties:
        properties = {}
    # dict.has_key() was removed in Python 3; `in` works on both 2 and 3.
    if 'nameNode' not in properties:
        properties['nameNode'] = fileSystemName
    if 'jobTracker' not in properties:
        properties['jobTracker'] = jobTracker
    if "hcatalog" in propFile:
        if Hadoop.isSecure():
            kerberosPrincipal = Hive.getConfigValue(
                "hive.metastore.kerberos.principal")
            properties[
                'hive.metastore.kerberos.principal'] = kerberosPrincipal
        logger.info("Updating for hcatalog workflow")
        hcatNode = Hive.getConfigValue("hive.metastore.uris").replace(
            'thrift', 'hcat')
        logger.info("Hcat node is " + hcatNode)
        properties['hcatNode'] = hcatNode
    if Hadoop.isSecure():
        # determine the namenode and the jobtracker principal
        if haEnabled:
            nnPrincipal = HDFS.getNameNodePrincipal().replace(
                '_HOST', HDFS.getNamenodeByState('active'))
        else:
            nnPrincipal = HDFS.getNameNodePrincipal().replace(
                '_HOST', HDFS.getNamenodeHttpAddress()[0])
        jtPrincipal = MAPRED.getMasterPrincipal().replace(
            '_HOST', jobTrackerIP[0])
        properties['dfs.namenode.kerberos.principal'] = nnPrincipal
        properties['mapreduce.jobtracker.kerberos.principal'] = jtPrincipal
    wfPath = util.getPropertyValueFromFile(propFile, "oozie.wf.application.path")
    # Rewrite a localhost placeholder path to the real default filesystem.
    if wfPath is not None and wfPath.find("hdfs://localhost:9000") != -1:
        wfPath = wfPath.replace("hdfs://localhost:9000", fileSystemName)
        logger.info("Value of replaced oozie.wf.application.path is " + wfPath)
        properties['oozie.wf.application.path'] = wfPath
    util.writePropertiesToFile(propFile, propFile, properties)
    if debug:
        logger.info('Content of properties file %s' % propFile)
        # print the file to the console; `with` guarantees the handle closes
        with open(propFile, 'r') as f:
            logger.info(f.read())
def is_ambari_security_enabled(cls, property):
    """Return True when Ambari's API SSL is on and *property* opts in.

    Both conditions must hold: the ``api.ssl`` property in the Ambari
    server properties file is 'true', and *property* is 'yes' or 'true'
    (case-insensitive).
    """
    ssl_setting = util.getPropertyValueFromFile(
        cls.ambari_server_properties, "api.ssl")
    ssl_enabled = bool(ssl_setting) and ssl_setting.lower() == 'true'
    opted_in = property.lower() in ('yes', 'true')
    return ssl_enabled and opted_in
def get_login_identifier(cls):
    """Return NiFi's login identity provider, cached after first read."""
    if cls._login_identity_provider:
        return cls._login_identity_provider
    cls._login_identity_provider = util.getPropertyValueFromFile(
        cls.nifi_properties, "nifi.security.user.login.identity.provider")
    return cls._login_identity_provider
def get_flow_config_file(cls):
    '''
    Returns the file that contains the flow configuration, cached after
    the first lookup.
    :return: String
    '''
    if cls._flow_conf_file:
        return cls._flow_conf_file
    cls._flow_conf_file = util.getPropertyValueFromFile(
        cls.nifi_properties, "nifi.flow.configuration.file")
    return cls._flow_conf_file
def get_port(cls):
    '''
    Returns the port on which nifi is running (HTTPS port when
    encrypted, HTTP port otherwise), cached after the first lookup.
    '''
    if cls._rest_port:
        return cls._rest_port
    if cls.is_encrypted():
        prop_name = 'nifi.web.https.port'
    else:
        prop_name = 'nifi.web.http.port'
    cls._rest_port = util.getPropertyValueFromFile(
        cls.nifi_properties, prop_name)
    return cls._rest_port
def is_encrypted(cls):
    '''
    Returns if nifi is running in secure mode or not.
    :return: true if secure false otherwise.
    '''
    # NOTE: a falsy cached value is re-read on every call; preserved as-is.
    if not cls._is_encrypted:
        raw_value = util.getPropertyValueFromFile(
            cls.nifi_properties, "nifi.cluster.protocol.is.secure")
        if raw_value:
            cls._is_encrypted = raw_value.lower() == 'true'
        else:
            cls._is_encrypted = False
    return cls._is_encrypted
def getVersion(cls, refresh=False):
    """Return the NiFi version, re-reading it when *refresh* is True.

    Since HDF 3.0 the version is not stored as a property. On Linux this
    is not a problem because Ambari writes the version to the properties
    file; on Windows it must be recovered from the installed nifi
    folder name instead.
    """
    if refresh or not cls._version:
        if Machine.isWindows():
            name_parts = re.split("nifi-", cls.nifi_current_location)
            logger.info(
                "Nifi version in windows platform : " + name_parts[1])
            cls._version = name_parts[1]
        else:
            cls._version = util.getPropertyValueFromFile(
                cls.nifi_properties, "nifi.version")
    return cls._version
def update_lr_job_parameters(self):
    """Refresh job parameters for the longevity run.

    Applies beaver configs, propagates JAVA_HOME from the Hadoop env
    script into the machine config, re-reads the job parameters, and
    clears XA_KMS_HOME.
    """
    from beaver import beaverConfigs
    beaverConfigs.setConfigs()
    # get JAVA_HOME from the hadoop configs and publish it (plus the
    # derived java binary path) to the machine section
    hadoopConfDir = Config.get('hadoop', 'HADOOP_CONF')
    hadoopEnvFile = os.path.join(hadoopConfDir, 'hadoop-env.sh')
    javaHome = util.getPropertyValueFromFile(hadoopEnvFile, 'JAVA_HOME')
    Config.set('machine', 'JAVA_HOME', javaHome, overwrite=True)
    Config.set('machine', 'JAVA_CMD', os.path.join(javaHome, 'bin', 'java'), overwrite=True)
    # Removed: AMBARI_PROP_FILE was computed but never used (it was only
    # referenced by commented-out dead code).
    self.read_and_update_job_parameters()
    Config.set('xasecure', 'XA_KMS_HOME', '')
def get_log_dir(cls, refresh=False):
    """Return LOG_DIR from the SMM env script, cached unless *refresh*."""
    if cls._log_dir and not refresh:
        return cls._log_dir
    cls._log_dir = util.getPropertyValueFromFile(cls.smm_env_sh, "LOG_DIR")
    return cls._log_dir
def get_java_home(cls, refresh=False):
    """Return JAVA_HOME from zookeeper-env.sh, cached unless *refresh*."""
    if cls._java_home and not refresh:
        return cls._java_home
    env_script = os.path.join(
        Config.get('zookeeper', 'ZK_HOME'), "conf", "zookeeper-env.sh")
    cls._java_home = util.getPropertyValueFromFile(env_script, "JAVA_HOME")
    return cls._java_home
def get_java_home(cls, refresh=False):
    """Return JAVA_HOME from the schema registry env script, cached
    unless *refresh* is requested."""
    if cls._java_home and not refresh:
        return cls._java_home
    cls._java_home = util.getPropertyValueFromFile(
        cls.schema_registry_env_sh, "JAVA_HOME")
    return cls._java_home
def get_user_authorizer(cls):
    """Return NiFi's user authorizer property, cached after first read."""
    if cls._user_authorizer:
        return cls._user_authorizer
    cls._user_authorizer = util.getPropertyValueFromFile(
        cls.nifi_properties, "nifi.security.user.authorizer")
    return cls._user_authorizer