def getNameNodeURL(nameservice2=False):
    """Return the NameNode web UI base URL for the cluster.

    Picks https + the NameNode HTTPS address when wire encryption is
    enabled, otherwise http + the plain HTTP address.
    """
    encrypted = Hadoop.isEncrypted()
    if encrypted:
        address = HDFS.getNamenodeHttpsAddress(nameservice2)
    else:
        address = HDFS.getNamenodeHttpAddress(nameservice2)
    scheme = "https" if encrypted else "http"
    base_url = "%s://%s" % (scheme, address)
    logger.info("URL being returned is - %s" % base_url)
    return base_url
def updateJobProperties(cls, propFile, properties=None, haEnabled=False, debug=False):
    """Update an Oozie job properties file in place with cluster-specific values.

    Fills in nameNode/jobTracker defaults, hcatalog metastore settings (when
    the file name contains "hcatalog"), Kerberos principals on secure
    clusters, and rewrites a localhost workflow path to the real filesystem.

    :param propFile: path to the properties file to read and rewrite
    :param properties: optional dict of overrides; missing keys are defaulted
    :param haEnabled: when True, resolve the NN principal via the active NameNode
    :param debug: when True, log the final file contents
    """
    fileSystemName = Hadoop.getFSDefaultValue()
    jobTrackerIP = MAPRED.getJobtrackerAddress()
    jobTracker = jobTrackerIP[0] + ":" + jobTrackerIP[1]
    if not properties:
        properties = {}
    # dict.has_key() was removed in Python 3; setdefault keeps the same
    # "only fill when absent" semantics and works on both 2 and 3.
    properties.setdefault('nameNode', fileSystemName)
    properties.setdefault('jobTracker', jobTracker)
    if "hcatalog" in propFile:
        if Hadoop.isSecure():
            kerberosPrincipal = Hive.getConfigValue(
                "hive.metastore.kerberos.principal")
            properties[
                'hive.metastore.kerberos.principal'] = kerberosPrincipal
        logger.info("Updating for hcatalog workflow")
        # The metastore URI is thrift://...; the workflow wants hcat://...
        hcatNode = Hive.getConfigValue("hive.metastore.uris").replace(
            'thrift', 'hcat')
        logger.info("Hcat node is " + hcatNode)
        properties['hcatNode'] = hcatNode
    if Hadoop.isSecure():
        # Determine the namenode and the jobtracker principals by
        # substituting the _HOST placeholder with the concrete host.
        if haEnabled:
            nnHost = HDFS.getNamenodeByState('active')
        else:
            nnHost = HDFS.getNamenodeHttpAddress()[0]
        nnPrincipal = HDFS.getNameNodePrincipal().replace('_HOST', nnHost)
        jtPrincipal = MAPRED.getMasterPrincipal().replace(
            '_HOST', jobTrackerIP[0])
        properties['dfs.namenode.kerberos.principal'] = nnPrincipal
        properties['mapreduce.jobtracker.kerberos.principal'] = jtPrincipal
    wfPath = util.getPropertyValueFromFile(propFile,
                                           "oozie.wf.application.path")
    if wfPath is not None and "hdfs://localhost:9000" in wfPath:
        wfPath = wfPath.replace("hdfs://localhost:9000", fileSystemName)
        logger.info("Value of replaced oozie.wf.application.path is " + wfPath)
        properties['oozie.wf.application.path'] = wfPath
    util.writePropertiesToFile(propFile, propFile, properties)
    if debug:
        logger.info('Content of properties file %s' % propFile)
        # Context manager guarantees the handle is closed even if
        # logger.info raises (the original leaked it in that case).
        with open(propFile, 'r') as f:
            logger.info(f.read())
# Knox test configuration derived from the cluster under test.
# Static values first; URLs that depend on them are appended afterwards.
CONF = {
    'KNOX_GUEST_USERNAME': "******",
    'KNOX_GUEST_PASSWORD': "******",
    'KNOX_PROTO': "https",
    'KNOX_HOST': Config.get('knox', 'KNOX_HOST').split(',')[0],
    'AMBARI_HOST': Config.get("machine", "GATEWAY"),
    'KNOX_PORT': 8443,
    'AMBARI_PORT': 8080,
    'KNOX_TOPO': "sandbox",
}
CONF['KNOX_BASE_URL'] = "%s://%s:%s/gateway" % (
    CONF['KNOX_PROTO'], CONF['KNOX_HOST'], CONF['KNOX_PORT'])
CONF['KNOX_TOPO_URL'] = "%s/%s" % (CONF['KNOX_BASE_URL'], CONF['KNOX_TOPO'])
CONF['KNOX_WEBHDFS_URL'] = "%s/%s/webhdfs/v1/" % (
    CONF['KNOX_BASE_URL'], CONF['KNOX_TOPO'])
# NOTE(review): getNamenodeHttpAddress() is indexed [0] elsewhere in this
# file, so it may return a tuple; confirm this formats as host:port here.
CONF['DIRECT_WEBHDFS_URL'] = "http://%s/webhdfs/v1/" % (
    HDFS.getNamenodeHttpAddress())
CONF['RANGER_KNOX_POLICY'] = None
#CONF['SRC_DIR'] = os.path.join(Config.getEnv('WORKSPACE'), 'tests', 'knox', 'knox_2')

# OpenStack hosts need the ".com" suffix for the SSO provider URL.
knox_host = CONF['KNOX_HOST']
if Machine.isOpenStack():
    knox_host += ".com"
KNOXSSO_PROVIDER_URL = "%s://%s:%s/gateway/knoxsso/api/v1/websso" % (
    CONF['KNOX_PROTO'], knox_host, CONF['KNOX_PORT'])

CLUSTER_NAME = Ambari.getClusterName(is_enc=Hadoop.isEncrypted())
KNOX_TRUSTSTORE_PASSWORD = "******"
KNOX_KEYSTORE_PATH = "/usr/hdp/current/knox-server/data/security/keystores/"
KNOX_TOPOLOGY_DIR = "/etc/knox/conf/topologies/"
JAVA_HOME = Config.get("machine", "QA_CODE_JAVA_HOME")