def updateJobProperties(cls, propFile, properties=None, haEnabled=False, debug=False):
    fileSystemName = Hadoop.getFSDefaultValue()
    jobTrackerIP = MAPRED.getJobtrackerAddress()
    jobTracker = jobTrackerIP[0] + ":" + jobTrackerIP[1]

    if not properties:
        properties = {}
    # Fill in defaults without clobbering caller-supplied values.
    if 'nameNode' not in properties:
        properties['nameNode'] = fileSystemName
    if 'jobTracker' not in properties:
        properties['jobTracker'] = jobTracker

    if "hcatalog" in propFile:
        if Hadoop.isSecure():
            kerberosPrincipal = Hive.getConfigValue("hive.metastore.kerberos.principal")
            properties['hive.metastore.kerberos.principal'] = kerberosPrincipal
        logger.info("Updating for hcatalog workflow")
        hcatNode = Hive.getConfigValue("hive.metastore.uris").replace('thrift', 'hcat')
        logger.info("Hcat node is " + hcatNode)
        properties['hcatNode'] = hcatNode

    if Hadoop.isSecure():
        # Determine the namenode and jobtracker principals, substituting the
        # _HOST placeholder with the actual host for this cluster.
        if haEnabled:
            nnPrincipal = HDFS.getNameNodePrincipal().replace(
                '_HOST', HDFS.getNamenodeByState('active'))
        else:
            nnPrincipal = HDFS.getNameNodePrincipal().replace(
                '_HOST', HDFS.getNamenodeHttpAddress()[0])
        jtPrincipal = MAPRED.getMasterPrincipal().replace('_HOST', jobTrackerIP[0])
        properties['dfs.namenode.kerberos.principal'] = nnPrincipal
        properties['mapreduce.jobtracker.kerberos.principal'] = jtPrincipal

    # Rewrite a localhost workflow path to point at the real default filesystem.
    wfPath = util.getPropertyValueFromFile(propFile, "oozie.wf.application.path")
    if wfPath is not None and wfPath.find("hdfs://localhost:9000") != -1:
        wfPath = wfPath.replace("hdfs://localhost:9000", fileSystemName)
        logger.info("Value of replaced oozie.wf.application.path is " + wfPath)
        properties['oozie.wf.application.path'] = wfPath

    util.writePropertiesToFile(propFile, propFile, properties)

    if debug:
        logger.info('Content of properties file %s' % propFile)
        # Print the file to the console for debugging.
        with open(propFile, 'r') as f:
            logger.info(f.read())
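# Usage sketch (illustrative, not from the source): assumes these classmethods
# hang off a rolling-upgrade Oozie helper class, here called RuOozie
# (hypothetical name), and that a job.properties file already exists locally.
#
#   RuOozie.updateJobProperties(
#       '/tmp/oozie-hcatalog/job.properties',   # hypothetical path
#       properties={'queueName': 'default'},    # merged with nameNode/jobTracker defaults
#       haEnabled=False,
#       debug=True)                             # logs the rewritten file contents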
def submit_storm_hive_topology(cls, tcId, className, args, useStandaloneCmd):
    if Hadoop.isSecure():
        # Initialize to avoid an unbound variable when no realm can be found.
        user_realm = None
        if Config.hasOption('machine', 'USER_REALM'):
            user_realm = Config.get('machine', 'USER_REALM', '')
        else:
            # Derive the realm from the namenode principal (user/_HOST@REALM).
            nnKerbPrincipal = HDFS.getNameNodePrincipal(defaultValue='')
            atloc = nnKerbPrincipal.find("@")
            if atloc != -1:
                user_realm = nnKerbPrincipal[atloc + 1:]
        if user_realm:
            args += " " + Machine.getHeadlessUserKeytab(Config.getEnv('USER')) \
                + " " + Config.getEnv('USER') + '@' + user_realm

    exit_code, stdout = Storm.runStormHdfsTopology(
        TARGET_HIVE_STORM_JAR,
        className,
        args,
        None,
        logoutput=True,
        inBackground=False,
        useStandaloneCmd=useStandaloneCmd)
    logger.info(exit_code)
    ruAssert("Storm", exit_code == 0, "[StormHiveSubmit] %s Failed" % (tcId))
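# Usage sketch (illustrative, not from the source): submits a Storm-Hive
# topology class packaged in TARGET_HIVE_STORM_JAR; on secure clusters the
# headless keytab and principal are appended to args automatically. The class
# and argument values below are hypothetical.
#
#   RuStorm.submit_storm_hive_topology(
#       tcId='StormHive-01',
#       className='org.example.storm.HiveTopology',
#       args='stormdb stormtable',
#       useStandaloneCmd=True)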
def background_job_setup(cls, runSmokeTestSetup=True, config=None):
    '''
    Setup for background long running job
    :param runSmokeTestSetup: Runs smoke test setup if set to true
    '''
    from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
    UpgradePerNode.reportProgress(
        "[INFO][FLUME][BGJobSetup] Long running job setup for Flume component started")
    from beaver.component.flume import Agent
    global agent1
    global agent2

    # Stage the data generator and create working/checkpoint directories
    # for both agents.
    if not os.path.exists(cls._local_work_dir):
        os.mkdir(cls._local_work_dir)
    shutil.copy(cls._flume_datagen_src, cls._local_work_dir)
    agent1 = Agent(cls._local_work_dir)
    agent2 = Agent(cls._local_work_dir)
    for outdir in (cls._agent1_chkpt_dir, cls._agent1_data_dir,
                   cls._agent2_chkpt_dir, cls._agent2_data_dir):
        os.mkdir(outdir)

    logger.info("Preparing the Flume configs for long running test")
    propertyMap = {}
    namenode = Hadoop.getFSDefaultValue()
    propertyMap['agent2.sinks.hdfsSink.hdfs.path'] = "%s%s" % (namenode, cls._hdfs_test_dir)
    if Hadoop.isSecure():
        # Initialize to avoid an unbound variable when no realm can be found.
        user_realm = None
        if Config.hasOption('machine', 'USER_REALM'):
            user_realm = Config.get('machine', 'USER_REALM', '')
        else:
            # Derive the realm from the namenode principal (user/_HOST@REALM).
            nnKerbPrincipal = HDFS.getNameNodePrincipal(defaultValue='')
            atloc = nnKerbPrincipal.find("@")
            if atloc != -1:
                user_realm = nnKerbPrincipal[atloc + 1:]
        if user_realm:
            propertyMap['agent2.sinks.hdfsSink.hdfs.kerberosPrincipal'] = \
                cls._test_user + '@' + user_realm
            propertyMap['agent2.sinks.hdfsSink.hdfs.kerberosKeytab'] = \
                Machine.getHeadlessUserKeytab(cls._test_user)
    util.writePropertiesToFile(
        os.path.join(cls._flume_test_conf, 'longrunning.properties'),
        cls._flume_test_src, propertyMap)
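# Usage sketch (illustrative, not from the source): writes
# longrunning.properties so that agent2's HDFS sink targets
# <fs.defaultFS><cls._hdfs_test_dir>, adding Kerberos principal/keytab
# properties on secure clusters. Class name is hypothetical.
#
#   RuFlume.background_job_setup(runSmokeTestSetup=False)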
def getEnvironmentVariables(cls, directory):
    # Need to set the below for the oozie-regression test
    OOZIE_HDFS_LOG_DIR = str(
        Hadoop.getFSDefaultValue(True) + '/user/' +
        Config.get("oozie", "OOZIE_USER") + '/oozie-logs/')
    if Machine.type() == "Windows":
        sep = ";"
        OOZIE_HDFS_LOG_DIR = OOZIE_HDFS_LOG_DIR + os.environ.get("COMPUTERNAME")
    else:
        sep = ":"
        OOZIE_HDFS_LOG_DIR = OOZIE_HDFS_LOG_DIR + Oozie.getOozieServers()[0]
    OOZIE_QA_REG_DIR = cls.getOozieRegressionFolder()
    path = os.path.join(OOZIE_QA_REG_DIR, "lib", "yoozietest-qa-1.0.0-SNAPSHOT.jar") + sep + \
        os.path.join(OOZIE_QA_REG_DIR, "lib", "original-yoozietest-qa-1.0.0-SNAPSHOT.jar")
    Config.setEnv("LOCAL_CP", path)
    oozie_server = cls.getOozieUrl()

    if HDFS.isASV() or HDFS.isCabo():
        if HDFS.isCabo():
            # Cabo gets the FQDN URI to use the scheme as a differentiator for other FS
            HIT_NN = str(Hadoop.getConfigValue("fs.defaultFS", None))
        else:  # is ASV
            HIT_NN = str(Hadoop.getFSDefaultValue(False))
        return {
            "JAVA_HOME": Config.get('machine', 'JAVA_HOME'),
            "HADOOP_HOME": Config.get('hadoop', 'HADOOP_HOME'),
            "HADOOP_CONF_DIR": Config.get('hadoop', 'HADOOP_CONF'),
            "HIT_NN": HIT_NN,
            "HIT_JT": str(MAPRED.getJobtrackerAddress()[0]),
            "HIT_JT_PORT": str(MAPRED.getJobtrackerAddress()[1]),
            # TODO: change the following 3 for secure setups
            "OOZIE_QA_AUTH": "simple",
            "OOZIE_NN_KRB_PRINCIPAL": "Blah",
            "OOZIE_JT_KRB_PRINCIPAL": "Blah",
            "OOZIE_TEST_SUITE": "testngRegressionSuiteDebug",
            "OOZIE_HOME": Config.get("oozie", "OOZIE_HOME"),
            "OOZIE_PORT": Config.get("oozie", "OOZIE_PORT"),
            "OOZIE_SECURE_HOSTNAME": Machine.getfqdn(),
            "OOZIE_FOLDER": Config.get("oozie", "OOZIE_USER"),
            "OOZIE_USER": Config.get("oozie", "OOZIE_USER"),
            "FIREFOX_PATH": Config.get("firefox", "FIREFOX_PATH"),
            "FIREFOX_DISPLAY": Config.get("firefox", "FIREFOX_DISPLAY"),
            "OOZIE_QA_REG_DIR": OOZIE_QA_REG_DIR,
            "OOZIE_QA_HADOOP_QUEUE": "default",
            "OOZIE_URL": str(oozie_server),
            "HIT_OOZIE": ((str(oozie_server)).split(":")[2]).split("/")[0],
            "LOCAL_CP": path,
            "HIT_HDFS_STORAGE_DIR": directory,
            "OOZIE_HDFS_LOG_DIR": OOZIE_HDFS_LOG_DIR
        }
    else:
        if Hadoop.isSecure():
            oozie_qa_auth = "kerberos"
            nnPrincipal = HDFS.getNameNodePrincipal()
            jtPrincipal = MAPRED.getMasterPrincipal()
            user = Config.getEnv('USER')
            kerbTicket = Machine.getKerberosTicket(user)
        else:
            oozie_qa_auth = "simple"
            nnPrincipal = "blah"
            jtPrincipal = "blah"
            kerbTicket = "blah"
        return {
            "JAVA_HOME": Config.get('machine', 'JAVA_HOME'),
            "HADOOP_HOME": Config.get('hadoop', 'HADOOP_HOME'),
            "HADOOP_CONF_DIR": Config.get('hadoop', 'HADOOP_CONF'),
            "HIT_NN": str(Hadoop.getFSDefaultValue(False)),
            "HIT_NN_PORT": str(Hadoop.getFSDefaultValue(True)).split(":")[2],
            "HIT_JT": str(MAPRED.getJobtrackerAddress()[0]),
            "HIT_JT_PORT": str(MAPRED.getJobtrackerAddress()[1]),
            "OOZIE_QA_AUTH": oozie_qa_auth,
            "OOZIE_NN_KRB_PRINCIPAL": nnPrincipal,
            "OOZIE_JT_KRB_PRINCIPAL": jtPrincipal,
            "OOZIE_TEST_SUITE": "testngRegressionSuiteDebug",
            "OOZIE_HOME": Config.get("oozie", "OOZIE_HOME"),
            "OOZIE_PORT": Config.get("oozie", "OOZIE_PORT"),
            "OOZIE_FOLDER": Config.get("oozie", "OOZIE_USER"),
            "OOZIE_USER": Config.get("oozie", "OOZIE_USER"),
            "FIREFOX_PATH": Config.get("firefox", "FIREFOX_PATH"),
            "FIREFOX_DISPLAY": Config.get("firefox", "FIREFOX_DISPLAY"),
            "OOZIE_QA_REG_DIR": OOZIE_QA_REG_DIR,
            "OOZIE_QA_HADOOP_QUEUE": "default",
            "OOZIE_URL": str(oozie_server),
            "HIT_OOZIE": ((str(oozie_server)).split(":")[2]).split("/")[0],
            "LOCAL_CP": path,
            "HIT_HDFS_STORAGE_DIR": directory,
            "KRB5CCNAME": kerbTicket,
            "OOZIE_HDFS_LOG_DIR": OOZIE_HDFS_LOG_DIR
        }
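# Usage sketch (illustrative, not from the source): export the returned map
# before launching the regression suite; directory is an HDFS path the tests
# may use for storage. Class name and path are hypothetical.
#
#   env = RuOozie.getEnvironmentVariables('/user/hrt_qa/oozie-regression')
#   for key, value in env.items():
#       os.environ[key] = value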