def updateJobProperties(cls, propFile, properties=None, haEnabled=False, debug=False):
    """Update an Oozie job properties file with cluster-specific values.

    Fills in nameNode/jobTracker (unless already supplied), hcatalog and
    Kerberos principals where applicable, rewrites a hard-coded
    hdfs://localhost:9000 workflow path to the real filesystem, and writes
    the merged properties back to propFile.

    :param propFile: path to the job .properties file to update in place
    :param properties: optional dict of pre-set properties (not overridden)
    :param haEnabled: True when namenode HA is enabled (changes principal host)
    :param debug: when True, log the final content of the properties file
    """
    fileSystemName = Hadoop.getFSDefaultValue()
    jobTrackerIP = MAPRED.getJobtrackerAddress()
    jobTracker = jobTrackerIP[0] + ":" + jobTrackerIP[1]
    if not properties:
        properties = {}
    # 'in' instead of deprecated dict.has_key(); caller-supplied values win
    if 'nameNode' not in properties:
        properties['nameNode'] = fileSystemName
    if 'jobTracker' not in properties:
        properties['jobTracker'] = jobTracker
    if "hcatalog" in propFile:
        if Hadoop.isSecure():
            kerberosPrincipal = Hive.getConfigValue("hive.metastore.kerberos.principal")
            properties['hive.metastore.kerberos.principal'] = kerberosPrincipal
        logger.info("Updating for hcatalog workflow")
        # hcatalog workflows address the metastore via the hcat:// scheme
        hcatNode = Hive.getConfigValue("hive.metastore.uris").replace('thrift', 'hcat')
        logger.info("Hcat node is " + hcatNode)
        properties['hcatNode'] = hcatNode
    if Hadoop.isSecure():
        # determine the namenode and the jobtracker principal
        if haEnabled:
            nnPrincipal = HDFS.getNameNodePrincipal().replace('_HOST', HDFS.getNamenodeByState('active'))
        else:
            nnPrincipal = HDFS.getNameNodePrincipal().replace('_HOST', HDFS.getNamenodeHttpAddress()[0])
        jtPrincipal = MAPRED.getMasterPrincipal().replace('_HOST', jobTrackerIP[0])
        properties['dfs.namenode.kerberos.principal'] = nnPrincipal
        properties['mapreduce.jobtracker.kerberos.principal'] = jtPrincipal
    wfPath = util.getPropertyValueFromFile(propFile, "oozie.wf.application.path")
    if wfPath is not None and wfPath.find("hdfs://localhost:9000") != -1:
        wfPath = wfPath.replace("hdfs://localhost:9000", fileSystemName)
        logger.info("Value of replaced oozie.wf.application.path is " + wfPath)
        properties['oozie.wf.application.path'] = wfPath
    util.writePropertiesToFile(propFile, propFile, properties)
    if debug:
        logger.info('Content of properties file %s' % propFile)
        # context manager guarantees the handle is closed (was open/close)
        with open(propFile, 'r') as f:
            logger.info(f.read())
def background_job_setup(cls, runSmokeTestSetup=True, config=None):
    '''
    Setup for background long running job for the Flume component.

    Copies the data generator, starts two Agent work dirs, creates the
    checkpoint/data directories and writes longrunning.properties with the
    HDFS sink path (and Kerberos principal/keytab on secure clusters).

    :param runSmokeTestSetup: Runs smoke test setup if set to true
    :param config: unused here; kept for signature compatibility
    '''
    from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
    UpgradePerNode.reportProgress(
        "[INFO][FLUME][BGJobSetup] Long running job setup for Flume component started"
    )
    from beaver.component.flume import Agent
    global agent1
    global agent2
    if not os.path.exists(cls._local_work_dir):
        os.mkdir(cls._local_work_dir)
    shutil.copy(cls._flume_datagen_src, cls._local_work_dir)
    agent1 = Agent(cls._local_work_dir)
    agent2 = Agent(cls._local_work_dir)
    for outdir in (cls._agent1_chkpt_dir, cls._agent1_data_dir, cls._agent2_chkpt_dir, cls._agent2_data_dir):
        os.mkdir(outdir)
    logger.info("Preparing the Flume configs for long running test")
    propertyMap = {}
    namenode = Hadoop.getFSDefaultValue()
    propertyMap['agent2.sinks.hdfsSink.hdfs.path'] = "%s%s" % (namenode, cls._hdfs_test_dir)
    if Hadoop.isSecure():
        # BUG FIX: user_realm was previously left unbound (NameError at the
        # 'if user_realm' check) when the machine section had no USER_REALM
        # and the NN principal contained no '@'.
        user_realm = ''
        if Config.hasOption('machine', 'USER_REALM'):
            user_realm = Config.get('machine', 'USER_REALM', '')
        else:
            nnKerbPrincipal = HDFS.getNameNodePrincipal(defaultValue='')
            atloc = nnKerbPrincipal.find("@")
            if atloc != -1:
                # BUG FIX: skip the '@' itself; previously the slice kept it,
                # producing an invalid 'user@@REALM' principal below.
                user_realm = nnKerbPrincipal[atloc + 1:]
        if user_realm:
            propertyMap['agent2.sinks.hdfsSink.hdfs.kerberosPrincipal'] = cls._test_user + '@' + user_realm
            propertyMap['agent2.sinks.hdfsSink.hdfs.kerberosKeytab'] = Machine.getHeadlessUserKeytab(cls._test_user)
    util.writePropertiesToFile(
        os.path.join(cls._flume_test_conf, 'longrunning.properties'), cls._flume_test_src, propertyMap
    )
def read_job_properties(cls, local_work_dir): propChanges = {} COMPONENT = Config.get('ambari', 'COMPONENT') if 'deploytest' in COMPONENT: propChanges['INSTALL_HDP'] = "true" propChanges['MR_FRAMEWORK'] = Config.get('ambari', 'MR_FRAMEWORK') propChanges['HOST'] = Config.get('ambari', 'HOST') + ":8080" propChanges['CLUSTER_NAME'] = Config.get('ambari', 'CLUSTER_NAME') propChanges['STACK'] = Config.get('ambari', 'STACK') propChanges['UMASK'] = Config.get('ambari', 'UMASK') propChanges['AMBARI_CUSTOM_USER'] = Config.get('ambari', 'AMBARI_CUSTOM_USER') propChanges['AMBARI_AGENT_USER'] = Config.get('ambari', 'AMBARI_AGENT_USER') propChanges['PWD_ENCRYPT'] = Config.get('ambari', 'PWD_ENCRYPT') propChanges['CUSTOMIZED_SERVICES_USERS'] = Config.get('ambari', 'CUSTOMIZED_SERVICES_USERS') propChanges['CUSTOM_PIDS'] = Config.get('ambari', 'CUSTOM_PIDS') propChanges['AMBARI_2WAY_SSL'] = Config.get('ambari', 'AMBARI_2WAY_SSL') propChanges['AMBARI_SERVER_HTTPS'] = Config.get('ambari', 'AMBARI_SERVER_HTTPS') propChanges['IS_TMP_NOEXEC'] = Config.get('ambari', 'IS_TMP_NOEXEC') propChanges['NUMBER_OF_BASE_HOSTS'] = Config.get('ambari', 'NUMBER_OF_BASE_HOSTS') propChanges['DN_NONROOT'] = Config.get('ambari', 'DN_NONROOT') propChanges['MOTD_ENABLE'] = Config.get('ambari', 'MOTD_ENABLE') propChanges['HDP_REPO_BASEURL'] = Config.get('ambari', 'HDP_REPO_BASEURL') propChanges['HDP_UTILS_REPO_BASEURL'] = Config.get('ambari', 'HDP_UTILS_REPO_BASEURL') propChanges['AMBARI_DB'] = Config.get('ambari', 'AMBARI_DB') propChanges['JDK_VERSION'] = Config.get('ambari', 'JDK_VERSION') propChanges['IS_SECURE'] = Config.get('machine', 'IS_SECURE') propChanges['CLIENT'] = Config.get('ambari', 'CLIENT') propChanges['CLIENT_OS'] = Config.get('ambari', 'CLIENT_OS') propChanges['CLIENT_PORT'] = Config.get('ambari', 'CLIENT_PORT') propChanges['UPGRADE_TO'] = Config.get('ambari', 'UPGRADE_TO') propChanges['STACK_UPGRADE_TO'] = Config.get('ambari', 'STACK_UPGRADE_TO') propChanges['VIDEO_RECORDING'] = 
Config.get('ambari', 'VIDEO_RECORDING') propChanges['BROWSER'] = Config.get('ambari', 'BROWSER') propChanges['AMBARI_RPM_URL'] = Config.get('ambari', 'AMBARI_VERSION') propChanges['HIVE_DB'] = Config.get('ambari', 'DATABASE_FLAVOR') propChanges['DRUID_DB'] = Config.get('ambari', 'DATABASE_FLAVOR') propChanges['BEACON_DB'] = Config.get('ambari', 'DATABASE_FLAVOR') propChanges['OOZIE_DB'] = Config.get('ambari', 'DATABASE_FLAVOR') propChanges['XA_DATABASE_FLAVOR'] = Config.get('ambari', 'XA_DATABASE_FLAVOR') propChanges['IS_HA_TEST'] = Config.get('ambari', 'IS_HA_TEST') propChanges['ENABLE_HA_COMPONENTS'] = Config.get('ambari', 'ENABLE_HA_COMPONENTS') propChanges['USE_BLUEPRINT'] = Config.get('ambari', 'USE_BLUEPRINT') propChanges['USER_KERBEROS_SERVER_TYPE'] = Config.get('ambari', 'USER_KERBEROS_SERVER_TYPE') propChanges['KERBEROS_SERVER_TYPE'] = Config.get('ambari', 'KERBEROS_SERVER_TYPE') propChanges['REALM'] = Config.get('ambari', 'REALM') propChanges['USER_REALM'] = Config.get('ambari', 'USER_REALM') propChanges['AD_SERVER_HOST'] = Config.get('ambari', 'AD_SERVER_HOST') propChanges['WIRE_ENCRYPTION'] = Config.get('ambari', 'WIRE_ENCRYPTION') propChanges['SPLIT_NUM'] = Config.get('ambari', 'SPLIT_NUM') propChanges['AMBARI_TESTSNAMES'] = Config.get('ambari', 'AMBARI_TESTSNAMES') propChanges['RUN_MARKER_LIST'] = Config.get('ambari', 'RUN_MARKER_LIST') propChanges['RUN_MARKER_VERSION'] = Config.get('ambari', 'RUN_MARKER_VERSION') propChanges['STACK_TYPE'] = Config.get('ambari', 'STACK_TYPE') propChanges['HDF_REPO_BASEURL'] = Config.get('ambari', 'HDF_REPO_BASEURL') propChanges['MANAGEMENT_PACK_LINK'] = Config.get('ambari', 'MANAGEMENT_PACK_LINK') propChanges['STREAMLINE_DB'] = Config.get('ambari', 'STREAMLINE_DB') propChanges['REGISTRY_DB'] = Config.get('ambari', 'REGISTRY_DB') propChanges['USE_BAKED_IMAGE'] = Config.get('ambari', 'USE_BAKED_IMAGE') # QE-18454 propChanges['INSTALL_MR'] = cls.convert_environment_variable_to_bool("INSTALL_MR") 
propChanges['INSTALL_ZOOKEEPER'] = cls.convert_environment_variable_to_bool("INSTALL_ZOOKEEPER") propChanges['INSTALL_AMS'] = cls.convert_environment_variable_to_bool("INSTALL_AMS") propChanges['INSTALL_KMS'] = cls.convert_environment_variable_to_bool("INSTALL_KMS") propChanges['INSTALL_NFSGATEWAYS'] = cls.convert_environment_variable_to_bool("INSTALL_NFSGATEWAYS") propChanges['INSTALL_HS_INTERACTIVE'] = cls.convert_environment_variable_to_bool("INSTALL_HS_INTERACTIVE") propChanges['INSTALL_LOGSEARCH'] = cls.convert_environment_variable_to_bool("INSTALL_LOGSEARCH") propChanges['INSTALL_FALCON'] = cls.convert_environment_variable_to_bool("INSTALL_FALCON") propChanges['INSTALL_STREAMLINE'] = cls.convert_environment_variable_to_bool("INSTALL_STREAMLINE") propChanges['INSTALL_SPARK'] = cls.convert_environment_variable_to_bool("INSTALL_SPARK") propChanges['INSTALL_PDSH'] = cls.convert_environment_variable_to_bool("INSTALL_PDSH") propChanges['INSTALL_PIG'] = cls.convert_environment_variable_to_bool("INSTALL_PIG") propChanges['INSTALL_HBASE'] = cls.convert_environment_variable_to_bool("INSTALL_HBASE") propChanges['INSTALL_TEMPLETON'] = cls.convert_environment_variable_to_bool("INSTALL_TEMPLETON") propChanges['INSTALL_OOZIE'] = cls.convert_environment_variable_to_bool("INSTALL_OOZIE") propChanges['INSTALL_HIVE'] = cls.convert_environment_variable_to_bool("INSTALL_HIVE") propChanges['INSTALL_HCAT'] = cls.convert_environment_variable_to_bool("INSTALL_HCAT") propChanges['INSTALL_SQOOP'] = cls.convert_environment_variable_to_bool("INSTALL_SQOOP") propChanges['INSTALL_SQOOP2'] = cls.convert_environment_variable_to_bool("INSTALL_SQOOP2") propChanges['INSTALL_FLUME'] = cls.convert_environment_variable_to_bool("INSTALL_FLUME") propChanges['INSTALL_STORM'] = cls.convert_environment_variable_to_bool("INSTALL_STORM") propChanges['INSTALL_KNOX'] = cls.convert_environment_variable_to_bool("INSTALL_KNOX") propChanges['INSTALL_KAFKA'] = 
cls.convert_environment_variable_to_bool("INSTALL_KAFKA") propChanges['INSTALL_HBASE_APP'] = cls.convert_environment_variable_to_bool("INSTALL_HBASE_APP") propChanges['INSTALL_STORM_APP'] = cls.convert_environment_variable_to_bool("INSTALL_STORM_APP") propChanges['INSTALL_ACCUMULO_APP'] = cls.convert_environment_variable_to_bool("NSTALL_ACCUMULO_APP") propChanges['INSTALL_PHOENIX_APP'] = cls.convert_environment_variable_to_bool("INSTALL_PHOENIX_APP") propChanges['INSTALL_MAHOUT'] = cls.convert_environment_variable_to_bool("INSTALL_MAHOUT") propChanges['INSTALL_SMARTSENSE'] = cls.convert_environment_variable_to_bool("INSTALL_SMARTSENSE") propChanges['INSTALL_PHOENIX'] = cls.convert_environment_variable_to_bool("INSTALL_PHOENIX") propChanges['INSTALL_RANGER'] = cls.convert_environment_variable_to_bool("INSTALL_XASECURE") propChanges['INSTALL_SLIDER'] = cls.convert_environment_variable_to_bool("INSTALL_SLIDER") propChanges['INSTALL_ACCUMULO'] = cls.convert_environment_variable_to_bool("INSTALL_ACCUMULO") propChanges['INSTALL_HUE'] = cls.convert_environment_variable_to_bool("INSTALL_HUE") propChanges['INSTALL_ATLAS'] = cls.convert_environment_variable_to_bool("INSTALL_ATLAS") propChanges['INSTALL_HDF'] = cls.convert_environment_variable_to_bool("INSTALL_HDF") propChanges['INSTALL_NIFI'] = cls.convert_environment_variable_to_bool("INSTALL_NIFI") propChanges['INSTALL_AMBARI_INFRA'] = cls.convert_environment_variable_to_bool("INSTALL_AMBARI_INFRA") propChanges['INSTALL_JNBG'] = cls.convert_environment_variable_to_bool("INSTALL_JNBG") propChanges['INSTALL_R4ML'] = cls.convert_environment_variable_to_bool("INSTALL_R4ML") propChanges['INSTALL_TITAN'] = cls.convert_environment_variable_to_bool("INSTALL_TITAN") propChanges['INSTALL_SOLR'] = cls.convert_environment_variable_to_bool("INSTALL_SOLR") propChanges['INSTALL_REGISTRY'] = cls.convert_environment_variable_to_bool("INSTALL_REGISTRY") propChanges['INSTALL_ZEPPELIN'] = 
cls.convert_environment_variable_to_bool("INSTALL_ZEPPELIN") propChanges['INSTALL_SPARK2'] = cls.convert_environment_variable_to_bool("INSTALL_SPARK2") propChanges['INSTALL_DRUID'] = cls.convert_environment_variable_to_bool("INSTALL_DRUID") propChanges['INSTALL_BEACON'] = cls.convert_environment_variable_to_bool("INSTALL_BEACON") propChanges['MR_FRAMEWORK'] = cls.convert_environment_variable_to_bool("MR_FRAMEWORK") propChanges['INSTALL_LZO'] = cls.convert_environment_variable_to_bool("INSTALL_LZO") propChanges['ENABLE_KNOX_SSO'] = cls.convert_environment_variable_to_bool("ENABLE_KNOX_SSO") # QE-19395 logger.info("the prop changes" + str(propChanges)) ADDITIONAL_AMBARI_PROPS = Config.get('ambari', 'ADDITIONAL_AMBARI_PROPS') if ADDITIONAL_AMBARI_PROPS: parameter_map = ADDITIONAL_AMBARI_PROPS.split(",") for parameter in parameter_map: key_value = parameter.split("=") key = key_value[0] value = key_value[1] print "Reading key :%s = value :%s" % (key, value) propChanges[key] = value if 'sanity' in COMPONENT and not 'sanity-preupgrade' in COMPONENT or 'postupg-sec-enable' in COMPONENT: stack_upgrade_to = Config.get('ambari', 'STACK_UPGRADE_TO') if stack_upgrade_to is not None and len(stack_upgrade_to) > 0: propChanges['STACK'] = Config.get('ambari', 'STACK_UPGRADE_TO') if not propChanges['UPGRADE_TO'] is None: logger.info("Check value " + propChanges['UPGRADE_TO']) # If the gateway is not deployed, change /root/hosts to have only those hosts that are to be part of gateway cluster if CommonLib.update_hosts_for_deployed_clusters(propChanges): propChanges['INSTALL_HDP'] = "true" util.writePropertiesToFile( os.path.join(local_work_dir, 'ambari.properties'), os.path.join(local_work_dir, 'ambari.properties'), propChanges ) log_prop_changes = {} log_prop_changes['log4j.appender.UIFRM.File'] = os.path.join(local_work_dir, "uifrm.log") util.writePropertiesToFile( os.path.join(local_work_dir, 'log4j.properties'), os.path.join(local_work_dir, 'log4j.properties'), log_prop_changes ) 
return propChanges
def perform_express_upgrade(self):
    """Drive an Ambari Express Upgrade (EU) test run.

    Copies the api test framework into the artifacts dir, populates its
    config.properties, selects a .suite file based on STACK_TYPE/COMPONENT,
    runs each test class of the current split via maven (aborting remaining
    tests if the upgrade itself fails), and collects surefire reports.
    """
    COMPONENT = str(self.COMPONENT)
    STACK_TYPE = str(self.STACK_TYPE)
    env = {}
    env['DISPLAY'] = self.DISPLAY
    # Update pom.xml of uifrm with Markers - applies for tests that involve
    # a combination of API and UI tests run within the same split
    ambariutils.update_pom_xml_with_markers(self.LOCAL_WORK_DIR)
    SRC_DIR = os.path.join(Config.getEnv('WORKSPACE'), 'apitestframework')
    print "SRC_DIR = ", SRC_DIR
    # Change the current directory for api test code
    LOCAL_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'), COMPONENT, 'apitestframework')
    # Copy the ambari api test code to artifacts dir
    shutil.copytree(SRC_DIR, LOCAL_WORK_DIR)
    # Change the permission of openstack-keypair file (0400: owner read-only)
    os.chmod(os.path.join(LOCAL_WORK_DIR, 'src/resources/openstack-keypair'), 0400)
    # populate the config properties file for api test code
    configPropChanges = ambariutils.updateProperties()
    # Change the current directory for api test code
    LOCAL_WORK_DIR = os.path.join(
        Config.getEnv('ARTIFACTS_DIR'), COMPONENT, 'apitestframework', 'src', 'resources'
    )
    util.writePropertiesToFile(
        os.path.join(LOCAL_WORK_DIR, 'config.properties'),
        os.path.join(LOCAL_WORK_DIR, 'config.properties'), configPropChanges
    )
    # Change the current directory for api test code
    LOCAL_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'), COMPONENT, 'apitestframework')
    testSuiteLocation = "src/test/suites"
    # Check what is the STACK_TYPE. Based on the stack type we will decide
    # which test suite to select
    logger.info(
        "STACK TYPE is " + STACK_TYPE + ". So going into " + STACK_TYPE + " if block for test suite "
        "selection"
    )
    if STACK_TYPE == "HDF":
        # Setup for HDF EU. We can now add an else method to handle ru
        if 'ambarieu-hdf-downgrade' in COMPONENT:
            testSuiteFile = "ambarieu-hdf-downgrade.suite"
        elif 'ambarieu-hdf' in COMPONENT:
            testSuiteFile = "ambarieu-hdf.suite"
        # NOTE(review): testSuiteFile is unbound here (NameError below) if
        # COMPONENT matches neither branch - confirm callers guarantee one.
        logger.info("Setting HDF testsuite file to " + testSuiteFile)
    elif STACK_TYPE == "HDP" and "ambarieu-hdp-hdf" in COMPONENT:
        testSuiteFile = "ambarieu-hdp-hdf.suite"
    else:
        if 'e2e' not in COMPONENT:
            # These are system tests so use suite filename sent from json file
            testSuiteFile = COMPONENT + ".suite"
        else:
            if 'dwngd' in COMPONENT:
                # For EU E2E downgrade runs
                testSuiteFile = "ambari-expressupgrade-downgrade-e2e.suite"
            elif 'mlevel' in COMPONENT:
                # For EU E2E multi-level paths runs
                testSuiteFile = "ambari-expressupgrade-mlevel-e2e.suite"
            elif 'denygpl' in COMPONENT:
                # For Deny GPL tests in EU E2E
                testSuiteFile = "ambarieu-denygpl.suite"
            elif 'wkflow' in COMPONENT:
                # For EU E2E workflow suite runs
                testSuiteFile = "ambari-expressupgrade-wkflow-e2e.suite"
            elif 'iopmigration' in COMPONENT:
                # Added for IOP migration E2E tests
                testSuiteFile = "ambari-iopmigration-e2e.suite"
            elif 'iopintg' in COMPONENT:
                # Added for IOP integration tests
                testSuiteFile = "ambarieu-iopintg-e2e.suite"
            elif 'patchupgradeintg-ru' in COMPONENT:
                # Added for RU PU integration tests
                testSuiteFile = "ambarieu-patchupgradeintg-ru-e2e.suite"
            elif 'patchupgradeintg-thirddigit' in COMPONENT:
                # Added for EU PU 3rd digit integration tests
                testSuiteFile = "ambarieu-patchupgradeintg-thirddigit-e2e.suite"
            elif 'patchupgradeintg-revert' in COMPONENT:
                # Added for EU PU integration tests with revert
                testSuiteFile = "ambarieu-patchupgradeintg-revert-e2e.suite"
            elif 'patchupgradeintg' in COMPONENT:
                # Added for EU PU integration tests
                testSuiteFile = "ambarieu-patchupgradeintg-e2e.suite"
            elif 'experiment' in COMPONENT:
                # Added for full EU integration tests
                testSuiteFile = "ambari-expressupgrade-experiment-e2e.suite"
            else:
                # Default for EU E2E runs
                testSuiteFile = "ambari-expressupgrade-upgrade-e2e.suite"
    logger.info("Opening test suite file : " + testSuiteFile + " for test execution")
    file = open(os.path.join(LOCAL_WORK_DIR, testSuiteLocation, testSuiteFile))
    testSuite = json.load(file)
    file.close()
    # magic word to use as key in suite file
    magic = "split" + self.splitNumStr
    print "magic key is : ", magic
    if 'experiment' in COMPONENT or 'patchupgradeintg' in COMPONENT or 'iopintg' in COMPONENT:
        # these runs always execute the whole suite from split1
        magic = "split1"
        print "magic key for experiment EU/PU run is : ", magic
    # Update pom.xml for API framework with Markers info
    ambariutils.update_pom_xml_with_markers(LOCAL_WORK_DIR)
    upgrade_test_results = {}
    # Iterate over the list of all test classes in the split and execute them
    logger.info("=====Starting Express Upgrade tests=========")
    if testSuite.has_key(magic):
        for testName in testSuite[magic]:
            # test class may live in the uifrm tree instead of apitestframework
            if not ambariutils.isTestClassPresent(testName, LOCAL_WORK_DIR):
                LOCAL_WORK_DIR = ambariutils.switchDirectory(LOCAL_WORK_DIR, COMPONENT)
                logger.info('LOCAL_WORK_DIR %s ', LOCAL_WORK_DIR)
            logger.info('================Running %s with maven===============' % (testName))
            self.LOCAL_WORK_DIR = LOCAL_WORK_DIR
            exit_code, stdout = self.Maven2runas(
                ' -Dtest=%s -DfailIfNoTests=false test' % testName, cwd=self.LOCAL_WORK_DIR, env=env, user='******'
            )
            UpgradeLogger.reportProgress('================Finished %s ========================' % (testName), True)
            logger.info('Exit code of the test: %s ' % (exit_code))
            if exit_code != 0:
                upgrade_test_results.update(UpgradeLogger.get_stack_trace(testName, LOCAL_WORK_DIR))
                UpgradeLogger.reportProgress("Test failure encountered: %s" % (testName), False)
                # Do not run any further tests if Upgrade itself has failed
                if self.is_upgrade_executed():
                    if not self.is_stack_upgrade_success():
                        UpgradeLogger.reportProgress('Express Upgrade failed, aborting rest of the tests', False)
                        break
                else:
                    UpgradeLogger.reportProgress(
                        'Error(s) in steps before starting Upgrade, aborting rest of the tests', False
                    )
                    break
    else:
        print "================Not correct test suite format========================"
    if len(upgrade_test_results) > 0:
        UpgradeLogger.reportProgress("=====List of failed test(s)=====\n", False)
        for key, value in upgrade_test_results.items():
            UpgradeLogger.reportProgress(key, False)
        UpgradeLogger.reportProgress("=====Error details for failed test(s)=====", False)
        for key, value in upgrade_test_results.items():
            UpgradeLogger.reportProgress("Test:%s AND Failure details:\n %s" % (key, value), False)
        UpgradeLogger.reportProgress("=======================================================", True)
    else:
        UpgradeLogger.reportProgress("=====Express Upgrade test(s) completed successfully=========", True)
    # Gather reports for tests executed from apitestframework dir
    LOCAL_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'), COMPONENT, 'apitestframework')
    Machine.runas('root', 'chmod -R 777 ' + LOCAL_WORK_DIR)
    uifrmReportDirectory = os.path.join(LOCAL_WORK_DIR, '..', 'target/surefire-reports')
    if not os.path.exists(uifrmReportDirectory):
        Machine.runas('root', 'mkdir -p ' + uifrmReportDirectory)
        Machine.runas('root', 'chmod -R 777 ' + uifrmReportDirectory)
        logger.info('Created path for reporting')
    Machine.runas('root', 'chmod -R 777 ' + os.path.join(LOCAL_WORK_DIR, '..', 'target'))
    apiReportDirectory = os.path.join(LOCAL_WORK_DIR, 'target', 'surefire-reports', 'junitreports')
    if os.path.exists(apiReportDirectory):
        files = os.listdir(os.path.join(LOCAL_WORK_DIR, 'target', 'surefire-reports', 'junitreports'))
        for file in files:
            shutil.copy(
                os.path.join(LOCAL_WORK_DIR, 'target', 'surefire-reports', 'junitreports', file),
                os.path.join(LOCAL_WORK_DIR, '..', 'target', 'surefire-reports')
            )
    # Switch back to uifrm dir for reporting purposes
    LOCAL_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'), COMPONENT)
    self.LOCAL_WORK_DIR = LOCAL_WORK_DIR
def modifyConfig(  # pylint: disable=redefined-builtin
        changes, confDir, updatedConfDir, nodes, isFirstUpdate=True, makeCurrConfBackupInWindows=True, id=None):
    '''
    Modifies hadoop config or config with similar structure. Returns None.

    Linux:
    1. Create tmpModifyConfDir_<time> in artifacts dir based on source config directory in gateway
    2. Modify contents in created directory.
    3. Copy the directory to /tmp/hadoopConf in target machines

    Windows:
    1. If makeCurrConfBackupInWindows is True, backup current config first.
       Copy current config to artifacts/HDPStackBackupConfig
    2. Create tmpModifyConfDir_<time> in gateway.
    3. Modify contents in created directory.
    4. Copy the directory to target machines. Replace config in default locations in remote machines.

    Calling modifyConfig twice, changes will be cumulative.

    :param changes: {filename: values}; values is a dict for property-style files
                    and a list of lines for script-style files.
    :param isFirstUpdate: when True a fresh tmp dir is created and seeded from
                          confDir; when False the previous one is reused, making
                          repeated calls cumulative.
    '''
    backuploc = getBackupConfigLocation(id=id)
    if Machine.type() == 'Windows' and makeCurrConfBackupInWindows:
        # clean up the previous backup before taking a fresh one
        Machine.rm(None, Machine.getfqdn(), backuploc, isdir=True, passwd=None)
        util.copyReadableFilesFromDir(confDir, backuploc)
    if isFirstUpdate:
        tmpConfDir = os.path.join(ARTIFACTS_DIR, 'tmpModifyConfDir_' + str(int(round(time.time() * 1000))))
        Config.set(PYTHON_CONFIG_NAME, TMP_CONF_DIR_VAR, tmpConfDir, overwrite=True)
    # always resolve the tmp dir from config so subsequent calls accumulate
    tmpConfDir = Config.get(PYTHON_CONFIG_NAME, TMP_CONF_DIR_VAR)
    if isFirstUpdate:
        util.copyReadableFilesFromDir(confDir, tmpConfDir)
    for filename, values in changes.items():
        filepath = os.path.join(tmpConfDir, filename)
        if not os.path.isfile(filepath):
            continue
        logger.info("Modifying file: %s", filepath)
        fext = os.path.splitext(filepath)[1]
        if fext == ".xml":
            util.writePropertiesToConfigXMLFile(filepath, filepath, values)
        elif fext == ".json":
            util.writePropertiesToConfigJSONFile(filepath, filepath, values, ["global"], "site.hbase-site.")
        elif fext in (".properties", ".cfg"):
            # both extensions share identical key=value handling
            util.writePropertiesToFile(filepath, filepath, values)
        elif fext == ".conf":
            util.writePropertiesToConfFile(filepath, filepath, values)
        elif fext == ".ini":
            # e.g. shiro.ini: {'section:prop': 'val'}
            util.writePropertiesToIniFile(filepath, filepath, values)
        elif fext in (".sh", ".cmd"):
            # append each entry on its own new line
            util.writeToFile("".join("\n" + value for value in values), filepath, isAppend=True)
        elif fext == ".yaml":
            # NOTE(review): pairs are concatenated without newlines - preserved
            # as-is; confirm whether a newline separator was intended.
            util.writeToFile("".join(k + " : " + v for k, v in values.iteritems()), filepath, isAppend=True)
        elif fext in ("", ".include"):
            # extension-less files (e.g. 'slaves'): newline-joined lines
            util.writeToFile("\n".join(values), filepath, isAppend=True)
    if Machine.type() == 'Windows':
        # in windows world copy the configs back to the src location
        for node in nodes:
            for filename in changes.keys():
                Machine.copyFromLocal(
                    None, node, os.path.join(tmpConfDir, filename), os.path.join(confDir, filename), passwd=None)
    else:
        for node in nodes:
            Machine.rm(
                user=Machine.getAdminUser(), host=node, filepath=updatedConfDir, isdir=True,
                passwd=Machine.getAdminPasswd())
            Machine.copyFromLocal(None, node, tmpConfDir, updatedConfDir)
def modifyConfigRemote(changes, OriginalConfDir, ConfDir, nodes, id=None):  # pylint: disable=redefined-builtin
    '''
    Modifies hadoop config or config with similar structure on remote nodes. Returns None.

    For each target node:
    1. Pull its original config dir into a fresh tmpModifyConfDir_<time> under the artifacts dir.
    2. Apply `changes` to the local copy, dispatching on each file's extension.
    3. Push the patched directory back to <ConfDir> on the node and open up permissions.
    '''
    _backuploc = getBackupConfigLocation(id=id)
    stamp = str(int(round(time.time() * 1000)))
    tmpConfDir = os.path.join(ARTIFACTS_DIR, 'tmpModifyConfDir_' + stamp)
    Config.set(PYTHON_CONFIG_NAME, TMP_CONF_DIR_VAR, tmpConfDir, overwrite=True)
    tmpConfDir = Config.get(PYTHON_CONFIG_NAME, TMP_CONF_DIR_VAR)
    for node in nodes:
        Machine.rm(Machine.getAdminUser(), node, ConfDir, isdir=True)
        Machine.rm(Machine.getAdminUser(), Machine.getfqdn(), tmpConfDir, isdir=True)
        logger.info("*** COPY ORIGINAL CONFIGS FROM REMOTE TO LOCAL ***")
        Machine.copyToLocal(None, node, OriginalConfDir, tmpConfDir)
        for fname, values in changes.items():
            target = os.path.join(tmpConfDir, fname)
            if not os.path.isfile(target):
                continue
            logger.info("Modifying file locally: %s", target)
            ext = os.path.splitext(target)[1]
            if ext == ".xml":
                util.writePropertiesToConfigXMLFile(target, target, values)
            elif ext == ".json":
                util.writePropertiesToConfigJSONFile(target, target, values, ["global"], "site.hbase-site.")
            elif ext in (".properties", ".cfg"):
                util.writePropertiesToFile(target, target, values)
            elif ext == ".conf":
                util.writePropertiesToConfFile(target, target, values)
            elif ext in (".sh", ".cmd"):
                # append each entry on its own new line
                util.writeToFile("".join("\n" + line for line in values), target, isAppend=True)
            elif ext == ".yaml":
                # key/value pairs are concatenated exactly as before
                util.writeToFile("".join(k + " : " + v for k, v in values.items()), target, isAppend=True)
            elif ext in ("", ".include"):
                util.writeToFile("\n".join(values), target, isAppend=True)
        logger.info("****** Copy back the configs to remote ******")
        Machine.copyFromLocal(None, node, tmpConfDir, ConfDir)
        Machine.chmod('777', ConfDir, recursive=True, host=node)