Example #1
    def get_repo_base_url(cls):
        """

        :return: Get the location of the base url for the repo. If it runs on an OS that is not supported or on windows this will return None.
        """
        if not cls._repo_base_url:
            repo_file = cls.get_repo_file()
            if repo_file:
                # QE-16037: Make sure file is readable.
                Machine.chmod("755", repo_file, False, Machine.getAdminUser(),
                              None, Machine.getAdminPasswd())
                with open(repo_file, 'r') as f:
                    content = f.read()

                if Machine.isCentOs() or Machine.isSuse():
                    m = re.search('^baseurl=(.*HDF.*)', content, re.MULTILINE)
                    if m and m.group(1):
                        cls._repo_base_url = m.group(1)
                elif Machine.isDebian() or Machine.isUbuntu():
                    m = re.search('^deb (.*) HDF main', content, re.MULTILINE)
                    if m and m.group(1):
                        cls._repo_base_url = m.group(1)

        return cls._repo_base_url
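
A minimal, self-contained sketch of the two extraction patterns above, run against representative repo file contents (the sample URLs are hypothetical):

    import re

    yum_content = "[HDF-3.1]\nbaseurl=http://repo.example.com/HDF/centos7/3.1\nenabled=1\n"
    deb_content = "deb http://repo.example.com/HDF/ubuntu16 HDF main\n"

    # yum/zypper style: the URL is the value of the baseurl key
    m = re.search('^baseurl=(.*HDF.*)', yum_content, re.MULTILINE)
    assert m.group(1) == 'http://repo.example.com/HDF/centos7/3.1'

    # apt style: the URL sits between "deb" and the suite/component fields
    m = re.search('^deb (.*) HDF main', deb_content, re.MULTILINE)
    assert m.group(1) == 'http://repo.example.com/HDF/ubuntu16'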
Example #2
    def collect_application_logs_for_failed_aborted_tests(cls):
        """
        Collect application logs for failed and aborted tests
        1) Read test_case_status.log to collect failed and aborted tests
        2) Gather Jobsummary log files for all RMs
        3) List the applicationIds for failed and aborted tests
        4) Gather logs
        :return:
        """
        curr_component = util.get_TESTSUITE_COMPONENT()
        logger.info(curr_component)
        m = re.search(FALCON_STR, curr_component)
        isFalcon = bool(m)

        LocalJobSummaryLogs = cls.get_local_job_summary_logs(curr_component)
        logger.info(LocalJobSummaryLogs)

        testcase_map = cls.get_failed_aborted_test_with_timestamp()
        logger.info(testcase_map)

        if not Machine.pathExists(None, None, cls.LOCAL_TMP_APP_STORAGE, None):
            Machine.makedirs(Machine.getAdminUser(),
                             None, cls.LOCAL_TMP_APP_STORAGE,
                             Machine.getAdminPasswd())
            Machine.chmod("777", cls.LOCAL_TMP_APP_STORAGE, False,
                          Machine.getAdminUser(), None,
                          Machine.getAdminPasswd(), True)

        for testcase in testcase_map:
            testcase_data = testcase_map[testcase]
            cls.gather_application_log_from_JobSummary(
                LocalJobSummaryLogs,
                testcase_data["start_time"],
                testcase_data["end_time"],
                isFalcon=isFalcon)
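
The loop above only needs a start/end window per test. A hypothetical illustration of the mapping shape returned by get_failed_aborted_test_with_timestamp() (the timestamp format is an assumption):

    # Hypothetical testcase_map shape; only start_time/end_time are used above.
    testcase_map = {
        "test_streaming_job": {
            "start_time": "2018-01-01 10:00:00",  # assumed timestamp format
            "end_time": "2018-01-01 10:05:00",
        },
    }
    for name, data in testcase_map.items():
        window = (data["start_time"], data["end_time"])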
Example #3
    def background_job_setup(cls, runSmokeTestSetup=True, config=None):
        '''
        Upload data to HDFS before the upgrade starts:
        creates the /user/hrt_qa/test_rollingupgrade dir on HDFS and
        uploads 20 files to it.
        '''
        if not cls._base_hdfs_dir:
            cls._base_hdfs_dir = '/user/%s/test_rollingupgrade' % Config.get(
                'hadoop', 'HADOOPQA_USER')
        exit_code, stdout = HDFS.createDirectory(cls._base_hdfs_dir,
                                                 force=True)
        ruAssert("HDFS", exit_code == 0,
                 '[BGJobSetup] could not create dir on hdfs.')
        LOCAL_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'),
                                      'HDFS_RU_TEST')
        localTestWorkDir1 = os.path.join(LOCAL_WORK_DIR, "Temp_data")
        HadoopJobHelper.runCustomWordWriter(LOCAL_WORK_DIR, localTestWorkDir1,
                                            20, 40, 1000)
        HDFS.copyFromLocal(os.path.join(localTestWorkDir1, "*"),
                           cls._base_hdfs_dir)

        # set up for loadGenerator
        cls._lgTestDataDir = cls._base_hdfs_dir + '/testData'
        cls._lgTestOutputDir = cls._base_hdfs_dir + '/lg_job'
        cls._lgStructureDir = Machine.getTempDir() + "/structure"
        # test dir setup
        HDFS.deleteDirectory(cls._lgTestDataDir)
        HDFS.deleteDirectory(cls._lgTestOutputDir)
        command = "rm -rf " + cls._lgStructureDir
        exit_code, stdout = Machine.runas(Machine.getAdminUser(), command,
                                          None, None, None, "True",
                                          Machine.getAdminPasswd())
        command = "mkdir " + cls._lgStructureDir
        exit_code, stdout = Machine.runas(None, command, None, None, None,
                                          "True", None)
        Machine.chmod("777", cls._lgStructureDir, "True",
                      Machine.getAdminUser(), None, Machine.getAdminPasswd())

        HADOOP_TEST_JAR = cls.get_hadoop_test_jar()
        TEST_USER = Config.get('hadoop', 'HADOOPQA_USER')
        # structure generator
        jobCmd = 'jar %s NNstructureGenerator -maxDepth 5 -minWidth 2 -maxWidth 5 -numOfFiles 100 -avgFileSize 3 -outDir %s' % (
            HADOOP_TEST_JAR, cls._lgStructureDir)
        exit_code, stdout = Hadoop.run(jobCmd)
        ruAssert("HDFS", exit_code == 0,
                 "[BGJobSetup] StructureGenerator failed")
        # data generator
        jobCmd = 'jar %s NNdataGenerator -inDir %s -root %s' % (
            HADOOP_TEST_JAR, cls._lgStructureDir, cls._lgTestDataDir)
        exit_code, stdout = Hadoop.run(jobCmd)
        ruAssert("HDFS", exit_code == 0, "[BGJobSetup] DataGenerator failed")

        if runSmokeTestSetup:
            logger.info("**** Running HDFS Smoke Test Setup ****")
            cls.smoke_test_setup()
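
As a side note, the rm/mkdir/chmod sequence issued through Machine.runas above is equivalent, for a purely local run, to this standard-library sketch (the path stands in for cls._lgStructureDir):

    import os
    import shutil

    structure_dir = '/tmp/structure'                  # hypothetical path
    shutil.rmtree(structure_dir, ignore_errors=True)  # rm -rf <dir>
    os.makedirs(structure_dir)                        # mkdir <dir>
    os.chmod(structure_dir, 0o777)                    # chmod 777 <dir>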
Example #4
 def get_local_job_summary_logs(cls, component):
     """
     Copy Job_summary Logs to local dirs [artifacts/job_summary_local.log]
     parameter: component : Component name for which log collection is taking place
     return: List of Local copies of Job summary log
     Note: Some components need special handling where there are multiple Job Summary Log files
           such as HA and Falcon
     """
     LocalJobSummaryLogs = []
     try:
         if component == FALCON_STR:
             from beaver.component.falcon import Falcon  # pylint: disable=redefined-outer-name
             host1 = Falcon.get_cluster_1_masters()['rm']
             host2 = Falcon.get_cluster_2_masters()['rm']
             host3 = Falcon.get_cluster_3_masters()['rm']
             for host in [host1, host2, host3]:
                 JobSummaryLog = ComponentLogUtil.MAPRED_getJobSummaryLogFile(
                     host)
                 LocalJobSummaryLog = os.path.join(
                     Config.getEnv('ARTIFACTS_DIR'),
                     "jobsummary_" + host + ".log")
                 Machine.copyToLocal(None, host, JobSummaryLog,
                                     LocalJobSummaryLog, None)
                 if Machine.pathExists(None,
                                       None,
                                       LocalJobSummaryLog,
                                       passwd=None):
                     LocalJobSummaryLogs.append(LocalJobSummaryLog)
         else:
             for host in ComponentLogUtil.YARN_getRMHANodes():
                 JobSummaryLog = ComponentLogUtil.MAPRED_getJobSummaryLogFile(
                     host)
                 LocalJobSummaryLog = os.path.join(
                     Config.getEnv('ARTIFACTS_DIR'),
                     "jobsummary_" + host + ".log")
                 Machine.copyToLocal(Machine.getAdminUser(), host,
                                     JobSummaryLog, LocalJobSummaryLog,
                                     Machine.getAdminPasswd())
                 Machine.chmod("777",
                               LocalJobSummaryLog,
                               user=Machine.getAdminUser(),
                               passwd=Machine.getAdminPasswd())
                 if Machine.pathExists(Machine.getAdminUser(),
                                       None,
                                       LocalJobSummaryLog,
                                       passwd=Machine.getAdminPasswd()):
                     LocalJobSummaryLogs.append(LocalJobSummaryLog)
         return LocalJobSummaryLogs
     except Exception as e:
         logger.info("Exception occurs at job_summary_log collection %s", e)
         tb = traceback.format_exc()
         logger.info(tb)
         return LocalJobSummaryLogs
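
A hypothetical call site (the LogCollector class name is an assumption standing in for whatever utility class hosts this classmethod):

    # Collect RM job summary logs for a non-Falcon component and report
    # where the local copies landed.
    local_logs = LogCollector.get_local_job_summary_logs("MAPREDUCE")
    for path in local_logs:
        logger.info("job summary log copied to %s", path)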
Example #5
 def create_Logs_directories(  # pylint: disable=redefined-builtin,unused-argument
         cls,
         logHost,
         logHostUser,
         logHostBasePath,
         passwd=None,
         logoutput=False,
         type="all"):
      '''
      Create log directories for log collection.
      type : Choose the type of directory to create
             if type == "service-logs", <logHostBasepath>/service-logs dir will be created on logHost
             if type == "app-logs", <logHostBasepath>/app-logs dir will be created on logHost
             if type == "test-logs", <logHostBasepath>/test-logs dir will be created on logHost
             if type == "artifacts", <logHostBasepath>/artifacts dir will be created on logHost
             if type == "jenkins-logs", <logHostBasepath>/jenkins-logs dir will be created on logHost
             if type == "all", all of the above directories will be created.
      Returns None.
      '''
     cls.get_log_aggregation_Dir_Locations()
     # create temp dir in gateway for app logs
     if not Machine.pathExists(None, logHost, cls.LOCAL_TMP_CLUSTER_DIR,
                               None):
         Machine.makedirs(Machine.getAdminUser(),
                          None, cls.LOCAL_TMP_CLUSTER_DIR,
                          Machine.getAdminPasswd())
         Machine.chmod("777", cls.LOCAL_TMP_CLUSTER_DIR, False,
                       Machine.getAdminUser(), None,
                       Machine.getAdminPasswd(), True)
     if not Machine.pathExists(None, None, cls.LOCAL_TMP_APP_STORAGE, None):
         Machine.makedirs(Machine.getAdminUser(),
                          None, cls.LOCAL_TMP_APP_STORAGE,
                          Machine.getAdminPasswd())
         Machine.chmod("777", cls.LOCAL_TMP_APP_STORAGE, False,
                       Machine.getAdminUser(), None,
                       Machine.getAdminPasswd(), True)
     cls.createDirInternal(logHost,
                           logHostUser,
                           logHostBasePath,
                           passwd,
                           logoutput=logoutput)
     # create base dirs in log server
     cls.createDirInternal(logHost,
                           logHostUser,
                           logHostBasePath + "/" + "artifacts",
                           passwd,
                           logoutput=logoutput)
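
A hypothetical invocation creating the artifacts directory on a remote log server (host, user, base path, and the LogCollector class name are made up):

    LogCollector.create_Logs_directories(
        logHost="logserver.example.com",
        logHostUser="hrt_qa",
        logHostBasePath="/grid/0/logcollection/run-42",
        passwd=None,
        logoutput=True,
        type="artifacts")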
Example #6
 def getConfigValue(cls, propertyValue, defaultValue=None):
     _localHiveConf = os.path.join(Config.getEnv('ARTIFACTS_DIR'), cls._localConfDir)
     if not os.path.exists(_localHiveConf):
         admin_user = Machine.getAdminUser()
         Machine.copyToLocal(admin_user, cls.getHiveHost(), cls._configDir, _localHiveConf)
         Machine.chmod('777', _localHiveConf, recursive=True, user=admin_user)
      if propertyValue in cls._modifiedConfigs:
         return cls._modifiedConfigs[propertyValue]
     value = util.getPropertyValueFromConfigXMLFile(
         os.path.join(_localHiveConf, 'hiveserver2-site.xml'), propertyValue
     )
     if value is None or value == '':
         value = util.getPropertyValueFromConfigXMLFile(
             os.path.join(_localHiveConf, 'hive-site.xml'), propertyValue, defaultValue=defaultValue
         )
     return value
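
A hypothetical lookup (assuming the enclosing class is the Hive wrapper): the property is read from hiveserver2-site.xml first, then hive-site.xml, then the caller-supplied default.

    timeout = Hive.getConfigValue('hive.server2.idle.session.timeout',
                                  defaultValue='7d')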
Example #7
    def createLocalCSVDataFile(cls, numMB, filePath, fileName):
        """
         File will be created in test_IndexTable folder
         Max size = 4GB
        """
        cities = ['San Jose', 'San Francisco', 'Santa Clara']
        if numMB > 1024 * 4:
            logger.info(
                "--- Creating CSV data file of 4096 MB (max limit for this function), "
                "instead of requested %s MB file ---", numMB)
            numMB = 1024 * 4
        # if the file or the path does not exist, we create it
        if not Machine.pathExists(Machine.getAdminUser(), Machine.getfqdn(),
                                  filePath, Machine.getAdminPasswd()):
            Machine.makedirs(Machine.getAdminUser(), Machine.getfqdn(),
                             filePath, Machine.getAdminPasswd())

        if not Machine.pathExists(
                Machine.getAdminUser(), Machine.getfqdn(),
                os.path.join(os.getcwd(), filePath, fileName),
                Machine.getAdminPasswd()):
            Machine.create_file(numMB, os.path.join(os.getcwd(), filePath),
                                fileName)
            Machine.chmod('777',
                          os.path.join(filePath, fileName),
                          user=Machine.getAdminUser(),
                          host=Machine.getfqdn(),
                          passwd=Machine.getAdminPasswd())

        # Write the generated rows to the file.
        insertFile = open(os.path.join(os.getcwd(), filePath, fileName), 'w')
        for j in range(numMB):
            query = ''
            for i in range(1, 11000):
                # 1MB more or less equals 10999 rows of these records.
                randInt = str(i + (j * 10999))
                firstName = 'first_name%s' % randInt
                secondName = 'last_name%s' % randInt
                city = random.choice(cities)
                query += "%s,%s,%s,%s\n" % (str(randInt), firstName,
                                            secondName, city)
            insertFile.write(query)
        insertFile.close()
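
For reference, a single generated row can be reproduced in isolation; with i=1 and j=0 the row id is 1:

    import random

    cities = ['San Jose', 'San Francisco', 'Santa Clara']
    i, j = 1, 0
    randInt = str(i + (j * 10999))
    row = "%s,%s,%s,%s\n" % (randInt, 'first_name%s' % randInt,
                             'last_name%s' % randInt, random.choice(cities))
    # row == '1,first_name1,last_name1,San Jose\n' (city varies per call)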
Example #8
 def modifyConfig(cls,
                  changes,
                  nodeSelection,
                  isFirstUpdate=True,
                  host=None,
                  tmpConf=None):
     nodes = HBase.getSelectedNodes(nodeSelection, host)
     phoenix_conf = os.path.join(Config.get('phoenix', 'PHOENIX_HOME'),
                                 'bin')
     if tmpConf is None:
         tmp_conf = os.path.join(Machine.getTempDir(), 'phoenixConf')
     else:
         tmp_conf = tmpConf
      # Open up permissions on the temporary conf dir before modifying it
     Machine.chmod('777',
                   tmp_conf,
                   user=Machine.getAdminUser(),
                   host=host,
                   passwd=Machine.getAdminPasswd())
     configUtils.modifyConfig(changes, phoenix_conf, tmp_conf, nodes,
                              isFirstUpdate)
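
A hypothetical call: the changes mapping and node selection shapes below are assumptions based on how configUtils.modifyConfig and HBase.getSelectedNodes are used elsewhere.

    # Bump the Phoenix query timeout on the selected nodes (values made up).
    changes = {'hbase-site.xml': {'phoenix.query.timeoutMs': '120000'}}
    Phoenix.modifyConfig(changes, nodeSelection={'services': ['all']})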
Example #9
    def run(self, randomwriter_bytes="10", local_dir_name="small_rw_jobs"):  # pylint: disable=unused-argument
        local_dir = os.path.join(Config.getEnv('ARTIFACTS_DIR'),
                                 self.local_dir_name)

        if not Machine.pathExists(None, None, local_dir, passwd=None):
            Machine.makedirs(None, None, local_dir, None)
            Machine.chmod("777", local_dir)

        while self.signal:
            input_dir = "rw_%d" % int(999999 * random.random())
            HDFS.deleteDirectory(input_dir)

            HadoopJobHelper.runRandomTextWriterJob(input_dir,
                                                   self.randomwriter_bytes,
                                                   bytesPerMap=1,
                                                   mapsPerHost=1,
                                                   jobArg="",
                                                   user=None,
                                                   config=None,
                                                   runInBackground=False,
                                                   redirect_file=os.path.join(
                                                       local_dir, input_dir))
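
The loop runs until another thread clears self.signal, so a typical driver looks like the sketch below (the SmallRwJobs class name is an assumption):

    import threading

    job = SmallRwJobs()     # hypothetical object exposing run() and signal
    job.signal = True
    t = threading.Thread(target=job.run)
    t.start()
    # ... exercise the cluster while small randomwriter jobs run ...
    job.signal = False      # loop exits after the current job finishes
    t.join()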
Example #10
 def collect_application_log_for_Falcon_locally(cls, JobSummaryLogfile,
                                                appId, user):
     '''
      Collects application logs for the Falcon component and saves them in a local dir as <appId>.log
     '''
     host = re.search("jobsummary_(.*).log", JobSummaryLogfile).group(1)
     if not Machine.pathExists(
             None, None, os.path.join(cls.LOCAL_TMP_APP_STORAGE, host),
             None):
         Machine.makedirs(None, None,
                          os.path.join(cls.LOCAL_TMP_APP_STORAGE, host),
                          None)
         Machine.chmod("777",
                       os.path.join(cls.LOCAL_TMP_APP_STORAGE, host),
                       recursive=True,
                       user=None,
                       host=None,
                       passwd=None,
                       logoutput=True)
     filename = os.path.join(cls.LOCAL_TMP_APP_STORAGE, host,
                             appId + ".log")
     try:
         from beaver.component.falcon import Falcon  # pylint: disable=redefined-outer-name
         Falcon.get_application_log(host,
                                    appId,
                                    appOwner=user,
                                    nodeAddress=None,
                                    containerId=None,
                                    logoutput=False,
                                    grepFilter=None,
                                    pipeToFileOutput=filename)
     except Exception:
          logger.error(
              "Exception occurred during collect_application_log_for_Falcon_locally() call"
          )
         logger.error(traceback.format_exc())
         logger.info("Get application log for Falcon is broken")
Example #11
def modifyConfigRemote(changes, OriginalConfDir, ConfDir, nodes, id=None):  # pylint: disable=redefined-builtin
    '''
    Modifies hadoop config or config with similar structure.
    Returns None.

    Linux:
    1. Create tmpModifyConfDir_<time> in artifacts dir based on source config directory in gateway
    2. Modify contents in created directory.
    3. Copy the directory to /tmp/hadoopConf in target machines

    '''
    _backuploc = getBackupConfigLocation(id=id)
    tmpConfDir = os.path.join(
        ARTIFACTS_DIR,
        'tmpModifyConfDir_' + str(int(round(time.time() * 1000))))
    Config.set(PYTHON_CONFIG_NAME,
               TMP_CONF_DIR_VAR,
               tmpConfDir,
               overwrite=True)
    tmpConfDir = Config.get(PYTHON_CONFIG_NAME, TMP_CONF_DIR_VAR)
    for node in nodes:
        Machine.rm(Machine.getAdminUser(), node, ConfDir, isdir=True)
        Machine.rm(Machine.getAdminUser(),
                   Machine.getfqdn(),
                   tmpConfDir,
                   isdir=True)
        logger.info("*** COPY ORIGINAL CONFIGS FROM REMOTE TO LOCAL ***")
        Machine.copyToLocal(None, node, OriginalConfDir, tmpConfDir)
        #if node == Machine.getfqdn():
        #   Machine.copy(OriginalConfDir,tmpConfDir)
        for filename, values in changes.items():
            filepath = os.path.join(tmpConfDir, filename)
            if os.path.isfile(filepath):
                logger.info("Modifying file locally: %s", filepath)
                _fname, fext = os.path.splitext(filepath)
                if fext == ".xml":
                    util.writePropertiesToConfigXMLFile(
                        filepath, filepath, values)
                elif fext == ".json":
                    util.writePropertiesToConfigJSONFile(
                        filepath, filepath, values, ["global"],
                        "site.hbase-site.")
                elif fext == ".properties":
                    util.writePropertiesToFile(filepath, filepath, values)
                elif fext == ".cfg":
                    util.writePropertiesToFile(filepath, filepath, values)
                elif fext == ".conf":
                    util.writePropertiesToConfFile(filepath, filepath, values)
                elif fext == ".sh":
                    text = ""
                    for value in values:
                        text += "\n" + value
                    util.writeToFile(text, filepath, isAppend=True)
                elif fext == ".yaml":
                    text = ""
                    for k, v in values.iteritems():
                        text += k + " : " + v
                    util.writeToFile(text, filepath, isAppend=True)
                elif fext == ".cmd":
                    text = ""
                    for value in values:
                        text += "\n" + value
                    util.writeToFile(text, filepath, isAppend=True)
                elif fext is None or fext == "" or fext == ".include":
                    text = ""
                    isFirst = True
                    for value in values:
                        if isFirst:
                            text += value
                            isFirst = False
                        else:
                            text += "\n" + value
                    util.writeToFile(text, filepath, isAppend=True)
        logger.info("****** Copy back the configs to remote ******")
        #if node!=Machine.getfqdn():
        Machine.copyFromLocal(None, node, tmpConfDir, ConfDir)
        Machine.chmod('777', ConfDir, recursive=True, host=node)
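
A hypothetical changes dict: keys are file names inside the conf dir, and the value shape depends on the extension branch taken above (a dict for .xml/.properties/.conf, a list of lines for .sh/.cmd):

    changes = {
        'core-site.xml': {'fs.trash.interval': '360'},     # .xml -> property dict
        'hadoop-env.sh': ['export HADOOP_HEAPSIZE=2048'],  # .sh  -> list of lines
    }
    modifyConfigRemote(changes, '/etc/hadoop/conf', '/tmp/hadoopConf',
                       nodes=['node-1.example.com'])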
Example #12
 def modifyTicketLifetime(cls, lifetime, isFirstUpdate=True):
     '''
     Restart KDC with new ticket_lifetime
     '''
      # Extending lifetime beyond 24 hours is currently supported on Red Hat Linux and Debian only
      logger.info("Modifying Kerberos ticket lifetime")
     if Machine.isDebian() or Machine.isHumboldt():
         kdc_conf = '/etc/krb5kdc/kdc.conf'
         Machine.chmod(perm="777",
                       filepath="/etc/krb5kdc",
                       recursive=True,
                       user=Machine.getAdminUser(),
                       passwd=Machine.getAdminPasswd())
         with open(kdc_conf, "r+") as f:
             text = f.read()
             text = (re.sub(r"max_life = \S*", "max_life = %s" % lifetime,
                            text))
             f.seek(0)
             f.write(text)
             f.truncate()
     if Machine.isRedHat() or Machine.isCentOs():
         kdc_conf = '/var/kerberos/krb5kdc/kdc.conf'
         Machine.chmod(perm="777",
                       filepath=kdc_conf,
                       user=Machine.getAdminUser(),
                       passwd=Machine.getAdminPasswd())
         with open(kdc_conf, "r+") as f:
             text = f.read()
             if isFirstUpdate:
                 text = (re.sub(
                     "EXAMPLE.COM = {",
                     "EXAMPLE.COM = {\n  max_life = %s" % lifetime, text))
             else:
                 text = (re.sub(
                     r"EXAMPLE.COM = {\n  max_life = \S*",
                     "EXAMPLE.COM = {\n  max_life = %s" % lifetime, text))
             f.seek(0)
             f.write(text)
             f.truncate()
     kerberos_conf = '/etc/krb5.conf'
     Machine.chmod(perm="777",
                   filepath=kerberos_conf,
                   user=Machine.getAdminUser(),
                   passwd=Machine.getAdminPasswd())
     with open(kerberos_conf, "r+") as f:
         text = f.read()
         text = (re.sub(r"ticket_lifetime = \S*",
                        "ticket_lifetime = %s" % lifetime, text))
         f.seek(0)
         f.write(text)
         f.truncate()
     kadmin_local_loc = Machine.which("kadmin.local", "root")
     logger.info("kadmin_local_loc: %s", kadmin_local_loc)
     cmd = "echo 'modify_principal -maxlife %s %s@%s' | %s" % (
         lifetime, Config.getEnv('USER'),
         Config.get('machine', 'USER_REALM', ''), kadmin_local_loc)
     exit_code, stdout = Machine.runas(Machine.getAdminUser(),
                                       cmd,
                                       passwd=Machine.getAdminPasswd())
     if exit_code != 0:
         logger.info(
             "modify_principal failed with the following output:\n%s",
             stdout)
     cmd = "echo 'modify_principal -maxlife %s krbtgt/[email protected]' | %s" % (
         lifetime, kadmin_local_loc)
     exit_code, stdout = Machine.runas(Machine.getAdminUser(),
                                       cmd,
                                       passwd=Machine.getAdminPasswd())
     if exit_code != 0:
         logger.info(
             "modify_principal failed with the following output:\n%s",
             stdout)
     cmd = "service krb5kdc restart"
     exit_code, stdout = Machine.runas(Machine.getAdminUser(),
                                       cmd,
                                       passwd=Machine.getAdminPasswd())
     if exit_code != 0:
         logger.info("KDC failed to restart with the following output:\n%s",
                     stdout)
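
The first-update substitution on the Red Hat kdc.conf can be checked in isolation against a minimal realm stanza (sample text only):

    import re

    text = "[realms]\n EXAMPLE.COM = {\n  acl_file = /etc/krb5kdc/kadm5.acl\n }\n"
    text = re.sub("EXAMPLE.COM = {",
                  "EXAMPLE.COM = {\n  max_life = 2d", text)
    assert "max_life = 2d" in text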