Example #1
def find(basedir, matchstr, isDirectory=False, isRecursive=True):
    if isRecursive:
        matches = util.findMatchingFiles(basedir, matchstr, isDirectory)
    else:
        matches = util.findMatchingFiles(basedir, matchstr, isDirectory, 1)  # limit the search depth so the lookup is non-recursive
    if matches:
        return matches[0]
    return ""
Example #2
def getClasspathForJdbcClient():
    jdbc_standalone_jar = getStandaloneHiveJdbcJar()
    assert jdbc_standalone_jar, "No JDBC standalone jar found"
    classpath = [jdbc_standalone_jar]
    hadoop_home = Config.get('hadoop', 'HADOOP_HOME')
    if Hadoop.isHadoop2() and Machine.type() == 'Windows':
        hadoop_home = os.path.join(hadoop_home, "share", "hadoop", "common")
    hadoop_common_jar = util.findMatchingFiles(hadoop_home, "hadoop-common-*[!(tests)].jar", depth=1)
    assert len(hadoop_common_jar) > 0, "No hadoop-common.jar found"
    classpath.append(hadoop_common_jar[0])
    if Hadoop.isSecure():
        hadoop_auth_jar = util.findMatchingFiles(hadoop_home, "hadoop-auth-*[!(tests)].jar", depth=1)
        assert len(hadoop_auth_jar) > 0, "No hadoop-auth.jar found"
        classpath.append(hadoop_auth_jar[0])
    classpath.append(Config.get('hadoop', 'HADOOP_CONF'))
    return os.pathsep.join(classpath)
Example #3
    def getVersion(cls):
        # determine the hive version from the jar that is deployed
        jarDir = os.path.join(cls._hcat_home, 'share', 'hcatalog')
        files = util.findMatchingFiles(jarDir, "*hcatalog-core-*.jar")
        p = re.compile(r'hcatalog-core-(\S+)\.jar')
        m = p.search(files[0])
        if m:
            return m.group(1)
        else:
            return ""
Example #4
    def getVersion(cls):
        # determine the hive version from the jar that is deployed
        jarDir = os.path.join(cls.getHiveHome(), 'lib')
        files = util.findMatchingFiles(jarDir, "hive-metastore-*.jar")
        p = re.compile(r'hive-metastore-(\S+)\.jar')
        m = p.search(files[0])
        if m:
            return m.group(1)
        else:
            return ""
Example #5
    def get_stack_trace(cls, test_name, current_dir):
        apiReportDirectory = os.path.join(current_dir, 'target', 'surefire-reports', 'junitreports')
        uifrmReportDirectory = os.path.join(current_dir, 'target', 'surefire-reports')

        pattern = '*' + UpgradeLogger.get_testclass_name(test_name) + '*.xml'

        testResultFiles = util.findMatchingFiles(
            apiReportDirectory, pattern
        )  # First search for test result xml in api framework dir
        if not testResultFiles:
            testResultFiles = util.findMatchingFiles(
                uifrmReportDirectory, pattern
            )  # Now search for test result xml in uifrm dir
            Machine.runas(
                'root', 'chmod -R 755 ' + uifrmReportDirectory
            )  # Provide read permissions to everyone in uifrm junit report xml directory
        else:
            Machine.runas(
                'root', 'chmod -R 755 ' + apiReportDirectory
            )  # Provide read permissions to everyone in api framework report xml directory

        testresult = {}
        for resultFile in testResultFiles:
            testresult.update(util.parseJUnitXMLResult(resultFile))

        testOutput = {}

        for key, value in testresult.items():
            m = re.search(r"([^.]*)$", key)  # Extract test name
            print "key : %s " % key
            print "value : %s " % value
            key = m.group(0)
            fail_message = value['failure']
            print "Final key: %s" % key
            print "Final fail_msg: %s" % fail_message

            if fail_message:
                testOutput[str(key)] = fail_message

        print testOutput

        return testOutput
Example #6
    def getdnjars(cls):
        # Workaround BUG-58287 org.datanucleus.api.jdo.JDOPersistenceManagerFactory
        spark_lib_dir = os.path.join(Spark.getSparkHome(), "lib")
        dn_jars = util.findMatchingFiles(spark_lib_dir, "datanucleus*.jar")
        jars = ''
        for jar in dn_jars:
            jars = jar + "," + jars

        # remove the last "," in the list
        jars = jars[:-1]
        return jars
Example #7
    def gatherTestResults(self):
        logger.info("=====Gathering results=========")
        testresult = {}
        global nonPyTestReportList
        Machine.runas(
            'root',
            'chmod -R 755 ' + os.path.join(self.LOCAL_WORK_DIR, 'target'))
        # get a list of all the test result files
        testResultFiles = util.findMatchingFiles(
            os.path.join(self.LOCAL_WORK_DIR, 'target', 'surefire-reports'),
            'TEST-*.xml')
        #print "Test result files: " % testResultFiles
        for resultFile in testResultFiles:
            testresult.update(util.parseJUnitXMLResult(resultFile))

        for key, value in testresult.items():
            fail_message = value['failure']
            print "key : %s " % key
            print "value : %s " % value
            print "fail_mesg : %s " % fail_message
            m = re.search(r"([^.]*)$", key)
            #remove braces and hyphens from tc name
            #key = m.group(1).replace('-','')
            key = 'split-' + self.splitNumStr + '-' + m.group(1)
            #if '[' in key:
            #    key = key[:key.index('[')]
            value = re.match(r'.*result(.*)', str(value)).group(1)
            #remove additional quotes from tc status
            if '\'' in value:
                value = value[3:9].replace('\'', "")
            self.TEST_RESULT[str(key)] = {
                "status": str(value),
                "failure": fail_message
            }
        self.TESTCASES.extend(sorted(self.TEST_RESULT.keys()))
        for testcase in self.TESTCASES:
            print(testcase)

        TEST_SUITE_REPORT = dict()
        jsonFile = os.path.join('/tmp', 'testTimeDurations.json')
        if os.path.exists(jsonFile):
            Machine.runas('root', 'chmod -R 777 /tmp')

            with open(jsonFile, 'r') as data_file:
                data = json.load(data_file)

            testcases = data['testCases']
            for testcase in testcases:
                testcasename = 'split-' + self.splitNumStr + '-' + testcase[
                    'testCaseName']
                TEST_SUITE_REPORT[testcasename] = testcase
                nonPyTestReportList.append(
                    {testcasename: TEST_SUITE_REPORT[testcasename]})
                print nonPyTestReportList
Example #8
    def getVersion(cls):
        # determine the sqoop version from the jar that is deployed
        jarDir = os.path.join(Config.get('sqoop2', 'SQOOP2_HOME'), 'shell-lib')
        files = util.findMatchingFiles(jarDir, "sqoop-client-*.jar")
        p = re.compile(r'sqoop-client-(\S+)\.jar')
        if files:
            m = p.search(files[0])
            if m:
                return m.group(1)
        return ""
Example #9
    def getVersion(cls):
        jarDir = Config.get("phoenix", "PHOENIX_HOME")
        files = util.findMatchingFiles(jarDir, "phoenix-*-thin-client.jar")
        jarFileName = files[0].split(os.path.sep)[-1]

        p = re.compile(r'phoenix-(\S+)-thin-client\.jar')
        m = p.search(jarFileName)
        if m:
            return m.group(1)
        else:
            return ""
Example #10
    def getVersion(cls):
        '''
        Returns the Druid version, determined from the druid-server jar that is deployed
        '''
        jarDir = os.path.join(cls.getDruidHome(), 'lib')
        logger.info(jarDir)
        files = util.findMatchingFiles(jarDir, "druid-server-*.jar")
        p = re.compile(r'druid-server-(\S+)\.jar')
        m = p.search(files[0])
        if m:
            return m.group(1)
        else:
            return ""
Example #11
    def run_smoke_test(cls, smoketestnumber, config=None):
        '''
        Run smoke test for yarn
        :param smoketestnumber: Used for unique output log location
        '''
        global new_conf_path
        global buildNo
        from beaver.component.hbase import HBase
        # Run slider agent labels funtests
        from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
        UpgradePerNode.reportProgress("### Slider smoke test started ####")
        exit_code, stdout = HBase.runShellCmds(cls.SCAN_TABLE_CMD, user=cls.HBASE_USER, configPath=new_conf_path)
        UpgradePerNode.reportProgress("### Slider smoke test scanned " + cls.TABLE_NAME)

        hbase_ver = HBase.getVersion(configPath=new_conf_path)
        if buildNo not in hbase_ver:
            UpgradePerNode.reportProgress("### smoke test failed: " + buildNo + " not found in " + hbase_ver)
        else:
            UpgradePerNode.reportProgress("### smoke test passed: " + buildNo + " found in " + hbase_ver)
        UpgradePerNode.reportProgress("scanned " + cls.TABLE_NAME)
        TABLE_NM = cls.id_generator(10)
        CREATE_TABLE_CMD = ["create '" + TABLE_NM + "', 'family1', 'family2', 'family3'"]
        exit_code, stdout = HBase.runShellCmds(CREATE_TABLE_CMD, user=cls.HBASE_USER, configPath=new_conf_path)
        if exit_code == 0:
            UpgradePerNode.reportProgress("created " + TABLE_NM)
        else:
            UpgradePerNode.reportProgress("creation of " + TABLE_NM + "failed")
        if not cls._LOCAL_WORK_DIR:
            UpgradePerNode.reportProgress(
                "[FAILED][Slider][Smoke] Slider smoke test failed due to Slider source code unavailability"
            )
            return

        exit_code, stdout = Maven.run(
            "verify -Dslider.conf.dir=%s "
            "-Dhadoop.version=%s "
            "-Dprivate.repo.url=%s "
            "-Dit.test=AppsUpgradeIT" % (cls._LOCAL_CONF_DIR, Hadoop.getVersion(), Maven.getPublicRepoUrl()),
            cwd=os.path.join(cls._LOCAL_WORK_DIR, 'slider-funtest')
        )
        testresults = {}
        testResultFiles = []
        TEST_RESULT = {}

        # get a list of all the test result files
        for name, dirs, files in os.walk(cls._LOCAL_WORK_DIR):
            if os.path.basename(name) == 'target':
                # Add in each failsafe-report we find -- this test only runs failsafe reports
                testResultFiles.extend(util.findMatchingFiles(os.path.join(name, 'failsafe-reports'), 'TEST-*.xml'))

        for resultFile in testResultFiles:
            testresults.update(util.parseJUnitXMLResult(resultFile))
        for key, value in testresults.items():
            TEST_RESULT[key] = value

        logger.info("=======================")
        logger.info(TEST_RESULT)
        logger.info("=======================")
        TestCases = TEST_RESULT.keys()
        for testcase in TestCases:
            result = TEST_RESULT[testcase]['result']
            if result == "pass":
                UpgradePerNode.reportProgress("[PASSED][Slider][Smoke] Slider smoke test passed")
            else:
                UpgradePerNode.reportProgress("[FAILED][Slider][Smoke] Slider smoke test failed")
Example #12
def find(basedir, matchstr):
    matches = util.findMatchingFiles(basedir, matchstr)
    if len(matches) > 0:
        return matches[0]
    return ""
Example #13
def getStandaloneHiveJdbcJar():
    hive_lib_dir = os.path.join(Hive.getHiveHome(), 'jdbc')
    jdbc_standalone_jar = util.findMatchingFiles(hive_lib_dir, "hive-jdbc-*-standalone.jar", depth=1)
    return jdbc_standalone_jar[0] if len(jdbc_standalone_jar) > 0 else None
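
Note: every example above calls util.findMatchingFiles from the surrounding beaver test framework, whose implementation is not shown here. Based only on the call sites above (a base directory, an fnmatch-style pattern, an optional isDirectory flag, and an optional depth limit where depth=1 restricts the search to the top level), a minimal stand-in could look like the sketch below. This is an assumption-driven illustration, not the framework's actual helper.

import fnmatch
import os


def findMatchingFiles(basedir, matchstr, isDirectory=False, depth=None):
    # Illustrative stand-in, not the beaver util implementation: walk basedir
    # and collect paths whose basename matches the fnmatch pattern matchstr.
    #   isDirectory - match directory names instead of file names
    #   depth       - if given, search at most this many directory levels,
    #                 so depth=1 looks only in basedir itself
    matches = []
    basedir = os.path.normpath(basedir)
    for root, dirs, files in os.walk(basedir):
        level = root[len(basedir):].count(os.sep)
        names = dirs if isDirectory else files
        for name in names:
            if fnmatch.fnmatch(name, matchstr):
                matches.append(os.path.join(root, name))
        if depth is not None and level >= depth - 1:
            # do not descend below the requested depth
            del dirs[:]
    return matches

With a helper along these lines, a call such as util.findMatchingFiles(hive_lib_dir, "hive-jdbc-*-standalone.jar", depth=1) from Example #13 would return only the standalone JDBC jars sitting directly in the jdbc directory.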