Example #1
    def smoke_test_setup(cls):
        '''
        Setup required to run Smoke test
        '''
        from beaver.component.slider import Slider
        from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
        HADOOPQA_USER = Config.get('hadoop', 'HADOOPQA_USER')
        UpgradePerNode.reportProgress("###  Starting set up for Slider smoke test  ####")
        if Hadoop.isSecure():
            keytabFile = Machine.getHeadlessUserKeytab(HADOOPQA_USER)
            kinitloc = Machine.which("kinit", "root")
            cmd = "%s  -k -t %s %s" % (kinitloc, keytabFile, Machine.get_user_principal(HADOOPQA_USER))
            exit_code, stdout = Machine.run(cmd)
            if exit_code != 0:
                UpgradePerNode.reportProgress("###  smoke test setup for Slider failed due to kinit failed  ####")

        # Local directory under artifacts that we'll run tests from;
        # download_source() may fail, so guard it with try/except
        try:
            cls._LOCAL_WORK_DIR = Slider.download_source(useHDPBaseRepoFile=False, isRUcluster=True)
            logger.info("Local work dir = %s" % cls._LOCAL_WORK_DIR)
        except TypeError:
            UpgradePerNode.reportProgress("[FAILED][Slider][Smoke] Slider source download failed.")
            return
        if not cls._LOCAL_WORK_DIR:
            UpgradePerNode.reportProgress("[FAILED][Slider][Smoke] Slider Source Code missing.")
            return
        # Local conf directory with modified conf for tests
        if not os.path.exists(cls._LOCAL_CONF_DIR):
            os.makedirs(cls._LOCAL_CONF_DIR)
        slider_conf = os.path.join(Slider.getSliderHome(), 'conf')
        logger.info("slider_conf = %s" % slider_conf)
        logger.info("os path exist slider_conf = %s" % os.path.exists(slider_conf))
        if os.path.exists(slider_conf):
            Machine.copy(
                os.path.join(slider_conf, 'log4j.properties'), os.path.join(cls._LOCAL_CONF_DIR, 'log4j.properties')
            )
            Machine.copy(
                os.path.join(slider_conf, 'slider-env.sh'), os.path.join(cls._LOCAL_CONF_DIR, 'slider-env.sh')
            )
        else:
            UpgradePerNode.reportProgress("[FAILED][Slider][Smoke] Slider Conf %s missing" % slider_conf)
            return

        if Hadoop.isSecure():
            util.writePropertiesToConfigXMLFile(
                os.path.join(Slider.getSliderHome(), 'conf', 'slider-client.xml'),
                os.path.join(cls._LOCAL_CONF_DIR, 'slider-client.xml'), {
                    "slider.funtest.enabled": "true",
                    "slider.test.agent.enabled": "true",
                    "HADOOP_CONF_DIR": Config.get('hadoop', 'HADOOP_CONF'),
                    "slider.am.keytab.local.path": Machine.getHeadlessUserKeytab(HADOOPQA_USER),
                    "slider.keytab.principal.name": Machine.get_user_principal(HADOOPQA_USER)
                }
            )
        else:
            util.writePropertiesToConfigXMLFile(
                os.path.join(Slider.getSliderHome(), 'conf', 'slider-client.xml'),
                os.path.join(cls._LOCAL_CONF_DIR, 'slider-client.xml'), {
                    "slider.funtest.enabled": "true",
                    "HADOOP_CONF_DIR": Config.get('hadoop', 'HADOOP_CONF'),
                    "slider.test.agent.enabled": "true"
                }
            )

        logger.info("Local work dir = %s" % cls._LOCAL_WORK_DIR)
        # Check that the precondition is met and the source is available
        if not os.path.exists(cls._LOCAL_WORK_DIR) or not os.path.exists(os.path.join(cls._LOCAL_WORK_DIR, 'pom.xml')):
            logger.info('Slider source does not appear to exist at %s' % cls._LOCAL_WORK_DIR)
            UpgradePerNode.reportProgress(
                "###  Slider source does not appear to exist at %s ####" % cls._LOCAL_WORK_DIR
            )
            return
        # Install first so isolated modules can be tested
        exit_code, stdout = Maven.run(
            "clean install -DskipTests "
            "-Dhadoop.version=%s "
            "-Dprivate.repo.url=%s " % (Hadoop.getVersion(), Maven.getPublicRepoUrl()),
            cwd=cls._LOCAL_WORK_DIR
        )
        if exit_code != 0:
            UpgradePerNode.reportProgress("### Error installing Slider source : %d: %s ####" % (exit_code, stdout))
        else:
            UpgradePerNode.reportProgress("### Slider source install passed ####")
Example #2
    def run_smoke_test(cls, smoketestnumber, config=None):
        '''
        Run the Slider smoke test (HBase scan plus slider-funtest verification)
        :param smoketestnumber: Used for unique output log location
        '''
        global new_conf_path
        global buildNo
        from beaver.component.hbase import HBase
        # Run slider agent labels funtests
        from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
        UpgradePerNode.reportProgress("### Slider smoke test started ####")
        exit_code, stdout = HBase.runShellCmds(cls.SCAN_TABLE_CMD, user=cls.HBASE_USER, configPath=new_conf_path)
        if exit_code == 0:
            UpgradePerNode.reportProgress("### Slider smoke test scanned " + cls.TABLE_NAME)
        else:
            UpgradePerNode.reportProgress("### Slider smoke test scan of " + cls.TABLE_NAME + " failed")

        hbase_ver = HBase.getVersion(configPath=new_conf_path)
        if buildNo not in hbase_ver:
            UpgradePerNode.reportProgress("### smoke test failed: " + buildNo + " not found in " + hbase_ver)
        else:
            UpgradePerNode.reportProgress("### smoke test passed: " + buildNo + " found in " + hbase_ver)
        UpgradePerNode.reportProgress("scanned " + cls.TABLE_NAME)
        TABLE_NM = cls.id_generator(10)
        CREATE_TABLE_CMD = ["create '" + TABLE_NM + "', 'family1', 'family2', 'family3'"]
        exit_code, stdout = HBase.runShellCmds(CREATE_TABLE_CMD, user=cls.HBASE_USER, configPath=new_conf_path)
        if exit_code == 0:
            UpgradePerNode.reportProgress("created " + TABLE_NM)
        else:
            UpgradePerNode.reportProgress("creation of " + TABLE_NM + "failed")
        if not cls._LOCAL_WORK_DIR:
            UpgradePerNode.reportProgress(
                "[FAILED][Slider][Smoke] Slider smoke test failed due to Slider source code unavailability"
            )
            return

        exit_code, stdout = Maven.run(
            "verify -Dslider.conf.dir=%s "
            "-Dhadoop.version=%s "
            "-Dprivate.repo.url=%s "
            "-Dit.test=AppsUpgradeIT" % (cls._LOCAL_CONF_DIR, Hadoop.getVersion(), Maven.getPublicRepoUrl()),
            cwd=os.path.join(cls._LOCAL_WORK_DIR, 'slider-funtest')
        )
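        # NOTE: individual pass/fail outcomes are read from the failsafe XML
        # reports parsed below, which is presumably why the Maven exit code is
        # not checked here.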
        TEST_RESULT = {}
        testResultFiles = []

        # Get a list of all the test result files under any 'target' directory
        for name, dirs, files in os.walk(cls._LOCAL_WORK_DIR):
            if os.path.basename(name) == 'target':
                # Add in each failsafe report we find -- this test only runs failsafe reports
                testResultFiles.extend(util.findMatchingFiles(os.path.join(name, 'failsafe-reports'), 'TEST-*.xml'))

        for resultFile in testResultFiles:
            TEST_RESULT.update(util.parseJUnitXMLResult(resultFile))

        logger.info("=======================")
        logger.info(TEST_RESULT)
        logger.info("=======================")
        for testcase, testdata in TEST_RESULT.items():
            if testdata['result'] == "pass":
                UpgradePerNode.reportProgress("[PASSED][Slider][Smoke] Slider smoke test %s passed" % testcase)
            else:
                UpgradePerNode.reportProgress("[FAILED][Slider][Smoke] Slider smoke test %s failed" % testcase)
Example #3
HDFS_TRIDENT_EXPECTED_LINES = [
    "the cow jumped over the moon,1",
    "the man went to the store and bought some candy,2",
    "four score and seven years ago,3", "how many apples can you eat,4",
    "to be or not to be the person,5"
]
HDFS_TRIDENT_SEQ_EXPECTED_LINES = [
    "the cow jumped over the moon",
    "the man went to the store and bought some candy",
    "four score and seven years ago", "how many apples can you eat",
    "to be or not to be the person"
]
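
These expected-line constants are presumably asserted against the lines the topologies write to HDFS. A hypothetical comparison helper (verify_expected_lines and the usage shown are illustrative, not part of the original test):

    def verify_expected_lines(actual_lines, expected_lines):
        '''Return expected lines missing from the topology's HDFS output (hypothetical helper).'''
        actual = set(line.strip() for line in actual_lines)
        return [line for line in expected_lines if line not in actual]

    # e.g. missing = verify_expected_lines(hdfs_output_lines, HDFS_TRIDENT_SEQ_EXPECTED_LINES)
    #      assert not missing, "Missing lines in HDFS output: %s" % missing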

HDFS_USER = Config.get('hadoop', 'HDFS_USER')
HADOOPQA_USER = Config.get('hadoop', 'HADOOPQA_USER')

HADOOP_VERSION = Hadoop.getVersion()

HDFS_TOPOLOGY_NAME = "HDFSTopology"
HDFS_SEQ_TOPOLOGY_NAME = "HDFSSequenceTopology"
HDFS_TRIDENT_TOPOLOGY_NAME = "HDFSTridentTopology"
HDFS_TRIDENT_SEQ_TOPOLOGY_NAME = "HDFSTridentSeqTopology"

######JDBC config######
STORM_JDBC_TEST_USER = "******"
STORM_JDBC_TEST_PASSWD = "storm_test_password"
JAVA_JDBC_SRC_DIR = os.path.join(Config.getEnv('WORKSPACE'), 'tests', 'storm',
                                 'storm-jdbc', 'java')
LOCAL_JDBC_WORK_DIR = os.path.join(Config.getEnv('ARTIFACTS_DIR'),
                                   'storm-jdbc-tests')
TARGET_JDBC_STORM_JAR = os.path.join(
    Config.getEnv('ARTIFACTS_DIR'), 'storm-jdbc-tests', 'target',