Code Example #1
File: ruStorm.py  Project: thakkardharmik/beaver
    def submit_storm_hive_topology(cls, tcId, className, args,
                                   useStandaloneCmd):
        if Hadoop.isSecure():
            user_realm = None
            if Config.hasOption('machine', 'USER_REALM'):
                user_realm = Config.get('machine', 'USER_REALM', '')
            else:
                nnKerbPrincipal = HDFS.getNameNodePrincipal(defaultValue='')
                atloc = nnKerbPrincipal.find("@")
                if atloc != -1:
                    # Skip the "@" itself so the principal built below
                    # does not end up with a doubled "@".
                    user_realm = nnKerbPrincipal[atloc + 1:]
            if user_realm is not None:
                args += " " + Machine.getHeadlessUserKeytab(
                    Config.getEnv('USER')) + " " + Config.getEnv(
                        'USER') + '@' + user_realm

        exit_code, stdout = Storm.runStormHdfsTopology(
            TARGET_HIVE_STORM_JAR,
            className,
            args,
            None,
            logoutput=True,
            inBackground=False,
            useStandaloneCmd=useStandaloneCmd)
        logger.info(exit_code)

        ruAssert("Storm", exit_code == 0,
                 "[StormHiveSubmit] %s Failed" % (tcId))
Code Example #2
File: ruFlume.py  Project: thakkardharmik/beaver
    def background_job_setup(cls, runSmokeTestSetup=True, config=None):
        '''
        Setup for background long-running job
        :param runSmokeTestSetup: Runs smoke test setup if set to true
        '''
        from beaver.component.rollingupgrade.ruUpgrade import UpgradePerNode
        UpgradePerNode.reportProgress(
            "[INFO][FLUME][BGJobSetup] Long running job setup for Flume component started"
        )
        from beaver.component.flume import Agent
        global agent1
        global agent2
        if not os.path.exists(cls._local_work_dir):
            os.mkdir(cls._local_work_dir)
        shutil.copy(cls._flume_datagen_src, cls._local_work_dir)
        agent1 = Agent(cls._local_work_dir)
        agent2 = Agent(cls._local_work_dir)
        for outdir in (cls._agent1_chkpt_dir, cls._agent1_data_dir,
                       cls._agent2_chkpt_dir, cls._agent2_data_dir):
            os.mkdir(outdir)
        logger.info("Preparing the Flume configs for long running test")
        propertyMap = {}
        namenode = Hadoop.getFSDefaultValue()
        propertyMap['agent2.sinks.hdfsSink.hdfs.path'] = "%s%s" % (
            namenode, cls._hdfs_test_dir)
        if Hadoop.isSecure():
            user_realm = None
            if Config.hasOption('machine', 'USER_REALM'):
                user_realm = Config.get('machine', 'USER_REALM', '')
            else:
                nnKerbPrincipal = HDFS.getNameNodePrincipal(defaultValue='')
                atloc = nnKerbPrincipal.find("@")
                if atloc != -1:
                    # Skip the "@" itself so the principal below is not doubled.
                    user_realm = nnKerbPrincipal[atloc + 1:]
            if user_realm:
                propertyMap[
                    'agent2.sinks.hdfsSink.hdfs.kerberosPrincipal'] = cls._test_user + '@' + user_realm
            propertyMap[
                'agent2.sinks.hdfsSink.hdfs.kerberosKeytab'] = Machine.getHeadlessUserKeytab(
                    cls._test_user)
        util.writePropertiesToFile(
            os.path.join(cls._flume_test_conf, 'longrunning.properties'),
            cls._flume_test_src, propertyMap)
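
util.writePropertiesToFile is beaver's own helper and its exact semantics are not shown here. A minimal stand-in sketch, assuming it reads a template properties file and writes it back out with the propertyMap entries overriding matching keys; the (destination, template, overrides) argument order is read off the call above and is an assumption:

def write_properties_to_file(dest, template, overrides):
    # Assumed behavior: merge key=value pairs from the template with the
    # overrides map, overrides winning, and write the result to dest.
    merged = {}
    with open(template) as src:
        for line in src:
            line = line.strip()
            if line and not line.startswith('#') and '=' in line:
                key, value = line.split('=', 1)
                merged[key.strip()] = value.strip()
    merged.update(overrides)
    with open(dest, 'w') as out:
        for key in sorted(merged):
            out.write("%s=%s\n" % (key, merged[key]))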
Code Example #3
def generateTestReportConf(infile, outfile, results):
    config = ConfigParser()
    config.optionxform = str  # keep option names case-sensitive
    config.read(infile)
    if config.has_section(SECTION):
        for option, value in config.items(SECTION):
            if value != "": continue
            elif option == "BUILD_ID" and config.has_option(SECTION, "REPO_URL"):
                config.set(SECTION, option, getBuildId(config.get(SECTION, "REPO_URL")))
                config.remove_option(SECTION, "REPO_URL")
            elif option == "HOSTNAME":
                config.set(SECTION, option, socket.getfqdn())
            elif option == "COMPONENT_VERSION":
                if not config.has_option(SECTION, "COMPONENT") or config.get(SECTION, "COMPONENT") == "":
                    config.set(SECTION, "COMPONENT", "Hadoop")
                config.set(SECTION, option, getComponentVersion(config.get(SECTION, "COMPONENT")))
            elif option == "OS":
                config.set(SECTION, option, platform.platform())
            elif option == "SECURE" and Config.hasOption('hadoop', 'IS_SECURE'):
                config.set(SECTION, option, Config.get('hadoop', 'IS_SECURE').lower())
            elif option == "BLOB":
                pass
            elif option == "RAN":
                # ConfigParser values must be strings (required on Python 3;
                # str() also matches what Python 2 wrote out).
                config.set(SECTION, option, str(results[0] + len(results[1])))
            elif option == "PASS":
                config.set(SECTION, option, str(results[0]))
            elif option == "FAIL":
                config.set(SECTION, option, str(len(results[1])))
            elif option == "SKIPPED":
                config.set(SECTION, option, str(results[2]))
            elif option == "ABORTED":
                config.set(SECTION, option, str(results[3]))
            elif option == "FAILED_TESTS":
                config.set(SECTION, option, ",".join(results[1]))
            elif option == "SINGLE_NODE":
                from beaver.component.hadoop import HDFS
                if HDFS.getDatanodeCount() > 1:
                    config.set(SECTION, option, "false")
                else:
                    config.set(SECTION, option, "true")
        with open(outfile, 'w') as report:
            config.write(report)
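
The branches above pin down the shape of results: index 0 is the pass count, index 1 the list of failed test names, index 2 the skipped count, and index 3 the aborted count. A minimal invocation sketch under that assumption; the file names and test names are placeholders:

# (pass_count, failed_test_names, skipped_count, aborted_count)
results = (42, ["test_join_17", "test_udf_03"], 1, 0)
generateTestReportConf("testreport.conf.in", "testreport.conf", results)
# Fills in RAN=44, PASS=42, FAIL=2, FAILED_TESTS=test_join_17,test_udf_03, ...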
Code Example #4
File: hiveutils.py  Project: thakkardharmik/beaver
def runJdbcMultiSessionDriver(
        testDir,
        addlClasspath=[],
        connectionUrl=None,
        skippedTests=[],
        addlArgs=[],
        reuseConnections=False,
        testFilter=None,
        logsDir=None,
        queryTimeout=3600
):
    '''
    Run the Hive JDBC MultiSession Test Driver
    '''
    harnessDir = os.path.join(Config.getEnv('WORKSPACE'), 'datateamtest', 'hive_jdbc_multisession')
    logger.info("Build the TestDriver to run tests")
    exit_code, stdout = Maven.run("clean package", cwd=harnessDir)
    assert exit_code == 0, "Failed to build the test driver"
    classpath = [
        os.path.join(harnessDir, "target", "hive-multisession-test-0.1.jar"),
        Config.get('hadoop', 'HADOOP_CONF')
    ]
    if len(addlClasspath) == 0:
        hiveJdbcDriver = getStandaloneHiveJdbcJar()
        classpath.insert(0, hiveJdbcDriver)
    else:
        classpath = addlClasspath + classpath

    cobert_tool_version = "cobertura-2.1.1"
    COBERTURA_CLASSPATH = os.path.join(
        tempfile.gettempdir(), "coverage-tmp", cobert_tool_version, cobert_tool_version + ".jar"
    )
    # Append the Cobertura jar only when it is present on this machine.
    if Machine.pathExists(Machine.getAdminUser(), None, COBERTURA_CLASSPATH, Machine.getAdminPasswd()):
        classpath.append(COBERTURA_CLASSPATH)

    args = ["-t " + testDir]
    if connectionUrl is None:
        connectionUrl = Hive.getHiveServer2Url()
    args.append("-c \"%s\"" % connectionUrl)
    if Hadoop.isSecure():
        args.append("-k " + Config.get('machine', 'KEYTAB_FILES_DIR'))
        if Config.hasOption('machine', 'USER_REALM'):
            USER_REALM = Config.get('machine', 'USER_REALM', '')
            args.append("-e USER_REALM=%s" % (USER_REALM))
    args.extend(["--skip %s" % t for t in skippedTests])
    if reuseConnections:
        args.append("--reuseConnections")
    if testFilter:
        args.append("-f " + testFilter)
    from beaver.marker import getMarkerCondition
    markerCondition = getMarkerCondition()
    if markerCondition:
        args.append("-e 'marker=%s'" % markerCondition)
    if not logsDir:
        logsDir = os.path.join(Config.getEnv('ARTIFACTS_DIR'), "logs_%d" % int(999999 * random.random()))
    args.append("-l " + logsDir)
    if queryTimeout > 0:
        args.append("--queryTimeout %d" % queryTimeout)
    args.extend(addlArgs)
    return Java.runJava(
        Config.getEnv('ARTIFACTS_DIR'),
        "org.apache.hive.jdbc.TestDriver",
        classPath=(os.pathsep).join(classpath),
        cmdArgs=args
    )
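
A minimal call sketch, assuming Java.runJava returns an (exit_code, output) pair the way Maven.run does earlier in this function; the test directory and filter values are placeholders:

exit_code, stdout = runJdbcMultiSessionDriver(
    "/grid/0/tests/hive_jdbc_multisession",  # placeholder test directory
    reuseConnections=True,                   # share connections across tests
    testFilter="join",                       # placeholder filter expression
    queryTimeout=1800                        # per-query timeout (seconds assumed from the 3600 default)
)
assert exit_code == 0, "JDBC multisession driver reported failures"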