Example #1
    def setup_storm_hdfs_topology(cls, useStandaloneCmd):
        storm_version = Storm.getVersion(useStandaloneCmd=True)
        # Write the Kerberos keytab and principal for the topology when the cluster
        # is secure; the config file is (re)created either way.
        with open(HDFS_CONFIG_FILE, 'w') as file_obj:
            if Hadoop.isSecure():
                file_obj.write('hdfs.keytab.file: ' +
                               Machine.getHeadlessUserKeytab(
                                   user=HADOOPQA_USER) + '\n')
                file_obj.write('hdfs.kerberos.principal: ' +
                               Machine.get_user_principal(user=HADOOPQA_USER) +
                               '\n')

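        # Pre-create world-writable HDFS directories for the test topologies.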
        HDFS.createDirectory("/tmp/mySeqTopology", HDFS_USER, "777", False)
        HDFS.createDirectory("/tmp/dest", HDFS_USER, "777", False)
        HDFS.createDirectory("/tmp/dest2", HDFS_USER, "777", False)
        HDFS.createDirectory("/tmp/foo", HDFS_USER, "777", False)
        HDFS.createDirectory("/tmp/trident", HDFS_USER, "777", False)
        HDFS.createDirectory("/tmp/trident-seq", HDFS_USER, "777", False)

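        # Copy the storm-hdfs topology sources into the local work directory.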
        Machine.copy(JAVA_HDFS_SRC_DIR,
                     LOCAL_HDFS_WORK_DIR,
                     user=None,
                     passwd=None)
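        # Build the topology jar with Maven; the build properties are passed through
        # the environment on non-Windows hosts and as -D options on Windows.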
        if not Machine.isWindows():
            (exit_code, _) = Maven.run('package',
                                       cwd=LOCAL_HDFS_WORK_DIR,
                                       env={
                                           HADOOP_VERSION_MAVEN_PARAMETER:
                                           HADOOP_VERSION,
                                           STORM_VERSION_MAVEN_PARAMETER:
                                           storm_version,
                                           HADOOP_CONF_MAVEN_PARAMETER:
                                           HADOOP_CONF,
                                           HDFS_FILE_MAVEN_PARAMETER:
                                           HDFS_FILE,
                                           HADOOP_CORE_MAVEN_PARAMETER:
                                           HADOOP_CONF,
                                           CORE_FILE_MAVEN_PARAMETER:
                                           CORE_FILE,
                                           PUBLIC_REPO_MAVEN_PARAMETER:
                                           Maven.getPublicRepoUrl()
                                       })
        else:
            filepath = os.path.join(MOD_CONF_PATH, "core-site.xml")
            (exit_code, _) = Maven.run(
                'package -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s'
                % (HADOOP_VERSION_MAVEN_PARAMETER, HADOOP_VERSION,
                   STORM_VERSION_MAVEN_PARAMETER, storm_version,
                   HADOOP_CONF_MAVEN_PARAMETER, HADOOP_CONF,
                   HDFS_FILE_MAVEN_PARAMETER, HDFS_FILE,
                   HADOOP_CORE_MAVEN_PARAMETER, HADOOP_CONF,
                   CORE_FILE_MAVEN_PARAMETER, CORE_FILE,
                   PUBLIC_REPO_MAVEN_PARAMETER, Maven.getPublicRepoUrl()),
                cwd=LOCAL_HDFS_WORK_DIR)
        ruAssert("Storm", exit_code == 0,
                 "[StormHDFSSetup] maven package command failed")
Example #2
    def setup_storm_hbase_topology(cls, useStandaloneCmd):
        from beaver.component.hbase import HBase

        storm_version = Storm.getVersion(useStandaloneCmd=True)

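        # Copy the storm-hbase topology sources into the local work directory.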
        Machine.copy(JAVA_HBASE_SRC_DIR,
                     LOCAL_HBASE_WORK_DIR,
                     user=None,
                     passwd=None)

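        # Build the storm-hbase topology jar with Maven; the build properties are
        # passed as -D options on Windows and through the environment otherwise.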
        if Machine.isWindows():
            (_, _) = Maven.run(
                'package -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s -D%s=%s'
                % (HADOOP_VERSION_MAVEN_PARAMETER, HADOOP_VERSION,
                   STORM_VERSION_MAVEN_PARAMETER, storm_version,
                   HBASE_CONF_MAVEN_PARAMETER, HBASE_CONF,
                   HBASE_FILE_MAVEN_PARAMETER, HBASE_FILE,
                   HADOOP_CONF_MAVEN_PARAMETER, HADOOP_CONF,
                   HDFS_FILE_MAVEN_PARAMETER, HDFS_FILE,
                   CORE_FILE_MAVEN_PARAMETER, CORE_FILE,
                   PUBLIC_REPO_MAVEN_PARAMETER, Maven.getPublicRepoUrl()),
                cwd=LOCAL_HBASE_WORK_DIR)
        else:
            (_, _) = Maven.run('package',
                               cwd=LOCAL_HBASE_WORK_DIR,
                               env={
                                   HADOOP_VERSION_MAVEN_PARAMETER:
                                   HADOOP_VERSION,
                                   STORM_VERSION_MAVEN_PARAMETER:
                                   storm_version,
                                   HBASE_CONF_MAVEN_PARAMETER:
                                   HBASE_CONF,
                                   HBASE_FILE_MAVEN_PARAMETER:
                                   HBASE_FILE,
                                   HADOOP_CONF_MAVEN_PARAMETER:
                                   HADOOP_CONF,
                                   HDFS_FILE_MAVEN_PARAMETER:
                                   HDFS_FILE,
                                   CORE_FILE_MAVEN_PARAMETER:
                                   CORE_FILE,
                                   PUBLIC_REPO_MAVEN_PARAMETER:
                                   Maven.getPublicRepoUrl()
                               })

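        # Create the test table with a single 'cf' column family and grant the Storm
        # user full (RWCXA) rights on it.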
        exit_code, stdout = HBase.createTable(HBASE_TABLE_NAME, "cf", True,
                                              None)
        ruAssert("Storm", exit_code == 0)
        grant_cmd = "grant '%s', 'RWCXA', '%s', 'cf'" % (Storm.getStormUser(),
                                                         HBASE_TABLE_NAME)
        exit_code, stdout = HBase.runShellCmds([grant_cmd])
Example #3
    def setup_storm_jdbc_topology(cls, useStandaloneCmd):
        from beaver.dbutil import MySQL
        # Create the MySQL account the JDBC topology connects as.
        MySQL.createUserWithAllPriveleges(STORM_JDBC_TEST_USER,
                                          STORM_JDBC_TEST_PASSWD)
        storm_version = Storm.getVersion(useStandaloneCmd=True)
        # Write the Kerberos keytab and principal for the topology when the cluster
        # is secure; the config file is (re)created either way.
        with open(HDFS_CONFIG_FILE, 'w') as file_obj:
            if Hadoop.isSecure():
                file_obj.write('hdfs.keytab.file: ' +
                               Machine.getHeadlessUserKeytab(
                                   user=HADOOPQA_USER) + '\n')
                file_obj.write('hdfs.kerberos.principal: ' +
                               Machine.get_user_principal(user=HADOOPQA_USER) +
                               '\n')
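        # Copy the storm-jdbc topology sources into the local work dir and build them;
        # post-Fenton Storm releases add the -DpostFenton=true Maven property.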
        Machine.copy(JAVA_JDBC_SRC_DIR,
                     LOCAL_JDBC_WORK_DIR,
                     user=None,
                     passwd=None)
        post_fenton_opt = " -DpostFenton=true" if Storm.isAfterFenton() else ""
        package_cmd = 'package ' + post_fenton_opt
        (exit_code, _) = Maven.run(package_cmd,
                                   cwd=LOCAL_JDBC_WORK_DIR,
                                   env={
                                       HADOOP_VERSION_MAVEN_PARAMETER:
                                       HADOOP_VERSION,
                                       STORM_VERSION_MAVEN_PARAMETER:
                                       storm_version,
                                       HADOOP_CONF_MAVEN_PARAMETER:
                                       HADOOP_CONF,
                                       HDFS_FILE_MAVEN_PARAMETER:
                                       HDFS_FILE,
                                       CORE_FILE_MAVEN_PARAMETER:
                                       CORE_FILE,
                                       PUBLIC_REPO_MAVEN_PARAMETER:
                                       Maven.getPublicRepoUrl()
                                   })
        ruAssert("Storm", exit_code == 0,
                 "[StormJDBCSetup] maven package command failed")
Example #4
    def setup_storm_hive_topology(cls, useStandaloneCmd):
        from beaver.component.hive import Hive

        storm_version = Storm.getVersion(useStandaloneCmd=True)
        hive_version = Hive.getVersion()
        # Declare the module-level globals before the first assignment (Python flags
        # assignment before a global declaration), then read the Hive settings.
        global HIVE_METASTORE_URI
        global HIVE_HOST
        global HIVE_PORT
        global HIVE_WAREHOUSE_DIR
        HIVE_METASTORE_URI = Hive.getConfigValue(
            "hive.metastore.uris", defaultValue="thrift://localhost:9083")
        HIVE_WAREHOUSE_DIR = Hive.getConfigValue(
            "hive.metastore.warehouse.dir",
            defaultValue="/apps/hive/warehouse")
        HIVE_HOST = Hive.getHiveHost()
        HIVE_PORT = Hive.getMetastoreThriftPort()
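        # Pick the storm-hive topology sources matching the installed Storm release line.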
        if Storm.isDalorBeyond():
            JAVA_HIVE_SRC_DIR = os.path.join(Config.getEnv('WORKSPACE'),
                                             'tests', 'rolling_upgrade',
                                             'Storm', '2_3', 'storm-hive',
                                             'java')
        else:
            JAVA_HIVE_SRC_DIR = os.path.join(Config.getEnv('WORKSPACE'),
                                             'tests', 'rolling_upgrade',
                                             'Storm', '2_2', 'storm-hive',
                                             'java')
        # hive.txn.manager and hive.support.concurrency are set through ambari as per bug-40500
        #logger.info("Restart Hive")
        #changes = {'hive-site.xml': {'hive.txn.manager': 'org.apache.hadoop.hive.ql.lockmgr.DbTxnManager',
        #                             'hive.support.concurrency': 'true'}}
        #Hive.modifyConfig(changes, services=['metastore'], restartService=True)
        logger.info("Create test database in Hive")

        exit_code, stdout = Hive.runQuery(
            cls.get_set_queue_cmd(useStandaloneCmd) +
            " drop database if exists stormdb cascade; \
                                               create database stormdb;")
        ruAssert("Storm", exit_code == 0,
                 "[StormHiveSetup] Failed to create test database" + stdout)
        HDFS.chmod(runasUser=HDFS.getHDFSUser(),
                   perm=777,
                   directory=HIVE_WAREHOUSE_DIR + "/" + DATABASE_NAME + ".db")
        #copy tests/storm/storm-hive/java to artifacts/storm-hive-tests
        logger.info("JAVA_SRC_DIR " + JAVA_HIVE_SRC_DIR)
        logger.info("LOCAL_WORK_DIR " + LOCAL_HIVE_WORK_DIR)
        Machine.copy(JAVA_HIVE_SRC_DIR,
                     LOCAL_HIVE_WORK_DIR,
                     user=None,
                     passwd=None)
        #mvn package
        if Machine.isWindows():
            (_, _) = Maven.run(
                'package -D%s=%s -D%s=%s -D%s=%s -D%s=%s' %
                (HADOOP_VERSION_MAVEN_PARAMETER, HADOOP_VERSION,
                 STORM_VERSION_MAVEN_PARAMETER, storm_version,
                 HIVE_VERSION_MAVEN_PARAMETER,
                 hive_version, PUBLIC_REPO_MAVEN_PARAMETER,
                 Maven.getPublicRepoUrl(), CORE_FILE_MAVEN_PARAMETER,
                 CORE_FILE, HADOOP_CORE_MAVEN_PARAMETER, HADOOP_CONF,
                 HIVE_CORE_MAVEN_PARAMETER, HIVE_CORE_DIR,
                 HIVE_FILE_MAVEN_PARAMETER, HIVE_FILE),
                cwd=LOCAL_HIVE_WORK_DIR)
        else:
            (_, _) = Maven.run('package',
                               cwd=LOCAL_HIVE_WORK_DIR,
                               env={
                                   HADOOP_VERSION_MAVEN_PARAMETER:
                                   HADOOP_VERSION,
                                   STORM_VERSION_MAVEN_PARAMETER:
                                   storm_version,
                                   HIVE_VERSION_MAVEN_PARAMETER:
                                   hive_version,
                                   PUBLIC_REPO_MAVEN_PARAMETER:
                                   Maven.getPublicRepoUrl(),
                                   CORE_FILE_MAVEN_PARAMETER:
                                   CORE_FILE,
                                   HADOOP_CONF_MAVEN_PARAMETER:
                                   HADOOP_CONF,
                                   HDFS_FILE_MAVEN_PARAMETER:
                                   HDFS_FILE,
                                   HADOOP_CORE_MAVEN_PARAMETER:
                                   HADOOP_CONF,
                                   HIVE_CORE_MAVEN_PARAMETER:
                                   HIVE_CORE_DIR,
                                   HIVE_FILE_MAVEN_PARAMETER:
                                   HIVE_FILE
                               })
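        # Create a partitioned, bucketed, transactional ORC table (Hive streaming
        # ingest requires transactional, bucketed ORC tables).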
        create_table_q = "use %s; \
          drop table if exists %s; \
          create table %s (id int, name string, phone string, street string) \
          partitioned by (city string, state string) \
          clustered by (id) into %s buckets \
          stored as orc \
          tblproperties ('transactional'='true');" % (
            DATABASE_NAME, HIVE_TABLE_NAME, HIVE_TABLE_NAME, "5")

        exit_code, stdout = Hive.runQuery(
            cls.get_set_queue_cmd(useStandaloneCmd) + create_table_q)
        ruAssert(
            "Storm", exit_code == 0,
            "[StormHiveSetup] Failed to create test table userdata_partitioned"
        )
        HDFS.chmod(runasUser=HDFS.getHDFSUser(),
                   perm=777,
                   directory=HIVE_WAREHOUSE_DIR + "/" + DATABASE_NAME +
                   ".db/" + HIVE_TABLE_NAME)