# Example #1
 def test_start_hiveserver_process(
         self, add_provisioning_step, check_cluster_exists,
         set_current_instance_id, get_oozie, _hive_create_warehouse_dir,
         _hive_copy_shared_conf, _start_mysql, _hive_create_db,
         _hive_metastore_start, is_mysql_enabled, get_hive_password):
     """Verify the full HiveServer start sequence for a MySQL-backed
     metastore with no Oozie server present: warehouse dir creation,
     shared-conf copy, local MySQL start, DB-creation script upload,
     DB creation and metastore start.

     NOTE(review): the mock arguments are injected by @mock.patch
     decorators outside this view; their order must mirror the
     (bottom-up) decorator order — confirm against the full file.
     """
     pctx = mock.Mock()
     path = edp.get_hive_shared_conf_path('hadoop')
     # Take the MySQL-enabled branch of start_hiveserver_process.
     is_mysql_enabled.return_value = True
     cluster = self.instance.cluster
     self.instance.cluster.hadoop_version = '2.7.1'
     ng_cluster = self.instance.node_group.cluster
     # No Oozie on the cluster, so MySQL must be started on this node.
     get_oozie.return_value = None
     # Expected script: the 2.7.1 template with {{password}} replaced
     # by the stubbed Hive password.
     sql_script = pu.get_file_text(
         'plugins/vanilla/v2_7_1/resources/create_hive_db.sql',
         'sahara_plugin_vanilla')
     get_hive_password.return_value = '123'
     pwd_script = sql_script.replace('{{password}}', '123')
     rs.start_hiveserver_process(pctx, self.instance)
     # Each collaborator is expected to be called exactly once, with
     # the remote handle self.r where applicable.
     set_current_instance_id.assert_called_once_with(
         self.instance.instance_id)
     _hive_create_warehouse_dir.assert_called_once_with(self.r)
     _hive_copy_shared_conf.assert_called_once_with(self.r, path)
     is_mysql_enabled.assert_called_once_with(pctx, cluster)
     get_oozie.assert_called_once_with(ng_cluster)
     _start_mysql.assert_called_once_with(self.r)
     get_hive_password.assert_called_once_with(cluster)
     self.r.write_file_to.assert_called_once_with('/tmp/create_hive_db.sql',
                                                  pwd_script)
     _hive_create_db.assert_called_once_with(self.r)
     _hive_metastore_start.assert_called_once_with(self.r)
 def put_hive_hdfs_xml(self, cluster):
     """Publish the Hive shared configuration into HDFS.

     Connects to the first Hive server of *cluster*, creates the
     shared-conf directory in HDFS and uploads the node's local
     /etc/hive/conf/hive-site.xml into it.
     """
     hive_server = self.get_hive_servers(cluster)[0]
     with hive_server.remote() as remote:
         conf_path = edp.get_hive_shared_conf_path('hdfs')
         conf_dir = os.path.dirname(conf_path)
         # Commands run as the 'hdfs' user so HDFS permissions allow
         # creating the directory and writing the file.
         remote.execute_command(
             'sudo su - -c "hadoop fs -mkdir -p %s" hdfs' % conf_dir)
         remote.execute_command(
             'sudo su - -c "hadoop fs -put /etc/hive/conf/hive-site.xml '
             '%s" hdfs' % conf_path)
def start_hiveserver_process(pctx, instance):
    """Start HiveServer on *instance*.

    Always creates the Hive warehouse directory and copies the shared
    Hive configuration. When MySQL is enabled for the cluster, also
    starts a local MySQL server (unless an Oozie server on another
    host owns it), loads the version-specific create_hive_db.sql with
    the cluster's Hive password substituted, creates the Hive DB and
    starts the metastore.
    """
    with context.set_current_instance_id(instance.instance_id):
        with instance.remote() as remote:
            _hive_create_warehouse_dir(remote)
            _hive_copy_shared_conf(
                remote, edp.get_hive_shared_conf_path('hadoop'))

            # Nothing more to do unless the metastore is MySQL-backed.
            if not config_helper.is_mysql_enabled(pctx, instance.cluster):
                return

            oozie = vu.get_oozie(instance.node_group.cluster)
            # Start MySQL locally unless an Oozie server already runs
            # it on this very host.
            if not oozie or instance.hostname() != oozie.hostname():
                _start_mysql(remote)

            # Pick the DB-creation script matching the Hadoop version,
            # e.g. v2_7_1/resources/create_hive_db.sql.
            hadoop_version = instance.cluster.hadoop_version
            script_name = (
                'plugins/vanilla/v{}/resources/create_hive_db.sql'.format(
                    hadoop_version.replace('.', '_')))
            sql_script = utils.get_file_text(
                script_name, 'sahara_plugin_vanilla')

            hive_password = u.get_hive_password(instance.cluster)
            sql_script = sql_script.replace('{{password}}', hive_password)
            remote.write_file_to('/tmp/create_hive_db.sql', sql_script)
            _hive_create_db(remote)
            _hive_metastore_start(remote)
            LOG.info("Hive Metastore server at {host} has been "
                     "started".format(host=instance.hostname()))