Example #1
def start_spark_history_server(master):
    # Resolve the configured Spark installation directory for this cluster.
    sp_home = c_helper.get_spark_home(master.cluster)
    # Run the bundled start-history-server.sh script on the master node,
    # as the 'hadoop' user, over a remote shell.
    with context.set_current_instance_id(master.instance_id):
        with master.remote() as r:
            r.execute_command(
                'sudo su - -c "bash %s" hadoop' %
                os.path.join(sp_home, 'sbin/start-history-server.sh'))
Example #2
def _push_spark_configs_to_existing_node(spark_master, cluster, extra):
    # Resolve the configured Spark installation directory for this cluster.
    sp_home = c_helper.get_spark_home(cluster)
    # Map target paths under the Spark conf directory to the rendered
    # configuration contents prepared in 'extra'.
    files = {
        os.path.join(sp_home, 'conf/spark-env.sh'): extra['sp_master'],
        os.path.join(sp_home, 'conf/spark-defaults.conf'): extra['sp_defaults']
    }

    with spark_master.remote() as r:
        # Write both files to the master node with root privileges.
        r.write_files_to(files, run_as_root=True)
Example #3
def test_get_spark_home(self, get_config_value_or_default):
    # 'get_config_value_or_default' is a mock injected by a patch decorator
    # on the enclosing test class/method (not shown in this excerpt).
    get_config_value_or_default.return_value = 1
    self.assertEqual(c_helper.get_spark_home(self.cluster), 1)
    # get_spark_home must delegate to the generic config lookup with the
    # 'Spark' section and the 'Spark home' key for this cluster.
    get_config_value_or_default.assert_called_once_with('Spark',
                                                        'Spark home',
                                                        self.cluster)
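
Taken together, the examples show how callers rely on c_helper.get_spark_home: it returns the Spark installation directory configured for the cluster, which callers then join with paths such as conf/ or sbin/. The test in Example #3 implies the helper simply delegates to a generic config lookup. The following is a minimal, self-contained sketch of that behavior, not the actual Sahara implementation; the stand-in lookup function, the 'cluster_configs' attribute, and the '/opt/spark' default are assumptions made only for illustration.

def get_config_value_or_default(service, name, cluster):
    # Hypothetical stand-in for the real helper: return the cluster's
    # override for (service, name) if present, otherwise a placeholder
    # default used only in this sketch.
    overrides = getattr(cluster, 'cluster_configs', {}) or {}
    return overrides.get(service, {}).get(name, '/opt/spark')


def get_spark_home(cluster):
    # Delegate to the generic config lookup, exactly as the test asserts:
    # get_config_value_or_default('Spark', 'Spark home', cluster).
    return get_config_value_or_default('Spark', 'Spark home', cluster)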