def scale_cluster(pctx, cluster, instances):
    """Integrate newly added instances into a running cluster.

    Pushes configuration to the new instances, refreshes the cluster's
    HDFS node list, refreshes YARN nodes when a ResourceManager is
    present, then starts DataNode/NodeManager processes on the new
    instances and refreshes the ZooKeeper ensemble.
    """
    # Configure the new instances and make the rest of the cluster
    # aware of them before starting any processes on them.
    config.configure_instances(pctx, instances)
    _update_include_files(cluster)
    run.refresh_hadoop_nodes(cluster)

    # YARN refresh only applies when the cluster runs a ResourceManager.
    resourcemanager = vu.get_resourcemanager(cluster)
    if resourcemanager:
        run.refresh_yarn_nodes(cluster)

    config.configure_topology_data(pctx, cluster)
    run.start_dn_nm_processes(instances)
    swift_helper.install_ssl_certs(instances)

    # Bring ZooKeeper configuration and membership up to date last.
    config.configure_zookeeper(cluster)
    run.refresh_zk_servers(cluster)
def test_start_dn_nm_processes(self, instances_with_services, add_provisioning_step, set_current_instance_id, _start_processes):
    """Check that start_dn_nm_processes filters to DN/NM-capable instances,
    records a provisioning step, and starts only the matching processes."""
    # Build a single fake instance whose node group declares one
    # relevant process ('datanode') and one irrelevant one ('test').
    instance = mock.Mock()
    instance.cluster_id = '111'
    instance.instance_id = '123'
    instance.instance_name = 'ins_1'
    instance.node_group.node_processes = ['datanode', 'test']

    instances = [instance]
    instances_with_services.return_value = instances
    expected_message = pu.start_process_event_message('DataNodes, NodeManagers')

    rs.start_dn_nm_processes(instances)

    # The helper must be queried for datanode/nodemanager services only.
    instances_with_services.assert_called_once_with(
        instances, ['datanode', 'nodemanager'])
    # One provisioning step is reported for the single matching instance.
    add_provisioning_step.assert_called_once_with('111', expected_message, 1)
    set_current_instance_id.assert_called_once_with('123')
    # Only the 'datanode' process is actually started; 'test' is ignored.
    _start_processes.assert_called_once_with(instance, ['datanode'])
def start_cluster(self, cluster):
    """Start every service of a freshly provisioned Hadoop cluster.

    Order matters here: HDFS master daemons come up first, then the
    ResourceManager, then worker processes; DataNodes are awaited
    before the dependent services (HistoryServer, Oozie, Hive,
    ZooKeeper, Spark) are launched.
    """
    keypairs.provision_keypairs(cluster)

    # HDFS/YARN master daemons.
    s_scripts.start_namenode(cluster)
    s_scripts.start_secondarynamenode(cluster)
    s_scripts.start_resourcemanager(cluster)

    # Worker processes; wait until the DataNodes have registered.
    run.start_dn_nm_processes(utils.get_instances(cluster))
    run.await_datanodes(cluster)

    # Auxiliary services that rely on a live HDFS/YARN layer.
    s_scripts.start_historyserver(cluster)
    s_scripts.start_oozie(self.pctx, cluster)
    s_scripts.start_hiveserver(self.pctx, cluster)
    s_scripts.start_zookeeper(cluster)

    swift_helper.install_ssl_certs(utils.get_instances(cluster))
    self._set_cluster_info(cluster)
    s_scripts.start_spark(cluster)