def _install_spark_history_server(self, cluster_context, instances):
    """Install the Spark History Server package on its host instances."""
    # Only instances carrying the history-server process get the package.
    targets = cluster_context.filter_instances(instances, SPARK_HISTORY_SERVER)
    packages = [(SPARK_HISTORY_SERVER.package, self.version)]
    install_cmd = cluster_context.distro.create_install_cmd(packages)
    g.execute_command(targets, install_cmd, run_as='root')
    LOG.debug("Spark History Server successfully installed.")
def _install_spark_history_server(self, cluster_context, instances):
    """Deploy the Spark History Server package to its designated nodes."""
    history_nodes = cluster_context.filter_instances(
        instances, SPARK_HISTORY_SERVER)
    # Build the distro-specific install command for the versioned package.
    cmd = cluster_context.distro.create_install_cmd(
        [(SPARK_HISTORY_SERVER.package, self.version)])
    g.execute_command(history_nodes, cmd, run_as='root')
    LOG.debug("Spark History Server successfully installed.")
def start(self, cluster_context, instances=None):
    """Start Spark slave processes pointed at the cluster's master URL."""
    params = {
        'spark_home': Spark().home_dir(cluster_context),
        'start_slave': self._start_script,
        'master_url': SPARK_MASTER.submit_url(cluster_context),
    }
    # Wrap the start invocation so it runs as the 'mapr' user first; the
    # 'cd' prefix is added before the %-substitution fills in the values.
    run_cmd = g._run_as('mapr', '%(start_slave)s 1 %(master_url)s')
    full_cmd = ('cd %(spark_home)s && ' + run_cmd) % params
    g.execute_command(instances, full_cmd)
def set_cluster_mode(instance):
    # Run a cluster-mode command on a single node as the 'mapr' user.
    # NOTE(review): `command` is not defined in this function's scope -- it
    # must resolve to a module-level name or this raises NameError at call
    # time; confirm against the module (compare _set_cluster_mode, which
    # builds 'maprcli cluster mapreduce set -mode %s' locally).
    return util.execute_command([instance], command, run_as="mapr")
def _set_cluster_mode(self, cluster_context):
    """Apply the configured MapReduce mode across all cluster instances."""
    mode = cluster_context.cluster_mode
    # No configured mode means there is nothing to apply.
    if not mode:
        return
    util.execute_command(
        cluster_context.get_instances(),
        'maprcli cluster mapreduce set -mode %s' % mode,
        'mapr')
def stop(self, cluster_context, instances=None):
    """Stop the service on the given instances, running as the 'mapr' user."""
    cmd = self._get_stop_command(cluster_context, run_as="mapr")
    g.execute_command(instances, cmd)
def set_cluster_mode(instance):
    # Run a cluster-mode command on a single node as the MapR service user.
    # NOTE(review): `command` is not defined in this function's scope -- it
    # must resolve to a module-level name or this raises NameError at call
    # time; verify. Also note a sibling variant of this function uses the
    # literal "mapr" for run_as instead of pu.MAPR_USER_NAME -- confirm
    # which is current.
    return util.execute_command([instance], command, run_as=pu.MAPR_USER_NAME)