def _copy_jar_from_hue(self, context):
    """Copy the Hue servlet-api jar to the Spark history server's lib dir.

    No-op when Hue is not deployed on the cluster.
    """
    # Guard against Hue being absent; otherwise home_dir() would be
    # called on None.  Matches the guard used by the other
    # _copy_jar_from_hue variant in this module.
    if not self._hue(context):
        return
    # Glob pattern: the exact servlet-api jar version varies per release.
    jar_path = "%s/apps/spark/java-lib/javax.servlet-api-*.jar" % \
        self._hue(context).home_dir(context)
    path = '%s/lib/' % self.home_dir(context)
    with context.get_instance('Hue Livy').remote() as r1:
        with context.get_instance(SPARK_HISTORY_SERVER).remote() as r2:
            # Transfer between nodes via MapR-FS as the 'mapr' user.
            mfs.exchange(r1, r2, jar_path, path, 'mapr')
def _copy_jar_from_hue(self, context):
    """Distribute Hue's servlet-api jar to every Spark slave node.

    Does nothing when Hue is not part of the cluster.
    """
    hue = self._hue(context)
    if not hue:
        # No Hue service deployed: nothing to copy.
        return
    jar_path = "%s/apps/spark/java-lib/javax.servlet-api-*.jar" % \
        hue.home_dir(context)
    path = '%s/lib/' % self.home_dir(context) + self.SERVLET_JAR
    with context.get_instance('Hue').remote() as hue_remote:
        for slave in context.get_instances(SPARK_SLAVE):
            with slave.remote() as slave_remote:
                # Copy via MapR-FS as the 'mapr' user.
                mfs.exchange(hue_remote, slave_remote,
                             jar_path, path, 'mapr')
def _copy_hive_site(self, cluster_context):
    """Copy hive-site.xml from the HiveServer2 node to the Spark
    history server so Spark jobs can reach the Hive metastore.

    No-op when Hive is not deployed on the cluster.
    """
    if not self._hive(cluster_context):
        return
    hive_conf = self._hive(cluster_context).conf_dir(cluster_context)
    with cluster_context.get_instance(hive.HIVE_SERVER_2).remote() as h:
        with cluster_context.get_instance(
                SPARK_HISTORY_SERVER).remote() as s:
            # Use the 'mapr' service user, consistent with every other
            # mfs.exchange() call in this module; the previous
            # '******' value was a placeholder, not a valid user name.
            mfs.exchange(h, s,
                         hive_conf + '/hive-site.xml',
                         self.conf_dir(cluster_context) + '/hive-site.xml',
                         hdfs_user='mapr')
def _copy_hive_site(self, cluster_context):
    """Copy hive-site.xml from the Hive metastore node to all Impala
    daemons (servers, catalog service, state store).

    No-op when Hive is not deployed on the cluster.
    """
    # Guard against Hive being absent; otherwise conf_dir() would be
    # called on None.  Matches the guard used by the other
    # _copy_hive_site variant in this module.
    if not self._hive(cluster_context):
        return
    hive_site_path = self._hive(cluster_context).conf_dir(
        cluster_context) + "/hive-site.xml"
    path = self.conf_dir(cluster_context) + "/hive-site.xml"
    with cluster_context.get_instance(hive.HIVE_METASTORE).remote() as r1:
        # Fan the file out to every Impala server node.
        for instance in cluster_context.get_instances(IMPALA_SERVER):
            with instance.remote() as r2:
                mfs.exchange(r1, r2, hive_site_path, path, 'mapr')
        # Single catalog and state-store instances get their own copy.
        with cluster_context.get_instance(IMPALA_CATALOG).remote() as r3:
            mfs.exchange(r1, r3, hive_site_path, path, 'mapr')
        with cluster_context.get_instance(
                IMPALA_STATE_STORE).remote() as r4:
            mfs.exchange(r1, r4, hive_site_path, path, 'mapr')