Example #1
 def post_start(self, cluster_context, instances):
     # After Sentry starts, stage the rendered global policy file in
     # MapR-FS: create the target directory, open its permissions, then
     # upload the file from the local conf dir.
     sentry_host = cluster_context.get_instance(SENTRY)
     source = self.conf_dir(cluster_context) + '/global-policy.ini'
     with sentry_host.remote() as r:
         mfs.mkdir(r, '/user/mapr/sentry', run_as='mapr')
         mfs.chmod(r, '/user/mapr/sentry', 777, run_as='mapr')
         mfs.copy_from_local(r, source, self.GLOBAL_POLICY_FILE,
                             hdfs_user='******')
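
The `mfs` helpers used in these snippets are defined elsewhere in the plugin. Below is a minimal sketch of what they might look like, assuming they shell out to `hadoop fs` over the cluster remote and that the remote object exposes `execute_command` and `write_file_to`; the names match the call sites above, but the bodies are guesses, not the plugin's actual implementation:

 import uuid

 def mkdir(remote, dir_name, run_as=None):
     # Create the MapR-FS directory (and any missing parents) as `run_as`.
     remote.execute_command(
         'sudo -u %s hadoop fs -mkdir -p %s' % (run_as, dir_name))

 def chmod(remote, dir_name, mode, run_as=None):
     # `mode` arrives as a plain integer (e.g. 777) and is formatted into
     # the command line, where `hadoop fs -chmod` reads it as octal digits.
     remote.execute_command(
         'sudo -u %s hadoop fs -chmod %s %s' % (run_as, mode, dir_name))

 def copy_from_local(remote, source, target, hdfs_user):
     # Stage the local file on the node, then copy it into MapR-FS as
     # `hdfs_user`. The temp path is an arbitrary choice for this sketch.
     tmp = '/tmp/%s' % uuid.uuid4().hex
     with open(source) as f:
         remote.write_file_to(tmp, f.read())
     remote.execute_command(
         'sudo -u %s hadoop fs -copyFromLocal %s %s'
         % (hdfs_user, tmp, target))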
Example #2
 def _create_hadoop_spark_dirs(self, cluster_context):
     # Create the MapR-FS directories the Spark History Server needs
     # (the Spark home and the JAR target dir) and open their permissions.
     home = '/apps/spark'
     libs = self.JAR_FILE_TARGET
     run_as_user = '******'
     with cluster_context.get_instance(SPARK_HISTORY_SERVER).remote() as r:
         mfs.mkdir(r, home, run_as=run_as_user)
         mfs.mkdir(r, libs, run_as=run_as_user)
         mfs.chmod(r, home, 777, run_as=run_as_user)
         mfs.chmod(r, libs, 777, run_as=run_as_user)
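
`SPARK_HISTORY_SERVER` and `JAR_FILE_TARGET` also come from elsewhere in the plugin. Purely for illustration, they might be declared along these lines; the import path and all values shown are placeholders, not the plugin's real definitions:

 import sahara.plugins.mapr.domain.node_process as np

 SPARK_HISTORY_SERVER = np.NodeProcess(
     name='spark-historyserver',
     ui_name='Spark HistoryServer',
     package='mapr-spark-historyserver')

 # On the service class: the MapR-FS path dependency JARs are copied to.
 JAR_FILE_TARGET = '/apps/spark/lib'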
Example #3
 def _create_hadoop_spark_dirs(self, cluster_context):
     # Variant that only creates the Spark home directory, running the
     # commands on the Spark master node instead of the history server.
     path = '/apps/spark'
     run_as_user = '******'
     with cluster_context.get_instance(SPARK_MASTER).remote() as r:
         mfs.mkdir(r, path, run_as=run_as_user)
         mfs.chmod(r, path, 777, run_as=run_as_user)
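
Methods like `_create_hadoop_spark_dirs` are normally called from one of the service's lifecycle hooks while the cluster is being provisioned. A hypothetical wiring, mirroring the `post_start` signature from Example #1 (the hook name `post_install` is an assumption, not taken from these snippets):

 def post_install(self, cluster_context, instances):
     # Hypothetical hook: once the Spark packages are installed, create
     # the MapR-FS directories before the service is started.
     self._create_hadoop_spark_dirs(cluster_context)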