Example #1
    def post_start(self, cluster_context, instances):
        # Locate the node running Sentry and the locally generated
        # global policy file.
        sentry_host = cluster_context.get_instance(SENTRY)
        source = self.conf_dir(cluster_context) + '/global-policy.ini'
        with sentry_host.remote() as r:
            # Create a world-writable Sentry directory in MapR-FS and
            # upload the policy file into it.
            mfs.mkdir(r, '/user/mapr/sentry', run_as='mapr')
            mfs.chmod(r, '/user/mapr/sentry', 777, run_as='mapr')
            # The user name is masked ('******') in this listing.
            mfs.copy_from_local(r, source, self.GLOBAL_POLICY_FILE,
                                hdfs_user='******')
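All of these examples lean on the same small helper module. As a rough sketch of what calls like mfs.mkdir, mfs.chmod, and mfs.copy_from_local could amount to, assuming the remote object exposes an execute_command() method and that MapR-FS is driven through the standard hadoop fs CLI (both are assumptions, not taken from the listing):

# Hypothetical sketch of the mfs helpers used above; the command strings
# and the execute_command() interface are assumptions.
def _fs(remote, subcommand, run_as=None):
    # Run a 'hadoop fs' subcommand on the remote node, optionally as
    # another user via sudo.
    prefix = 'sudo -u %s ' % run_as if run_as else ''
    remote.execute_command('%shadoop fs %s' % (prefix, subcommand))

def mkdir(remote, path, run_as=None):
    # -p creates parents and tolerates an already-existing directory.
    _fs(remote, '-mkdir -p %s' % path, run_as)

def chmod(remote, path, mode, run_as=None):
    _fs(remote, '-chmod %s %s' % (mode, path), run_as)

def copy_from_local(remote, source, target, hdfs_user):
    _fs(remote, '-copyFromLocal %s %s' % (source, target), run_as=hdfs_user)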
Example #2
    def _copy_jar_files_to_mfs(self, cluster_context):
        hive_service = self._hive(cluster_context)

        # The Spark assembly jar is always copied; the Hive config and
        # DataNucleus libraries are added only when Hive is deployed.
        paths = [self._assembly_jar_path(cluster_context)]

        if hive_service:
            # Reuse the handle fetched above rather than calling
            # self._hive() a second time.
            hive_conf = hive_service.conf_dir(cluster_context)
            paths.append('%s/hive-site.xml' % hive_conf)
            paths += self._hive_datanucleus_libs_paths(cluster_context)

        target = self.JAR_FILE_TARGET
        # The user name is masked ('******') in this listing.
        hdfs_user = '******'
        with cluster_context.get_instance(SPARK_HISTORY_SERVER).remote() as r:
            for path in paths:
                mfs.copy_from_local(r, path, target, hdfs_user)
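One way to see what the loop above would run, without a cluster, is to swap in a fake remote that records commands instead of executing them. Everything below (FakeRemote, the paths, the target, and the 'mapr' user standing in for the masked value) is illustrative and builds on the sketched helpers from Example #1:

class FakeRemote:
    # Records commands instead of running them over SSH.
    def __init__(self):
        self.commands = []

    def execute_command(self, cmd):
        self.commands.append(cmd)

    # Mirror the context-manager behaviour of instance.remote().
    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False

with FakeRemote() as r:
    for path in ('/opt/spark/lib/spark-assembly.jar',
                 '/opt/hive/conf/hive-site.xml'):
        copy_from_local(r, path, '/apps/spark/lib', hdfs_user='mapr')
print('\n'.join(r.commands))
# sudo -u mapr hadoop fs -copyFromLocal /opt/spark/lib/spark-assembly.jar /apps/spark/lib
# sudo -u mapr hadoop fs -copyFromLocal /opt/hive/conf/hive-site.xml /apps/spark/lib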
Example #3
    def _upload_job_files_to_hdfs(self,
                                  where,
                                  job_dir,
                                  job,
                                  configs,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        lib_dir = job_dir + '/lib'

        with where.remote() as r:
            # Stage each main binary on the node, then copy it into the
            # job directory on MapR-FS.
            for main in mains:
                binary = jb_manager.JOB_BINARIES.get_job_binary_by_url(
                    main.url)
                path = binary.copy_binary_to_cluster(
                    main, proxy_configs=proxy_configs,
                    remote=r, context=context.ctx())
                target = os.path.join(job_dir, main.name)
                mfs.copy_from_local(r, path, target, hdfs_user)
                uploaded_paths.append(target)

            # Supporting libraries go into a dedicated lib/ subdirectory,
            # created only when there is something to put in it.
            if libs:
                self.create_hdfs_dir(r, lib_dir)
            for lib in libs:
                binary = jb_manager.JOB_BINARIES.get_job_binary_by_url(
                    lib.url)
                path = binary.copy_binary_to_cluster(
                    lib, proxy_configs=proxy_configs,
                    remote=r, context=context.ctx())
                target = os.path.join(lib_dir, lib.name)
                mfs.copy_from_local(r, path, target, hdfs_user)
                uploaded_paths.append(target)

            # Built-in EDP binaries arrive as raw bytes rather than files
            # on disk, so they are written straight into MapR-FS.
            for lib in builtin_libs:
                mfs.put_file_to_maprfs(r, lib['raw'], lib['name'], lib_dir,
                                       hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
        return uploaded_paths
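The last loop differs from the first two: built-in libraries come in as raw bytes (lib['raw']), not as files already on the node, which is why they go through mfs.put_file_to_maprfs instead of mfs.copy_from_local. A plausible shape for that helper, reusing copy_from_local from the earlier sketch and assuming the remote offers a write_file_to() method, might be:

import os
import uuid

def put_file_to_maprfs(remote, content, file_name, target_dir, hdfs_user):
    # Hypothetical sketch: spool the in-memory payload to a uniquely
    # named temporary file on the node, then copy it into MapR-FS as
    # the requested user. write_file_to() is an assumption.
    tmp = '/tmp/%s.%s' % (file_name, uuid.uuid4())
    remote.write_file_to(tmp, content)
    copy_from_local(remote, tmp, os.path.join(target_dir, file_name),
                    hdfs_user)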
Example #4
    def copy_assembly_jar_to_mfs(self, cluster_context):
        target = self.JAR_FILE_TARGET
        # The user name is masked ('******') in this listing.
        hdfs_user = '******'
        # Copy the Spark assembly jar from the Spark History Server node
        # into MapR-FS.
        with cluster_context.get_instance(SPARK_HISTORY_SERVER).remote() as r:
            mfs.copy_from_local(r, self._assembly_jar_path(cluster_context),
                                target, hdfs_user)
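Tying the pieces together, the sketched helpers from the earlier examples show the single command this method would boil down to. The jar path and the 'mapr' user are placeholders (the real user is masked in this listing):

r = FakeRemote()
copy_from_local(r, '/opt/mapr/spark/lib/spark-assembly.jar',
                '/apps/spark/lib', hdfs_user='mapr')
print(r.commands[0])
# sudo -u mapr hadoop fs -copyFromLocal /opt/mapr/spark/lib/spark-assembly.jar /apps/spark/lib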