Example 1
    def _upload_job_files_to_hdfs(self, where, job_dir, job, configs,
                                  proxy_configs=None):

        mains = list(job.mains) if job.mains else []
        libs = list(job.libs) if job.libs else []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        job_dir_suffix = 'lib' if job.type != edp.JOB_TYPE_SHELL else ''
        lib_dir = os.path.join(job_dir, job_dir_suffix)

        with remote.get_remote(where) as r:
            job_binaries = mains + libs
            self._prepare_job_binaries(job_binaries, r)

            # upload mains
            uploaded_paths.extend(self._upload_job_binaries(r, mains,
                                                            proxy_configs,
                                                            hdfs_user,
                                                            job_dir))
            # upload libs
            if len(libs) and job_dir_suffix:
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, lib_dir)
            uploaded_paths.extend(self._upload_job_binaries(r, libs,
                                                            proxy_configs,
                                                            hdfs_user,
                                                            lib_dir))
            # upload builtin libs
            for lib in builtin_libs:
                h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
                                   hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
        return uploaded_paths
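
Example 1 factors the per-binary work into _prepare_job_binaries and
_upload_job_binaries, neither of which is shown. A minimal sketch of what
_upload_job_binaries plausibly does, assuming it mirrors the per-binary
loops of Examples 3 and 4 below (the name and signature come from the call
sites above; the body is an assumption):

    # Hypothetical sketch, not the project's actual implementation: the
    # signature matches the call sites in Example 1, the body mirrors
    # the per-binary loops shown in Examples 3 and 4.
    def _upload_job_binaries(self, r, binaries, proxy_configs, hdfs_user,
                             upload_dir):
        uploaded_paths = []
        for binary in binaries:
            # Fetch the binary's raw contents (internal DB, Swift, ...).
            raw_data = dispatch.get_raw_binary(binary, proxy_configs)
            h.put_file_to_hdfs(r, raw_data, binary.name, upload_dir,
                               hdfs_user)
            uploaded_paths.append(upload_dir + '/' + binary.name)
        return uploaded_paths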
Example 2
 def test_put_file_to_hdfs(self, open_get, mock_six):
     open_get.return_value = '/tmp/workflow.xml'
     mock_six.return_value = 111
     helper.put_file_to_hdfs(self.cluster, open_get, 'workflow', '/tmp',
                             'hdfs')
     self.cluster.execute_command.assert_called_once_with(
         'sudo su - -c "hadoop dfs -copyFromLocal /tmp/workflow.111'
         ' /tmp/workflow" hdfs && sudo rm -f /tmp/workflow.111')
Example 3
def upload_job_files(where, job_dir, job, hdfs_user):
    mains = job.mains or []
    libs = job.libs or []
    uploaded_paths = []

    with remote.get_remote(where) as r:
        for main in mains:
            raw_data = dispatch.get_raw_binary(main)
            h.put_file_to_hdfs(r, raw_data, main.name, job_dir, hdfs_user)
            uploaded_paths.append(job_dir + '/' + main.name)
        for lib in libs:
            raw_data = dispatch.get_raw_binary(lib)
            # HDFS 2.2.0 fails to put file if the lib dir does not exist
            h.create_dir(r, job_dir + "/lib", hdfs_user)
            h.put_file_to_hdfs(r, raw_data, lib.name, job_dir + "/lib",
                               hdfs_user)
            uploaded_paths.append(job_dir + '/lib/' + lib.name)
    return uploaded_paths
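
The h.create_dir call works around the HDFS 2.2.0 behavior noted in the
comment: -copyFromLocal into a directory that does not exist fails, so
the lib directory has to be created up front. A plausible sketch of the
helper, reusing the sudo-su shell pattern from the test in Example 2;
the -mkdir -p flag is a Hadoop 2 CLI feature and the exact body is an
assumption:

# Hypothetical sketch mirroring the shell pattern from Example 2.
def create_dir(r, dir_name, hdfs_user):
    # -p keeps the call idempotent when the directory already exists.
    r.execute_command(
        'sudo su - -c "hadoop dfs -mkdir -p %s" %s' % (dir_name, hdfs_user))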
Example 4
    def _upload_job_files_to_hdfs(self, where, job_dir, job,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()

        with remote.get_remote(where) as r:
            for main in mains:
                raw_data = dispatch.get_raw_binary(main, proxy_configs)
                h.put_file_to_hdfs(r, raw_data, main.name, job_dir, hdfs_user)
                uploaded_paths.append(job_dir + '/' + main.name)
            for lib in libs:
                raw_data = dispatch.get_raw_binary(lib, proxy_configs)
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, job_dir + "/lib")
                h.put_file_to_hdfs(r, raw_data, lib.name, job_dir + "/lib",
                                   hdfs_user)
                uploaded_paths.append(job_dir + '/lib/' + lib.name)
        return uploaded_paths
Example 5
    def _upload_job_files_to_hdfs(self, where, job_dir, job, configs,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        job_dir_suffix = "lib" if job.type != edp.JOB_TYPE_SHELL else ""
        lib_dir = os.path.join(job_dir, job_dir_suffix)

        with remote.get_remote(where) as r:
            for main in mains:
                raw_data = dispatch.get_raw_binary(main, proxy_configs)
                h.put_file_to_hdfs(r, raw_data, main.name, job_dir, hdfs_user)
                uploaded_paths.append(job_dir + "/" + main.name)
            if len(libs) and job_dir_suffix:
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, lib_dir)
            for lib in libs:
                raw_data = dispatch.get_raw_binary(lib, proxy_configs)
                h.put_file_to_hdfs(r, raw_data, lib.name, lib_dir, hdfs_user)
                uploaded_paths.append(lib_dir + "/" + lib.name)
            for lib in builtin_libs:
                h.put_file_to_hdfs(r, lib["raw"], lib["name"], lib_dir, hdfs_user)
                uploaded_paths.append(lib_dir + "/" + lib["name"])
        return uploaded_paths
Example 6
    def _upload_job_files_to_hdfs(self,
                                  where,
                                  job_dir,
                                  job,
                                  configs,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        lib_dir = job_dir + '/lib'

        with remote.get_remote(where) as r:
            for main in mains:
                raw_data = dispatch.get_raw_binary(main, proxy_configs)
                h.put_file_to_hdfs(r, raw_data, main.name, job_dir, hdfs_user)
                uploaded_paths.append(job_dir + '/' + main.name)
            if len(libs) > 0:
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, lib_dir)
            for lib in libs:
                raw_data = dispatch.get_raw_binary(lib, proxy_configs)
                h.put_file_to_hdfs(r, raw_data, lib.name, lib_dir, hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib.name)
            for lib in builtin_libs:
                h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
                                   hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
        return uploaded_paths
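
The builtin-libs loop consumes plain dicts rather than binary objects, so
edp.get_builtin_binaries evidently returns contents that are already
resolved to raw bytes. The shape implied by the loop, with the 'name' and
'raw' keys taken from the code and the concrete values made up for
illustration:

# Illustrative only: the keys come from the loop above, the values do not.
builtin_libs = [
    {'name': 'builtin-swift.jar', 'raw': b'<jar bytes>'},
]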
Example 7
    def _upload_job_files_to_hdfs(self,
                                  where,
                                  job_dir,
                                  job,
                                  configs,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        job_dir_suffix = 'lib' if job.type != edp.JOB_TYPE_SHELL else ''
        lib_dir = os.path.join(job_dir, job_dir_suffix)

        with remote.get_remote(where) as r:
            for main in mains:
                raw_data = dispatch.get_raw_binary(main,
                                                   proxy_configs=proxy_configs,
                                                   remote=r)
                if isinstance(raw_data, dict) and raw_data["type"] == "path":
                    h.copy_from_local(r, raw_data['path'], job_dir, hdfs_user)
                else:
                    h.put_file_to_hdfs(r, raw_data, main.name, job_dir,
                                       hdfs_user)
                uploaded_paths.append(job_dir + '/' + main.name)
            if len(libs) and job_dir_suffix:
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, lib_dir)
            for lib in libs:
                raw_data = dispatch.get_raw_binary(lib,
                                                   proxy_configs=proxy_configs,
                                                   remote=r)
                if isinstance(raw_data, dict) and raw_data["type"] == "path":
                    h.copy_from_local(r, raw_data['path'], lib_dir, hdfs_user)
                else:
                    h.put_file_to_hdfs(r, raw_data, lib.name, lib_dir,
                                       hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib.name)
            for lib in builtin_libs:
                h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
                                   hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
        return uploaded_paths
Example 8
    def _upload_job_files_to_hdfs(self, where, job_dir, job, configs,
                                  proxy_configs=None):
        mains = job.mains or []
        libs = job.libs or []
        builtin_libs = edp.get_builtin_binaries(job, configs)
        uploaded_paths = []
        hdfs_user = self.get_hdfs_user()
        job_dir_suffix = 'lib' if job.type != edp.JOB_TYPE_SHELL else ''
        lib_dir = os.path.join(job_dir, job_dir_suffix)

        with remote.get_remote(where) as r:
            for main in mains:
                raw_data = dispatch.get_raw_binary(
                    main, proxy_configs=proxy_configs, remote=r)
                if isinstance(raw_data, dict) and raw_data["type"] == "path":
                    h.copy_from_local(r, raw_data['path'],
                                      job_dir, hdfs_user)
                else:
                    h.put_file_to_hdfs(r, raw_data, main.name,
                                       job_dir, hdfs_user)
                uploaded_paths.append(job_dir + '/' + main.name)
            if len(libs) and job_dir_suffix:
                # HDFS 2.2.0 fails to put file if the lib dir does not exist
                self.create_hdfs_dir(r, lib_dir)
            for lib in libs:
                raw_data = dispatch.get_raw_binary(
                    lib, proxy_configs=proxy_configs, remote=r)
                if isinstance(raw_data, dict) and raw_data["type"] == "path":
                    h.copy_from_local(r, raw_data['path'],
                                      lib_dir, hdfs_user)
                else:
                    h.put_file_to_hdfs(r, raw_data, lib.name,
                                       lib_dir, hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib.name)
            for lib in builtin_libs:
                h.put_file_to_hdfs(r, lib['raw'], lib['name'], lib_dir,
                                   hdfs_user)
                uploaded_paths.append(lib_dir + '/' + lib['name'])
        return uploaded_paths
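
Examples 7 and 8 add a fast path: when get_raw_binary hands back a
{'type': 'path', ...} dict, the binary already sits on the node's local
filesystem and can be copied straight into HDFS instead of being streamed
through put_file_to_hdfs. A hedged sketch of h.copy_from_local, again
assuming the shell pattern from the test in Example 2:

# Hypothetical sketch; only the sudo/copyFromLocal pattern is taken from
# the test assertion in Example 2.
def copy_from_local(r, local_path, hdfs_dir, hdfs_user):
    r.execute_command(
        'sudo su - -c "hadoop dfs -copyFromLocal %s %s" %s'
        % (local_path, hdfs_dir, hdfs_user))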
Example 9
 def _upload_workflow_file(self, where, job_dir, wf_xml, hdfs_user):
     with remote.get_remote(where) as r:
         h.put_file_to_hdfs(r, wf_xml, "workflow.xml", job_dir, hdfs_user)
     return "%s/workflow.xml" % job_dir
Example 10
 def _upload_coordinator_file(self, where, job_dir, wf_xml, hdfs_user):
     with remote.get_remote(where) as r:
         h.put_file_to_hdfs(r, wf_xml, "coordinator.xml", job_dir,
                            hdfs_user)
     return "%s/coordinator.xml" % job_dir