Example 1
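This method stages a task's volumes over SSH: it recreates the task's work directory on the host node and, for worker tasks whose data is not already mounted from the local host, copies the training data from HDFS in parallel. The snippet assumes `import os` and `import threading` at module level, plus a `Threads` thread-pool helper that the source does not show (a hedged sketch follows the example).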
    def _mount_volume(self):
        '''Prepare the host directories that are mounted into containers.'''
        if self.volume_mounted:
            return

        # recreate the work dir on the host, removing any stale contents first
        (work_dir, host_work_dir, name) = self.work_volume
        cmd = 'ssh ' + self.node + ' "sudo rm -rf ' + host_work_dir + '; mkdir -p ' + host_work_dir + '"'
        os.system(cmd)

        self.volume_mounted = True

        # only worker tasks read data from HDFS, and only when the data is not
        # already mounted from the local host
        if self.role == "ps":
            return
        if self.local_mounted:
            return
        if self.hdfs_url is None or self.hdfs_url == '':
            raise ValueError('HDFS data URL is not specified')

        (data_dir, host_data_dir, name) = self.data_volume
        pool = Threads()
        for data in self.hdfs_url:
            fn = data.split("/")[-1]
            local_file = host_data_dir + fn  # host_data_dir is assumed to end with '/'
            # force copy even if the file exists: a file may be broken due to an interrupted copy
            cmd = 'ssh ' + self.node + ' "/usr/local/hadoop/bin/hadoop fs -copyToLocal -f ' + data + ' ' + local_file + '"'
            # bind cmd as a default argument so each thread runs its own command;
            # run it through the pool (not synchronously here) so the copies overlap
            thread = threading.Thread(target=(lambda cmd=cmd: os.system(cmd)),
                                      args=())
            pool.add(thread)
        pool.start()
        pool.wait()
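Both examples call into a `Threads` helper whose definition the source does not include. Its real implementation is unknown; a minimal sketch consistent with the `add`/`start`/`wait` calls above might be:

import threading

class Threads:
    '''Minimal thread-pool helper (assumed; not part of the original source).

    Matches the usage in the examples: add() collects threads,
    start() launches them all, wait() joins them all.
    '''
    def __init__(self):
        self.threads = []

    def add(self, thread):
        # collect a threading.Thread that has not been started yet
        self.threads.append(thread)

    def start(self):
        # launch every collected thread
        for t in self.threads:
            t.start()

    def wait(self):
        # block until every thread has finished
        for t in self.threads:
            t.join()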
Example 2
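This method tears the task down: it deletes the Kubernetes resources described by the task's YAML file via `kubectl`, and, when `all` is truthy, also removes the task's work directory on the host node. (The parameter name `all` shadows the built-in of the same name; it is kept here as in the source.) Like Example 1, it assumes `os`, `threading`, and the `Threads` helper.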
    def delete(self, all):
        '''Delete the task; if all is set, also remove its host work directory.'''
        pool = Threads()
        # wrap the command in a lambda: passing os.system(...) directly would
        # run it immediately and hand its integer exit status to Thread as the target
        thread = threading.Thread(target=(lambda: os.system('kubectl delete -f ' +
                                                            self.yaml)),
                                  args=())
        pool.add(thread)

        if all:
            # also remove the host work directory; bound the ssh call with a 10s timeout
            (work_dir, host_work_dir, name) = self.work_volume
            cmd = 'timeout 10 ssh ' + self.node + ' "sudo rm -r ' + host_work_dir + '"'
            thread = threading.Thread(target=(lambda cmd=cmd: os.system(cmd)),
                                      args=())
            pool.add(thread)

        pool.start()
        pool.wait()
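A design note: both snippets splice values into shell strings and run them with `os.system`, which ignores exit codes and breaks on paths containing spaces or quotes. A hedged alternative sketch using only the standard library (the `run_remote` helper name is hypothetical, not from the source):

import subprocess

def run_remote(node, remote_cmd, timeout=None):
    '''Run a command on a node over SSH, raising on failure.

    Hypothetical helper for illustration; node and remote_cmd come from
    the caller, e.g. run_remote(self.node, 'mkdir -p ' + host_work_dir).
    '''
    # an argument list avoids local shell re-parsing; the remote side
    # still interprets remote_cmd as a shell command
    subprocess.run(['ssh', node, remote_cmd], check=True, timeout=timeout)

Here `check=True` turns a non-zero exit status into a `CalledProcessError`, and the `timeout` argument plays the role of the external `timeout 10` prefix used in Example 2.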