Code example #1
0
    def transfer_files(self):
        """Transfer the job's input files to the PBS submission node.

        Creates a job-specific sub-directory under the platform's storage
        job directory (via SFTP, using SAGA-Python) and copies each
        configured input file into it, recording the resulting remote
        paths in ``self.transferred_input_files``.

        Raises:
            JobError: if the base job directory does not exist, the
                job-specific directory already exists, or an input file
                cannot be copied to the remote platform.
        """
        JobDeploymentBase.transfer_files(self)
        # Use SAGA-Python to handle the file transfer to the target platform.
        LOG.debug('Transfer files...')
        job_dir = self.platform_config.storage_job_directory
        host = self.platform_config.platform_service_host

        try:
            directory = Directory('sftp://%s%s' % (host, job_dir),
                                  session=self.session)
        except saga.BadParameter as e:
            LOG.error('The specified job directory does not exist on PBS '
                      'submission node <%s> (%s).' % (host, str(e)))
            # Fixed: the original two-part literal was missing a space and
            # rendered as "...on PBSsubmission node...".
            raise JobError('The specified job directory does not exist on '
                           'PBS submission node <%s> (%s)'
                           % (host, str(e))) from e

        try:
            # directory.make_dir() does not return a handle to the new
            # directory so the directory URL has to be built manually.
            directory.make_dir(self.job_config.job_id)
            job_data_dir = os.path.join(str(directory.url),
                                        self.job_config.job_id)
        except saga.NoSuccess as e:
            LOG.error('The specified job data directory already exists on '
                      'PBS submission node <%s> (%s).' % (host, str(e)))
            # Fixed: same missing-space bug ("...on PBSsubmission node...").
            raise JobError('The specified job directory already exists on '
                           'PBS submission node <%s> (%s)'
                           % (host, str(e))) from e

        # Some jobs have no input files (they may, for example, pull their
        # input from a remote location as part of the job process) so there
        # may be nothing to transfer.
        if not self.job_config.input_files:
            LOG.debug('There are no input files to transfer for this job...')
            return

        # The destination directory is the same for every file - compute it
        # once instead of on every loop iteration.
        dest_dir = os.path.join(directory.url.path, self.job_config.job_id)

        # Upload the file(s) to the job data directory and record the
        # resulting remote locations.
        self.transferred_input_files = []
        for f in self.job_config.input_files:
            try:
                f_obj = File('file://%s' % f, session=self.session)
                f_obj.copy(job_data_dir)
                self.transferred_input_files.append(
                    os.path.join(dest_dir, os.path.basename(f_obj.url.path)))
            except Exception as e:
                # Narrowed from a bare "except:" so that SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                LOG.error('Error copying the input file <%s> to the remote '
                          'platform.' % f)
                raise JobError('Error copying the input file <%s> to the '
                               'remote platform.' % f) from e
Code example #2
0
 def transfer_files(self):
     """Transfer input files for the job.

     This deployer adds no transfer behaviour of its own; it delegates
     directly to ``JobDeploymentBase.transfer_files``.
     """
     JobDeploymentBase.transfer_files(self)
Code example #3
0
    def transfer_files(self):
        """Transfer the job's input files to the remote SSH resource.

        Creates a job-specific sub-directory under the platform's storage
        job directory (via SFTP, using SAGA-Python) and copies each
        configured input file into it, recording the resulting remote
        paths in ``self.transferred_input_files``.

        Raises:
            ConnectionError: if the remote resource refuses the connection.
            StorageDirectoryNotFoundError: if the base job directory does
                not exist on the resource.
            DirectoryExistsError: if the job-specific directory already
                exists on the resource.
            JobError: if an input file cannot be copied to the resource.
        """
        JobDeploymentBase.transfer_files(self)
        LOG.debug('SSH Deployer: Transfer files...')
        # Use SAGA-Python to handle the file transfer to the target platform.
        job_dir = self.platform_config.storage_job_directory

        # Check that the job storage directory exists and then create a
        # sub-directory specifically for this job.
        try:
            LOG.debug('URL for file transfer: <sftp://%s:%s%s>' %
                      (self.host, self.port, job_dir))
            directory = Directory('sftp://%s:%s%s' %
                                  (self.host, self.port, job_dir),
                                  session=self.session)
        except saga.BadParameter as e:
            LOG.error('Error setting up connection to resource directory.')
            if 'connection refused' in str(e).lower():
                raise ConnectionError('Unable to connect to remote resource '
                                      'to set up connection to '
                                      'directory.') from e

            raise StorageDirectoryNotFoundError(
                'The specified job data base '
                'directory does not exist on resource <%s> (%s)' %
                (self.host, str(e))) from e
        try:
            # directory.make_dir() does not return a handle to the new
            # directory so the directory URL has to be built manually.
            directory.make_dir(self.job_config.job_id)
            job_data_dir = os.path.join(str(directory.url),
                                        self.job_config.job_id)
        except saga.NoSuccess as e:
            LOG.error('The specified job data directory already exists on '
                      'resource <%s> (%s).' % (self.host, str(e)))
            raise DirectoryExistsError('The specified job directory already '
                                       'exists on resource <%s> (%s)' %
                                       (self.host, str(e))) from e

        # Some jobs have no input files (they may, for example, pull their
        # input from a remote location as part of the job process) so there
        # may be nothing to transfer.
        if not self.job_config.input_files:
            LOG.debug('There are no input files to transfer for this job...')
            return

        # The destination directory is the same for every file - compute it
        # once instead of on every loop iteration.
        dest_dir = os.path.join(directory.url.path, self.job_config.job_id)

        # Upload the file(s) to the job data directory and record the
        # resulting remote locations.
        self.transferred_input_files = []
        for f in self.job_config.input_files:
            try:
                f_obj = File('file://%s' % f, session=self.session)
                f_obj.copy(job_data_dir)
                self.transferred_input_files.append(
                    os.path.join(dest_dir, os.path.basename(f_obj.url.path)))
            except Exception as e:
                # Narrowed from a bare "except:" so that SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                LOG.error('Error copying the input file <%s> to the remote '
                          'platform.' % f)
                raise JobError('Error copying the input file <%s> to the '
                               'remote platform.' % f) from e
Code example #4
0
    def transfer_files(self):
        """Transfer the job's input files to the master cloud node.

        Switches the session back to the job security context, pushes the
        input files to the master node's job data directory via SFTP and,
        when slave nodes are present, directs the master to distribute the
        data to them. Remote paths of transferred files are recorded in
        ``self.transferred_input_files``.

        Raises:
            JobError: if the base job directory is not accessible on the
                master node or an input file cannot be copied to it.
        """
        JobDeploymentBase.transfer_files(self)
        # Use SAGA-Python to handle the file transfer to the target platform.
        LOG.debug('Transfer files...')
        job_dir = self.platform_config.storage_job_directory

        # At this point we need to switch back to using the job security
        # context. If we were using unconfigured resources, these will have
        # been configured using an admin context by now.
        self.session = saga.Session(default=False)
        self.session.add_context(self.job_ctx)

        # With more than one instance we have a master and one or more slave
        # nodes. We push the data to the master and then direct the master
        # to distribute it to the slave nodes.
        master_node = self.running_nodes[0][0]
        slave_nodes = []
        if len(self.running_nodes) > 1:
            slave_nodes = [node[0] for node in self.running_nodes[1:]]

        # On the master node: check that the job storage directory exists
        # and then create a sub-directory specifically for this job.
        # Each running-node entry is a (node object, IP list) tuple; we work
        # with the node object directly.
        node_ip = master_node.public_ips[0]
        try:
            directory = Directory('sftp://%s%s' % (node_ip, job_dir),
                                  session=self.session)
        except saga.BadParameter as e:
            LOG.error('The specified job directory does not exist on node '
                      '<%s> (%s).' % (node_ip, str(e)))
            # Fixed: the original commented out this raise, which left
            # <directory> unbound and caused a NameError on the next
            # statement instead of a meaningful error.
            raise JobError('The specified job directory does not exist '
                           'on node <%s> (%s)' % (node_ip, str(e))) from e
        try:
            # directory.make_dir() does not return a handle to the new
            # directory so the directory URL has to be built manually.
            directory.make_dir(self.job_config.job_id)
        except saga.NoSuccess as e:
            # Deliberately tolerated: a pre-existing job directory on the
            # node is only worth a warning here, the directory handle is
            # still valid and usable.
            LOG.warning('The specified job data directory already exists on '
                        'node <%s> (%s).' % (node_ip, str(e)))

        job_data_dir = os.path.join(str(directory.url), self.job_config.job_id)

        # Some jobs have no input files (they may, for example, pull their
        # input from a remote location as part of the job process) so there
        # may be nothing to transfer.
        if not self.job_config.input_files:
            LOG.debug('There are no input files to transfer for this job...')
            return

        # The destination directory is the same for every file - compute it
        # once instead of on every loop iteration.
        dest_dir = os.path.join(directory.url.path, self.job_config.job_id)

        # Upload the file(s) to the job data directory and record the
        # resulting remote locations.
        self.transferred_input_files = []
        for f in self.job_config.input_files:
            try:
                f_obj = File('file://%s' % f, session=self.session)
                f_obj.copy(job_data_dir)
                self.transferred_input_files.append(
                    os.path.join(dest_dir, os.path.basename(f_obj.url.path)))
            except Exception as e:
                # Narrowed from a bare "except:" so that SystemExit and
                # KeyboardInterrupt are no longer swallowed.
                LOG.error('Error copying the input file <%s> to the remote '
                          'platform.' % f)
                raise JobError('Error copying the input file <%s> to the '
                               'remote platform.' % f) from e

        # Input files are now on the master node; direct the master to send
        # them to each of the slave nodes.
        if slave_nodes:
            slave_private_ips = [node.private_ips[0] for node in slave_nodes]
            self._distribute_job_data(master_node.public_ips[0],
                                      slave_private_ips,
                                      self.platform_config.user_id,
                                      self.platform_config.user_key_file,
                                      job_dir, self.job_config.job_id)