Example n. 1
0
def submit_transfer(external_host,
                    job,
                    submitter='submitter',
                    process=0,
                    thread=0,
                    timeout=None):
    """
    Submit a transfer or staging request

    :param external_host:   FTS server to submit to.
    :param job:             Job dictionary.
    :param submitter:       Name of the submitting entity.
    :param process:         Process which submits.
    :param thread:          Thread which submits.
    :param timeout:         Timeout
    """

    # Prepare submitting: move every request of the job to the SUBMITTING state.
    xfers_ret = {}
    try:
        for t_file in job['files']:  # renamed from 'file' to avoid shadowing the builtin
            file_metadata = t_file['metadata']
            request_id = file_metadata['request_id']
            log_str = '%s:%s PREPARING REQUEST %s DID %s:%s TO SUBMITTING STATE PREVIOUS %s FROM %s TO %s USING %s ' % (
                process, thread, file_metadata['request_id'],
                file_metadata['scope'], file_metadata['name'],
                file_metadata.get('previous_attempt_id'),
                t_file['sources'], t_file['destinations'], external_host)
            xfers_ret[request_id] = {
                'state': RequestState.SUBMITTING,
                'external_host': external_host,
                'external_id': None,
                'dest_url': t_file['destinations'][0]
            }
            logging.info("%s", log_str)
            xfers_ret[request_id]['file'] = t_file
        logging.debug("%s:%s start to prepare transfer", process, thread)
        transfer_core.prepare_sources_for_transfers(xfers_ret)
        logging.debug("%s:%s finished to prepare transfer", process, thread)
    except Exception:
        # BUGFIX: was a bare 'except:' which also swallowed SystemExit/KeyboardInterrupt.
        logging.error(
            "%s:%s Failed to prepare requests %s state to SUBMITTING(Will not submit jobs but return directly) with error: %s",
            process, thread, list(xfers_ret.keys()), traceback.format_exc())
        return

    # Submit the job to the external transfer service.
    eid = None
    try:
        ts = time.time()
        logging.info("%s:%s About to submit job to %s with timeout %s",
                     process, thread, external_host, timeout)
        eid = transfer_core.submit_bulk_transfers(external_host,
                                                  files=job['files'],
                                                  transfertool='fts3',
                                                  job_params=job['job_params'],
                                                  timeout=timeout)
        duration = time.time() - ts
        logging.info("%s:%s Submit job %s to %s in %s seconds",
                     process, thread, eid, external_host, duration)
        # Per-submitter metrics: per-file latency, total calls and file count.
        record_timer(
            'daemons.conveyor.%s.submit_bulk_transfer.per_file' % submitter,
            (time.time() - ts) * 1000 / len(job['files']))
        record_counter('daemons.conveyor.%s.submit_bulk_transfer' % submitter,
                       len(job['files']))
        record_timer(
            'daemons.conveyor.%s.submit_bulk_transfer.files' % submitter,
            len(job['files']))
    except Exception as ex:
        # BUGFIX: 'except Exception, ex' is Python-2-only syntax (SyntaxError on py3).
        logging.error("Failed to submit a job with error %s: %s",
                      str(ex), traceback.format_exc())
Example n. 2
0
def submit_transfer(external_host, job, submitter='submitter', timeout=None, user_transfer_job=False, logger=logging.log, transfertool=TRANSFER_TOOL):
    """
    Submit a transfer or staging request

    :param external_host:         FTS server to submit to.
    :param job:                   Job dictionary.
    :param submitter:             Name of the submitting entity.
    :param timeout:               Timeout
    :param user_transfer_job:     Parameter for transfer with user credentials
    :param logger:                Optional decorated logger that can be passed from the calling daemons or servers.
    :param transfertool:          Transfer tool used for the submission (defaults to the configured TRANSFER_TOOL).
    """

    # Prepare submitting: move every request of the job to the SUBMITTING state.
    xfers_ret = {}

    try:
        for t_file in job['files']:
            file_metadata = t_file['metadata']
            request_id = file_metadata['request_id']
            log_str = 'PREPARING REQUEST %s DID %s:%s TO SUBMITTING STATE PREVIOUS %s FROM %s TO %s USING %s ' % (file_metadata['request_id'],
                                                                                                                  file_metadata['scope'],
                                                                                                                  file_metadata['name'],
                                                                                                                  file_metadata.get('previous_attempt_id'),
                                                                                                                  t_file['sources'],
                                                                                                                  t_file['destinations'],
                                                                                                                  external_host)
            xfers_ret[request_id] = {'state': RequestState.SUBMITTING, 'external_host': external_host, 'external_id': None, 'dest_url': t_file['destinations'][0]}
            logger(logging.INFO, "%s", log_str)
            xfers_ret[request_id]['file'] = t_file
        logger(logging.DEBUG, 'Start to prepare transfer')
        transfer_core.prepare_sources_for_transfers(xfers_ret)
        logger(logging.DEBUG, 'Finished to prepare transfer')
    except RequestNotFound as error:
        logger(logging.ERROR, str(error))
        return
    except Exception:
        # BUGFIX: the original format string had two '%s' placeholders but was given
        # a single bare list, raising a TypeError inside this error handler.
        logger(logging.ERROR, 'Failed to prepare requests %s state to SUBMITTING (Will not submit jobs but return directly) with error' % list(xfers_ret.keys()), exc_info=True)
        return

    # Prepare the dictionary of per-request results used when registering states.
    xfers_ret = {}
    for t_file in job['files']:
        file_metadata = t_file['metadata']
        request_id = file_metadata['request_id']
        xfers_ret[request_id] = {'scope': file_metadata['scope'],
                                 'name': file_metadata['name'],
                                 'external_host': external_host,
                                 'external_id': None,
                                 'request_type': t_file.get('request_type', None),
                                 'dst_rse': file_metadata.get('dst_rse', None),
                                 'src_rse': file_metadata.get('src_rse', None),
                                 'src_rse_id': file_metadata['src_rse_id'],
                                 'metadata': file_metadata}

    # Submit the job
    try:
        start_time = time.time()
        logger(logging.INFO, 'About to submit job to %s with timeout %s' % (external_host, timeout))
        # An eid is returned if the job is properly submitted otherwise an exception is raised
        eid = transfer_core.submit_bulk_transfers(external_host,
                                                  files=job['files'],
                                                  transfertool=transfertool,
                                                  job_params=job['job_params'],
                                                  timeout=timeout,
                                                  user_transfer_job=user_transfer_job)
        duration = time.time() - start_time
        logger(logging.INFO, 'Submit job %s to %s in %s seconds' % (eid, external_host, duration))
        record_timer('daemons.conveyor.%s.submit_bulk_transfer.per_file' % submitter, (time.time() - start_time) * 1000 / len(job['files']))
        record_counter('daemons.conveyor.%s.submit_bulk_transfer' % submitter, len(job['files']))
        record_timer('daemons.conveyor.%s.submit_bulk_transfer.files' % submitter, len(job['files']))

        # Update all the requests to SUBMITTED and cancel the transfer job in case the update failed
        try:
            for t_file in job['files']:
                file_metadata = t_file['metadata']
                request_id = file_metadata['request_id']
                xfers_ret[request_id]['state'] = RequestState.SUBMITTED
                xfers_ret[request_id]['external_id'] = eid
                logger(logging.INFO, 'COPYING REQUEST %s DID %s:%s USING %s with state(%s) with eid(%s)' % (file_metadata['request_id'], file_metadata['scope'], file_metadata['name'], external_host, RequestState.SUBMITTED, eid))
            logger(logging.DEBUG, 'Start to bulk register transfer state for eid %s' % eid)
            transfer_core.set_transfers_state(xfers_ret, datetime.datetime.utcnow())
            logger(logging.DEBUG, 'Finished to register transfer state')
        except Exception:
            logger(logging.ERROR, 'Failed to register transfer state with error', exc_info=True)
            try:
                logger(logging.INFO, 'Cancel transfer %s on %s', eid, external_host)
                request.cancel_request_external_id(eid, external_host)
            except Exception:
                # The job is still submitted in the file transfer service but the request is not updated. Possibility to have a double submission during the next cycle
                logger(logging.ERROR, 'Failed to cancel transfers %s on %s with error' % (eid, external_host), exc_info=True)

    # This exception is raised if one job is already submitted for one file
    except DuplicateFileTransferSubmission as error:
        logger(logging.WARNING, 'Failed to submit a job because of duplicate file : %s', str(error))
        logger(logging.INFO, 'Submitting files one by one')

        try:
            # In this loop we submit the jobs and update the requests state one by one
            single_xfers_ret = {}
            for t_file in job['files']:
                file_metadata = t_file['metadata']
                request_id = file_metadata['request_id']
                # BUGFIX: request_id must come from the *current* file before seeding
                # single_xfers_ret; the original read the previous iteration's id.
                single_xfers_ret = {request_id: xfers_ret[request_id]}
                start_time = time.time()
                logger(logging.INFO, 'About to submit job to %s with timeout %s', external_host, timeout)
                eid = transfer_core.submit_bulk_transfers(external_host,
                                                          files=[t_file],
                                                          transfertool=transfertool,
                                                          job_params=job['job_params'],
                                                          timeout=timeout,
                                                          user_transfer_job=user_transfer_job)
                duration = time.time() - start_time
                logger(logging.INFO, 'Submit job %s to %s in %s seconds' % (eid, external_host, duration))
                record_timer('daemons.conveyor.%s.submit_bulk_transfer.per_file' % submitter, (time.time() - start_time) * 1000)
                record_counter('daemons.conveyor.%s.submit_bulk_transfer' % submitter, 1)
                record_timer('daemons.conveyor.%s.submit_bulk_transfer.files' % submitter, 1)
                single_xfers_ret[request_id]['state'] = RequestState.SUBMITTED
                single_xfers_ret[request_id]['external_id'] = eid
                logger(logging.INFO, 'COPYING REQUEST %s DID %s:%s USING %s with state(%s) with eid(%s)' % (file_metadata['request_id'], file_metadata['scope'], file_metadata['name'], external_host, RequestState.SUBMITTED, eid))
                try:
                    logger(logging.DEBUG, 'Start to register transfer state')
                    transfer_core.set_transfers_state(single_xfers_ret, datetime.datetime.utcnow())
                    logger(logging.DEBUG, 'Finished to register transfer state')
                except Exception:
                    logger(logging.ERROR, 'Failed to register transfer state with error', exc_info=True)
                    try:
                        logger(logging.INFO, 'Cancel transfer %s on %s', eid, external_host)
                        request.cancel_request_external_id(eid, external_host)
                    except Exception:
                        logger(logging.ERROR, 'Failed to cancel transfers %s on %s with error' % (eid, external_host), exc_info=True)

        except Exception as error:
            # (The original tuple listed specific exceptions plus Exception, which made
            # the specific entries redundant.)
            # BUGFIX: dict.keys() is not subscriptable on Python 3; take the single
            # pending request id via iteration instead.
            request_id = next(iter(single_xfers_ret))
            single_xfers_ret[request_id]['state'] = RequestState.SUBMISSION_FAILED
            logger(logging.ERROR, 'Cannot submit the job for request %s : %s', request_id, str(error))
            try:
                logger(logging.DEBUG, 'Update the transfer state to fail')
                transfer_core.set_transfers_state(single_xfers_ret, datetime.datetime.utcnow())
                logger(logging.DEBUG, 'Finished to register transfer state')
            except Exception:
                logger(logging.ERROR, 'Failed to register transfer state with error', exc_info=True)
                # No need to cancel the job here

    # The following exceptions are raised if the job failed to be submitted
    except (TransferToolTimeout, TransferToolWrongAnswer) as error:
        logger(logging.ERROR, str(error))
        try:
            for t_file in job['files']:
                file_metadata = t_file['metadata']
                request_id = file_metadata['request_id']
                xfers_ret[request_id]['state'] = RequestState.SUBMISSION_FAILED
            logger(logging.DEBUG, 'Start to register transfer state')
            transfer_core.set_transfers_state(xfers_ret, datetime.datetime.utcnow())
            logger(logging.DEBUG, 'Finished to register transfer state')
        except Exception:
            logger(logging.ERROR, 'Failed to register transfer state with error', exc_info=True)
            # No need to cancel the job here

    except Exception as error:
        logger(logging.ERROR, 'Failed to submit a job with error %s', str(error), exc_info=True)
Example n. 3
0
def submit_transfer(external_host,
                    job,
                    submitter='submitter',
                    logging_prepend_str='',
                    timeout=None,
                    user_transfer_job=False):
    """
    Submit a transfer or staging request

    :param external_host:         FTS server to submit to.
    :param job:                   Job dictionary.
    :param submitter:             Name of the submitting entity.
    :param logging_prepend_str:   String to prepend to the logging
    :param timeout:               Timeout
    :param user_transfer_job:     Parameter for transfer with user credentials
    """

    prepend_str = logging_prepend_str or ''

    # Prepare submitting: move every request of the job to the SUBMITTING state.
    xfers_ret = {}

    try:
        for t_file in job['files']:  # renamed from 'file' to avoid shadowing the builtin
            file_metadata = t_file['metadata']
            request_id = file_metadata['request_id']
            log_str = prepend_str + 'PREPARING REQUEST %s DID %s:%s TO SUBMITTING STATE PREVIOUS %s FROM %s TO %s USING %s ' % (
                file_metadata['request_id'], file_metadata['scope'],
                file_metadata['name'],
                file_metadata.get('previous_attempt_id'),
                t_file['sources'], t_file['destinations'], external_host)
            xfers_ret[request_id] = {
                'state': RequestState.SUBMITTING,
                'external_host': external_host,
                'external_id': None,
                'dest_url': t_file['destinations'][0]
            }
            logging.info("%s", log_str)
            xfers_ret[request_id]['file'] = t_file
        logging.debug(prepend_str + 'Start to prepare transfer')
        transfer_core.prepare_sources_for_transfers(xfers_ret)
        logging.debug(prepend_str + 'Finished to prepare transfer')
    except Exception:
        logging.error(
            prepend_str +
            'Failed to prepare requests %s state to SUBMITTING(Will not submit jobs but return directly) with error: %s'
            % (list(xfers_ret.keys()), traceback.format_exc()))
        return

    # Submit the job. eid stays None on failure so the registration step below
    # marks the requests SUBMISSION_FAILED instead of SUBMITTED.
    eid = None
    try:
        start_time = time.time()
        logging.info(prepend_str +
                     'About to submit job to %s with timeout %s' %
                     (external_host, timeout))
        eid = transfer_core.submit_bulk_transfers(
            external_host,
            files=job['files'],
            transfertool='fts3',
            job_params=job['job_params'],
            timeout=timeout,
            user_transfer_job=user_transfer_job)
        duration = time.time() - start_time
        logging.info(prepend_str + 'Submit job %s to %s in %s seconds' %
                     (eid, external_host, duration))
        # Per-submitter metrics: per-file latency, total calls and file count.
        record_timer(
            'daemons.conveyor.%s.submit_bulk_transfer.per_file' % submitter,
            (time.time() - start_time) * 1000 / len(job['files']))
        record_counter('daemons.conveyor.%s.submit_bulk_transfer' % submitter,
                       len(job['files']))
        record_timer(
            'daemons.conveyor.%s.submit_bulk_transfer.files' % submitter,
            len(job['files']))
    except Exception as error:
        logging.error(prepend_str +
                      'Failed to submit a job with error %s: %s' %
                      (str(error), traceback.format_exc()))

    # Register the final transfer state: SUBMITTED when an eid exists,
    # SUBMISSION_FAILED otherwise.
    xfers_ret = {}
    try:
        for t_file in job['files']:
            file_metadata = t_file['metadata']
            request_id = file_metadata['request_id']
            log_str = prepend_str + 'COPYING REQUEST %s DID %s:%s USING %s' % (
                file_metadata['request_id'], file_metadata['scope'],
                file_metadata['name'], external_host)
            # Common fields; state/external_id depend on whether submission succeeded.
            xfers_ret[request_id] = {
                'scope': file_metadata['scope'],
                'name': file_metadata['name'],
                'state': RequestState.SUBMITTED if eid else RequestState.SUBMISSION_FAILED,
                'external_host': external_host,
                'external_id': eid if eid else None,
                'request_type': t_file.get('request_type', None),
                'dst_rse': file_metadata.get('dst_rse', None),
                'src_rse': file_metadata.get('src_rse', None),
                'src_rse_id': file_metadata['src_rse_id'],
                'metadata': file_metadata
            }
            if eid:
                log_str += 'with state(%s) with eid(%s)' % (
                    RequestState.SUBMITTED, eid)
                logging.info("%s", log_str)
            else:
                log_str += 'with state(%s) with eid(%s)' % (
                    RequestState.SUBMISSION_FAILED, None)
                # BUGFIX: logging.warn is a deprecated alias of logging.warning.
                logging.warning("%s", log_str)
        logging.debug(prepend_str + 'Start to register transfer state')
        transfer_core.set_transfers_state(xfers_ret,
                                          datetime.datetime.utcnow())
        logging.debug(prepend_str + 'Finished to register transfer state')
    except Exception:
        logging.error(prepend_str +
                      'Failed to register transfer state with error: %s' %
                      (traceback.format_exc()))
        try:
            # Cancel the submitted job so the requests can be retried cleanly.
            if eid:
                logging.info(prepend_str + 'Cancel transfer %s on %s' %
                             (eid, external_host))
                request.cancel_request_external_id(eid, external_host)
        except Exception:
            logging.error(
                prepend_str +
                'Failed to cancel transfers %s on %s with error: %s' %
                (eid, external_host, traceback.format_exc()))