Example #1
    def submit(self):
        """
        Submits a new job

        It returns the information about the new submitted job. To know the format for the
        submission, /api-docs/schema/submit gives the expected format encoded as a JSON-schema.
        It can be used to validate (i.e in Python, jsonschema.validate)
        """
        # First, the request has to be valid JSON
        submitted_dict = get_input_as_dict(request)

        # The auto-generated delegation id must be valid
        user = request.environ['fts3.User.Credentials']
        credential = Session.query(Credential).get((user.delegation_id, user.user_dn))
        if credential is None:
            raise HTTPAuthenticationTimeout('No delegation found for "%s"' % user.user_dn)
        if credential.expired():
            remaining = credential.remaining()
            seconds = abs(remaining.seconds + remaining.days * 24 * 3600)
            raise HTTPAuthenticationTimeout(
                'The delegated credentials expired %d seconds ago (%s)' % (seconds, user.delegation_id)
            )
        if user.method != 'oauth2' and credential.remaining() < timedelta(hours=1):
            raise HTTPAuthenticationTimeout(
                'The delegated credentials have less than one hour left (%s)' % user.delegation_id
            )

        # Populate the job and files
        populated = JobBuilder(user, **submitted_dict)

        log.info("%s (%s) is submitting a transfer job" % (user.user_dn, user.vos[0]))

        # Insert the job
        try:
            try:
                Session.execute(Job.__table__.insert(), [populated.job])
            except IntegrityError:
                raise HTTPConflict('The sid provided by the user is duplicated')
            if len(populated.files):
                Session.execute(File.__table__.insert(), populated.files)
            if len(populated.datamanagement):
                Session.execute(DataManagement.__table__.insert(), populated.datamanagement)
            Session.flush()
            Session.commit()
        except IntegrityError as err:
            Session.rollback()
            raise HTTPConflict('The submission is duplicated ' + str(err))
        except:
            Session.rollback()
            raise

        # Send messages
        # Need to re-query so we get the file ids
        job = Session.query(Job).get(populated.job_id)
        for job_file in job.files:
            try:
                submit_state_change(job, job_file, populated.files[0]['file_state'])
            except Exception as e:
                log.warning("Failed to write state message to disk: %s" % e)
Example #2
    def hosts_activity(self):
        """
        What are the hosts doing
        """
        staging = Session.execute("SELECT COUNT(*), agent_dn "
                                  " FROM t_file "
                                  " WHERE file_state = 'STARTED' "
                                  " GROUP BY agent_dn")
        response = dict()

        for (count, host) in staging:
            response[host] = dict(staging=count)

        active = Session.execute("SELECT COUNT(*), transferHost "
                                 " FROM t_file "
                                 " WHERE file_state = 'ACTIVE' "
                                 " GROUP BY transferHost")
        for (count, host) in active:
            if host not in response:
                response[host] = dict()
            response[host]['active'] = count

        return response
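
For reference, the structure returned by hosts_activity() maps host names to per-state counts,
something like the following (host names and numbers are invented):

# Illustrative shape of the hosts_activity() response; values are made up.
example_response = {
    'fts301.example.org': {'staging': 4, 'active': 12},  # staging files and running transfers
    'fts302.example.org': {'active': 7},                  # only running transfers
}
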
Example #3
                raise HTTPBadRequest('Cannot specify reuse and multiple replicas at the same time')
            job['reuse_job'] = 'R'

        # Update the optimizer
        unique_pairs = {(f['source_se'], f['dest_se']) for f in files}
        for (source_se, dest_se) in unique_pairs:
            optimizer_active = OptimizerActive()
            optimizer_active.source_se = source_se
            optimizer_active.dest_se = dest_se
            optimizer_active.ema = 0
            optimizer_active.datetime = datetime.utcnow()
            Session.merge(optimizer_active)

        # Update the database
        try:
            Session.execute(Job.__table__.insert(), [job])
            if len(files):
                Session.execute(File.__table__.insert(), files)
            if len(datamanagement):
                Session.execute(DataManagement.__table__.insert(), datamanagement)
            Session.commit()
        except:
            Session.rollback()
            raise

        if len(files):
            log.info("Job %s submitted with %d transfers" % (job['job_id'], len(files)))
        else:
            log.info("Job %s submitted with %d data management operations" % (job['job_id'], len(datamanagement)))

        return {'job_id': job['job_id']}
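
A tiny standalone illustration of the de-duplication step used to seed the optimizer above.
The file entries are made up; only source_se and dest_se matter here:

# Made-up file entries: two of them share the same (source_se, dest_se) pair.
files = [
    {'source_se': 'gsiftp://src-a.example.org', 'dest_se': 'gsiftp://dst.example.org'},
    {'source_se': 'gsiftp://src-a.example.org', 'dest_se': 'gsiftp://dst.example.org'},
    {'source_se': 'gsiftp://src-b.example.org', 'dest_se': 'gsiftp://dst.example.org'},
]

unique_pairs = {(f['source_se'], f['dest_se']) for f in files}

# Two distinct pairs remain, so two OptimizerActive rows would be merged.
assert len(unique_pairs) == 2
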
Example #4
def _cancel_transfers(storage=None, vo_name=None):
    """
    Cancels the transfers that have the given storage as either source or destination
    and belong to the given VO.
    Returns the set of affected job ids.
    """
    affected_job_ids = set()
    files = Session.query(File.file_id).filter(
        and_((File.source_se == storage) | (File.dest_se == storage),
             File.file_state.in_(FileActiveStates + ['NOT_USED'])))
    if vo_name and vo_name != '*':
        files = files.filter(File.vo_name == vo_name)

    now = datetime.utcnow()

    try:
        for row in files:
            file_id = row[0]
            job_id, file_index = Session.query(
                File.job_id,
                File.file_index).filter(File.file_id == file_id).one()
            affected_job_ids.add(job_id)
            # Cancel the affected file
            Session.query(File).filter(File.file_id == file_id)\
                .update({
                    'file_state': 'CANCELED', 'reason': 'Storage banned',
                    'finish_time': now, 'dest_surl_uuid': None
                }, synchronize_session=False)
            # If there are alternatives, enable them
            if Session.bind.dialect.name == 'mysql':
                limit = " LIMIT 1"
            else:
                limit = ''
            Session.execute(
                "UPDATE t_file SET"
                "   file_state = 'SUBMITTED' "
                "WHERE"
                "  job_id = :job_id AND file_index = :file_index AND file_state = 'NOT_USED' "
                + limit, dict(job_id=job_id, file_index=file_index))

        Session.commit()
        Session.expire_all()
    except Exception:
        Session.rollback()
        raise

    # Move each affected job to a terminal state if needed
    try:
        for job_id in affected_job_ids:
            n_files = Session.query(func.count(distinct(
                File.file_id))).filter(File.job_id == job_id).all()[0][0]
            n_canceled = Session.query(func.count(distinct(File.file_id)))\
                .filter(File.job_id == job_id).filter(File.file_state == 'CANCELED').all()[0][0]
            n_finished = Session.query(func.count(distinct(File.file_id)))\
                .filter(File.job_id == job_id).filter(File.file_state == 'FINISHED').all()[0][0]
            n_failed = Session.query(func.count(distinct(File.file_id)))\
                .filter(File.job_id == job_id).filter(File.file_state == 'FAILED').all()[0][0]

            n_terminal = n_canceled + n_finished + n_failed

            # Job finished!
            if n_terminal == n_files:
                reason = None
                Session.query(Job).filter(Job.job_id == job_id).update({
                    'job_state': 'CANCELED',
                    'job_finished': now,
                    'reason': reason
                })

        Session.commit()
    except Exception:
        Session.rollback()
        raise
    return affected_job_ids
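
A minimal usage sketch of _cancel_transfers(). The storage endpoint and VO name are
illustrative values, not taken from the source; log is the module-level logger used elsewhere
in these examples:

# Hypothetical call: cancel everything touching a banned storage for one VO.
affected = _cancel_transfers(storage='gsiftp://se.example.org', vo_name='dteam')
for job_id in affected:
    log.info("Job %s had its transfers canceled because of a storage ban" % job_id)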