Example #1
0
def get_transfer_list(http_request):
    """
    Build a queryset of transfers, narrowed by the request's filters and
    ordered according to the request's ordering parameters.
    """
    filters = setup_filters(http_request)

    transfers = File.objects
    # State is special-cased: an explicit state list filters on it,
    # otherwise NOT_USED entries are hidden
    if filters['state']:
        transfers = transfers.filter(file_state__in=filters['state'])
    else:
        transfers = transfers.exclude(file_state='NOT_USED')

    # Simple equality filters: (filter key, model field)
    for filter_key, field_name in (
            ('source_se', 'source_se'),
            ('dest_se', 'dest_se'),
            ('source_surl', 'source_surl'),
            ('dest_surl', 'dest_surl'),
            ('vo', 'vo_name')):
        value = filters[filter_key]
        if value:
            transfers = transfers.filter(**{field_name: value})

    if filters['time_window']:
        not_before = datetime.utcnow() - timedelta(
            hours=filters['time_window'])
        # Avoid querying for job_finished is NULL if there are no active states
        if _contains_active_state(filters['state']):
            transfers = transfers.filter(
                Q(finish_time__isnull=True) | (Q(finish_time__gte=not_before)))
        else:
            transfers = transfers.filter(Q(finish_time__gte=not_before))

    for filter_key, field_name in (
            ('activity', 'activity'),
            ('hostname', 'transfer_host'),
            ('reason', 'reason')):
        value = filters[filter_key]
        if value:
            transfers = transfers.filter(**{field_name: value})

    transfers = transfers.values('file_id', 'file_state', 'job_id',
                                 'source_se', 'dest_se', 'start_time',
                                 'finish_time', 'activity', 'user_filesize',
                                 'filesize')

    # Ordering: map the request's ordering key to a model field,
    # defaulting to most recently finished first
    (order_by, order_desc) = get_order_by(http_request)
    order_fields = {
        'id': 'file_id',
        'start_time': 'start_time',
        'finish_time': 'finish_time',
    }
    if order_by in order_fields:
        transfers = transfers.order_by(
            ordered_field(order_fields[order_by], order_desc))
    else:
        transfers = transfers.order_by('-finish_time')

    return transfers
Example #2
0
def get_deletion(http_request):
    """
    Return an overview of deletion operations (t_dm) grouped by
    (source_se, vo_name).

    Non-terminal states (DELETE, STARTED) are counted unconditionally;
    terminal states (FINISHED, FAILED, CANCELED) are only counted within
    the requested time window (default: the last hour).
    """
    filters = setup_filters(http_request)
    if filters['time_window']:
        not_before = datetime.utcnow() - timedelta(
            hours=filters['time_window'])
    else:
        # Default time window: one hour
        not_before = datetime.utcnow() - timedelta(hours=1)

    cursor = connection.cursor()

    # Optional WHERE clauses; the values are passed as bind parameters
    pairs_filter = ""
    se_params = []
    if filters['source_se']:
        pairs_filter += " AND source_se = %s "
        se_params.append(filters['source_se'])
    if filters['vo']:
        pairs_filter += " AND vo_name = %s "
        se_params.append(filters['vo'])

    # (source_se, vo_name) -> {lowercased state name: count}
    triplets = dict()

    # Non terminal states
    query = """
    SELECT COUNT(file_state) as count, file_state, source_se, vo_name
    FROM t_dm
    WHERE file_state in ('DELETE', 'STARTED') %s
    GROUP BY file_state, source_se, vo_name order by NULL
    """ % pairs_filter
    cursor.execute(query, se_params)
    for row in cursor.fetchall():
        triplet_key = (row[2], row[3])
        triplet = triplets.get(triplet_key, dict())
        triplet[row[1].lower()] = row[0]
        triplets[triplet_key] = triplet

    # Terminal states, restricted to the time window
    query = """
    SELECT COUNT(file_state) as count, file_state, source_se, vo_name
    FROM t_dm
    WHERE file_state in ('FINISHED', 'FAILED', 'CANCELED') %s
        AND finish_time > %%s
    GROUP BY file_state, source_se, vo_name order by NULL
    """ % pairs_filter
    cursor.execute(query,
                   se_params + [not_before.strftime('%Y-%m-%d %H:%M:%S')])
    for row in cursor.fetchall():
        triplet_key = (row[2], row[3])  # source_se, vo_name
        triplet = triplets.get(triplet_key, dict())
        triplet[row[1].lower()] = row[0]
        triplets[triplet_key] = triplet

    # Flatten into a list of dicts, computing the success rate per pair
    objs = []
    # .items() instead of the py2-only .iteritems(): works on both 2 and 3
    for (triplet, obj) in triplets.items():
        obj['source_se'] = triplet[0]
        obj['vo_name'] = triplet[1]
        failed = obj.get('failed', 0)
        finished = obj.get('finished', 0)
        total = failed + finished
        if total > 0:
            obj['rate'] = (finished * 100.0) / total
        else:
            obj['rate'] = None
        objs.append(obj)

    # Ordering
    (order_by, order_desc) = get_order_by(http_request)

    if order_by == 'active':
        sorting_method = lambda o: (o.get('started', 0), o.get('delete', 0))
    elif order_by == 'finished':
        sorting_method = lambda o: (o.get('finished', 0), o.get('failed', 0))
    elif order_by == 'failed':
        sorting_method = lambda o: (o.get('failed', 0), o.get('finished', 0))
    elif order_by == 'canceled':
        sorting_method = lambda o: (o.get('canceled', 0), o.get('finished', 0))
    elif order_by == 'rate':
        sorting_method = lambda o: (o.get('rate', 0), o.get('finished', 0))
    else:
        # NOTE(review): objs carry 'started'/'delete' keys, never 'active' —
        # confirm whether the default tie-breaker should be 'started' instead
        sorting_method = lambda o: (o.get('delete', 0), o.get('active', 0))

    # Generate summary - sum of all values across every pair
    summary = {
        'submitted': sum(map(lambda o: o.get('delete', 0), objs), 0),
        'active': sum(map(lambda o: o.get('started', 0), objs), 0),
        'finished': sum(map(lambda o: o.get('finished', 0), objs), 0),
        'failed': sum(map(lambda o: o.get('failed', 0), objs), 0),
        'canceled': sum(map(lambda o: o.get('canceled', 0), objs), 0),
    }
    # 'rate' is only present when at least one terminal state was seen
    if summary['finished'] > 0 or summary['failed'] > 0:
        summary['rate'] = (float(summary['finished']) /
                           (summary['finished'] + summary['failed'])) * 100

    # Paged overview plus the aggregated summary
    return {
        'overview':
        paged(
            OverviewExtendedDel(not_before,
                                sorted(objs,
                                       key=sorting_method,
                                       reverse=order_desc),
                                cursor=cursor), http_request),
        'summary':
        summary
    }
Example #3
0
def get_overview(http_request):
    """
    Return an overview of transfer states from t_file, grouped by
    (source_se, dest_se, vo_name) — or by vo_name alone when only a
    summary is requested.

    Non-terminal states are counted unconditionally; terminal states
    (FINISHED, FAILED, CANCELED) are only counted within the requested
    time window (default: the last hour).
    """
    filters = setup_filters(http_request)
    if filters['time_window']:
        not_before = datetime.utcnow() - timedelta(hours=filters['time_window'])
    else:
        # Default time window: one hour
        not_before = datetime.utcnow() - timedelta(hours=1)

    cursor = connection.cursor()

    # Optional WHERE clauses; the values are passed as bind parameters
    pairs_filter = ""
    se_params = []
    if filters['source_se']:
        pairs_filter += " AND source_se = %s "
        se_params.append(filters['source_se'])
    if filters['dest_se']:
        pairs_filter += " AND dest_se = %s "
        se_params.append(filters['dest_se'])
    if filters['vo']:
        pairs_filter += " AND vo_name = %s "
        se_params.append(filters['vo'])

    # key -> {lowercased state name: count}; the key is the vo_name string
    # in summary-only mode, (source_se, dest_se, vo_name) otherwise
    triplets = dict()

    if filters['only_summary']:
        # Non terminal states, grouped by vo only
        query = """
        SELECT COUNT(file_state) as count, file_state,  vo_name
        FROM t_file
        WHERE file_state in ('SUBMITTED', 'ACTIVE', 'READY', 'STAGING', 'STARTED', 'ARCHIVING') %s
        GROUP BY file_state, vo_name order by NULL
        """ % pairs_filter
        cursor.execute(query, se_params)
        for row in cursor.fetchall():
            key = row[2]
            triplet = triplets.get(key, dict())
            triplet[row[1].lower()] = row[0]
            triplets[key] = triplet

        # Terminal states within the time window, grouped by vo only
        query = """
        SELECT COUNT(file_state) as count, file_state,  vo_name
        FROM t_file
        WHERE file_state in ('FINISHED', 'FAILED', 'CANCELED') %s
        AND finish_time > %%s
        GROUP BY file_state,  vo_name  order by NULL
        """ % pairs_filter
        cursor.execute(query, se_params + [not_before.strftime('%Y-%m-%d %H:%M:%S')])
        for row in cursor.fetchall():
            key = row[2]
            triplet = triplets.get(key, dict())
            triplet[row[1].lower()] = row[0]
            triplets[key] = triplet

    else:
        # Non terminal states, per (source, destination, vo)
        query = """
        SELECT COUNT(file_state) as count, file_state, source_se, dest_se, vo_name
        FROM t_file
        WHERE file_state in ('SUBMITTED', 'ACTIVE', 'READY', 'STAGING', 'STARTED', 'ARCHIVING') %s
        GROUP BY file_state, source_se, dest_se, vo_name order by NULL
        """ % pairs_filter
        cursor.execute(query, se_params)
        for row in cursor.fetchall():
            triplet_key = (row[2], row[3], row[4])
            triplet = triplets.get(triplet_key, dict())
            triplet[row[1].lower()] = row[0]
            triplets[triplet_key] = triplet

        # Terminal states within the time window, per (source, destination, vo)
        query = """
        SELECT COUNT(file_state) as count, file_state, source_se, dest_se, vo_name
        FROM t_file
        WHERE file_state in ('FINISHED', 'FAILED', 'CANCELED') %s
        AND finish_time > %%s
        GROUP BY file_state, source_se, dest_se, vo_name  order by NULL
        """ % pairs_filter
        cursor.execute(query, se_params + [not_before.strftime('%Y-%m-%d %H:%M:%S')])
        for row in cursor.fetchall():
            triplet_key = (row[2], row[3], row[4])
            triplet = triplets.get(triplet_key, dict())
            triplet[row[1].lower()] = row[0]
            triplets[triplet_key] = triplet

    # Flatten into a list of dicts
    objs = []
    # .items() instead of the py2-only .iteritems(): works on both 2 and 3
    for (triplet, obj) in triplets.items():
        if filters['only_summary']:
            # The key is the vo_name string itself (the original stored
            # triplet[0], i.e. just its first character; these objs are not
            # part of the summary-only response, so the full name is safe)
            obj['vo_name'] = triplet
        else:
            obj['source_se'] = triplet[0]
            obj['dest_se'] = triplet[1]
            obj['vo_name'] = triplet[2]
            if 'current' not in obj and 'active' in obj:
                obj['current'] = 0
            failed = obj.get('failed', 0)
            finished = obj.get('finished', 0)
            total = failed + finished
            if total > 0:
                obj['rate'] = (finished * 100.0) / total
            else:
                obj['rate'] = None
        objs.append(obj)

    # Ordering
    (order_by, order_desc) = get_order_by(http_request)

    if order_by == 'active':
        sorting_method = lambda o: (o.get('active', 0), o.get('submitted', 0))
    elif order_by == 'finished':
        sorting_method = lambda o: (o.get('finished', 0), o.get('failed', 0))
    elif order_by == 'failed':
        sorting_method = lambda o: (o.get('failed', 0), o.get('finished', 0))
    elif order_by == 'canceled':
        sorting_method = lambda o: (o.get('canceled', 0), o.get('finished', 0))
    elif order_by == 'staging':
        sorting_method = lambda o: (o.get('staging', 0), o.get('started', 0))
    elif order_by == 'started':
        sorting_method = lambda o: (o.get('started', 0), o.get('staging', 0))
    elif order_by == 'archiving':
        sorting_method = lambda o: (o.get('archiving', 0), o.get('staging', 0))
    elif order_by == 'rate':
        sorting_method = lambda o: (o.get('rate', 0), o.get('finished', 0))
    else:
        sorting_method = lambda o: (o.get('submitted', 0), o.get('active', 0))

    # Generate summary - sum of all values across every group
    summary = {
        'submitted': sum(map(lambda o: o.get('submitted', 0), objs), 0),
        'active': sum(map(lambda o: o.get('active', 0), objs), 0),
        'ready': sum(map(lambda o: o.get('ready', 0), objs), 0),
        'finished': sum(map(lambda o: o.get('finished', 0), objs), 0),
        'failed': sum(map(lambda o: o.get('failed', 0), objs), 0),
        'canceled': sum(map(lambda o: o.get('canceled', 0), objs), 0),
        'current': sum(map(lambda o: o.get('current', 0), objs), 0),
        'staging': sum(map(lambda o: o.get('staging', 0), objs), 0),
        'started': sum(map(lambda o: o.get('started', 0), objs), 0),
        'archiving': sum(map(lambda o: o.get('archiving', 0), objs), 0),
    }
    # 'rate' is only present when at least one terminal state was seen
    if summary['finished'] > 0 or summary['failed'] > 0:
        summary['rate'] = (float(summary['finished']) / (summary['finished'] + summary['failed'])) * 100

    if filters['only_summary']:
        return {
            'summary': summary
        }
    else:
        return {
            'overview': paged(
                OverviewExtended(not_before, sorted(objs, key=sorting_method, reverse=order_desc), cursor=cursor),
                http_request
            ),
            'summary': summary
        }
Example #4
0
def get_job_transfers(http_request, job_id):
    """
    Return the transfers (or, failing that, the deletion operations)
    belonging to the given job, plus aggregated statistics.

    Raises Http404 if the job has neither transfers nor deletions.
    """
    files = File.objects.filter(job=job_id)

    if not files:
        # Not a transfer job; it may be a deletion job
        files = DmFile.objects.filter(job=job_id)
        if not files:
            raise Http404

    # Ordering: map the request's ordering key to a model field
    (order_by, order_desc) = get_order_by(http_request)
    sortable_fields = {
        'id': 'file_id',
        'size': 'filesize',
        'throughput': 'throughput',
        'start_time': 'start_time',
        'finish_time': 'finish_time',
        'staging_start': 'staging_start',
        'staging_finished': 'staging_finished',
    }
    if order_by in sortable_fields:
        files = files.order_by(
            ordered_field(sortable_fields[order_by], order_desc))

    # Pre-fetch: the stats below iterate the result set several times
    files = list(files)

    # Job submission time
    submission_time = Job.objects.get(job_id=job_id).submit_time

    # Build up stats
    now = datetime.utcnow()
    # Files that have not started yet count as "now" so they never win min()
    first_start_time = min(
        f.get_start_time() if f.get_start_time() else now for f in files)
    # NOTE(review): only the first file's finish_time is considered, which
    # depends on the ordering applied above — confirm this is intended
    if files[0].finish_time:
        running_time = files[0].finish_time - first_start_time
    else:
        running_time = now - first_start_time
    # Whole seconds; sub-second precision is deliberately dropped
    running_time = (running_time.seconds + running_time.days * 24 * 3600)

    total_size = sum(f.filesize if f.filesize else 0 for f in files)
    transferred = sum(f.transferred if f.transferred else 0 for f in files)
    # Comprehensions instead of filter() so these are real lists on both
    # Python 2 and 3 (they are reused and len()'d below)
    with_throughputs = [f for f in files if f.throughput]
    actives_throughput = [f for f in with_throughputs
                          if f.file_state == 'ACTIVE']

    stats = {
        'total_size': total_size,
        'total_done': transferred,
        'first_start': first_start_time
    }

    if first_start_time:
        stats['queued_first'] = first_start_time - submission_time
    else:
        stats['queued_first'] = now - submission_time

    if running_time:
        stats['time_transfering'] = running_time
    if actives_throughput:
        stats['current_throughput'] = sum(
            f.throughput for f in actives_throughput)
    if with_throughputs:
        stats['avg_throughput'] = sum(
            f.throughput for f in with_throughputs) / len(with_throughputs)

    # Now we got the stats, apply filters to the listed files
    if http_request.GET.get('state', None):
        wanted_states = http_request.GET['state'].split(',')
        files = [f for f in files if f.file_state in wanted_states]
    if http_request.GET.get('reason', None):
        files = [f for f in files if f.reason == http_request.GET['reason']]
    if http_request.GET.get('file', None):
        try:
            file_id = int(http_request.GET['file'])
        except ValueError:
            # Ignore a non-numeric file id instead of failing the request
            # (was a bare except, which also hid unrelated errors)
            pass
        else:
            files = [f for f in files if f.file_id == file_id]

    return {
        'files': paged(RetriesFetcher(LogLinker(files)), http_request),
        'stats': stats
    }