Code Example #1
File: views.py  Project: NeCTAR-RC/karaage
def project_usage(request, project_id, machine_category_id):
    machine_category = get_object_or_404(MachineCategory, pk=machine_category_id)
    project = get_object_or_404(Project, pid=project_id)
    if not project.can_view(request.user) and not getattr(settings, 'USAGE_IS_PUBLIC', False):
        return HttpResponseForbidden('<h1>Access Denied</h1>')

    result = progress(request)
    if result is not None:
        return result

    start, end = get_date_range(request)

    result = gen_cache_for_machine_category(request, start, end, machine_category)
    if result is not None:
        return result

    result = gen_cache_for_project(request, start, end, project, machine_category)
    if result is not None:
        return result

    usage_list = []
    total, total_jobs = 0, 0

    # Query the jobs grouped by account so that people who have since been
    # removed from the project are still included.
    rows = CPUJob.objects.filter(
        project=project,
        machine__category=machine_category,
        date__range=(start, end)
    ).values('account').distinct().order_by('account')

    for row in rows:
        u = Account.objects.get(id=row['account']).person
        time, jobs = usage.get_person_usage(u, project, start, end, machine_category)
        if time:
            total += time
            total_jobs += jobs
            if jobs > 0:
                usage_list.append({'person': u, 'usage': time, 'jobs': jobs})

    for u in usage_list:
        if total == 0:
            u['percent'] = 0
        else:
            u['percent'] = (u['usage'] / total) * 100

    usage_list = dictsortreversed(usage_list, 'usage')

    for count, i in enumerate(usage_list):
        i['colour'] = graphs.get_colour(count)

    graph = graphs.get_project_trend_graph_url(project, start, end, machine_category)

    return render_to_response('usage/project_usage.html', locals(), context_instance=RequestContext(request))
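The view above expects project_id (the project's pid) and machine_category_id to be captured from the URL. Below is a minimal sketch of how such a route could be wired up in a Django 1.x-style URLconf; the pattern, module layout and URL name are illustrative assumptions, not taken from the karaage project.

# Hypothetical URLconf for the view above; the pattern and name are assumptions.
from django.conf.urls import url

from . import views

urlpatterns = [
    # e.g. /projects/<project pid>/usage/<machine category pk>/
    url(r'^projects/(?P<project_id>[^/]+)/usage/(?P<machine_category_id>\d+)/$',
        views.project_usage,
        name='project_usage'),
]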
Code Example #2
File: tasks.py  Project: NeCTAR-RC/karaage
def _gen_project_trend_graph(project, start, end, machine_category,
                             force_overwrite=False):
    """Generates a bar graph for a project.

    Keyword arguments:
    project -- Project
    start -- start date
    end -- end date
    machine_category -- MachineCategory object
    force_overwrite -- regenerate the graph even if cached files exist

    """
    filename = graphs.get_project_trend_graph_filename(project,
            start, end, machine_category)
    csv_filename = os.path.join(settings.GRAPH_ROOT, filename + '.csv')
    png_filename = os.path.join(settings.GRAPH_ROOT, filename + '.png')

    _check_directory_exists(csv_filename)
    _check_directory_exists(png_filename)

    # Reuse the cached files if both already exist, unless graph debugging
    # is enabled or an overwrite is explicitly forced.
    if not settings.GRAPH_DEBUG and not force_overwrite:
        if os.path.exists(csv_filename) and os.path.exists(png_filename):
            return

    rows = CPUJob.objects.filter(
        project=project,
        machine__category=machine_category,
        date__range=(start, end)
    ).values(
        'account', 'account__username', 'date'
    ).annotate(
        Sum('cpu_usage')
    ).order_by('account', 'date')

    # Keep the requested range; start and end are reused as loop variables below.
    t_start = start
    t_end = end

    start_str = start.strftime('%Y-%m-%d')
    end_str = end.strftime('%Y-%m-%d')

    fig, ax = plt.subplots(figsize=(6, 4))
    ax.set_xlim(start, end + datetime.timedelta(days=1))
    ax.set_title('%s   %s - %s' % (project.pid, start_str, end_str))
    ax.set_ylabel("CPU Time (hours)")
    ax.set_xlabel("Date")

    locator = mdates.AutoDateLocator()
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))
    ax.xaxis.set_minor_locator(mdates.DayLocator())

    data = {}
    x_data = {}
    y_data = {}

    # Write one CSV row per account per day while collecting the raw usage
    # for plotting.
    with open(csv_filename, 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        for row in rows.iterator():
            csv_writer.writerow([
                row['account__username'],
                row['date'], row['cpu_usage__sum']/3600.00
            ])

            account = row['account']
            date = row['date']

            if account not in data:
                data[account] = {}
                x_data[account] = []
                y_data[account] = []

            data[account][date] = row['cpu_usage__sum']

    # Build a dense daily series per account, filling days without jobs with 0.
    for account, dates in data.items():
        start = t_start
        end = t_end
        while start <= end:
            total = 0
            if start in dates:
                total = dates[start]
            x_data[account].append(start)
            y_data[account].append(total / 3600.00)
            start = start + datetime.timedelta(days=1)

    del data

    # Running per-day totals, used as the bottom offsets when stacking the bars.
    totals = []
    start = t_start
    end = t_end
    while start <= end:
        totals.append(0)
        start = start + datetime.timedelta(days=1)

    # Draw each account's daily usage as one bar series stacked on top of the
    # running totals of the series already drawn.
    for count, account in enumerate(x_data):
        ax.bar(
            x_data[account], y_data[account],
            bottom=totals,
            color=graphs.get_colour(count),
            edgecolor=graphs.get_colour(count),
            align='edge')

        i = 0
        start = t_start
        end = t_end
        while start <= end:
            totals[i] += y_data[account][i]
            i = i + 1
            start = start + datetime.timedelta(days=1)

    del x_data
    del y_data
    del totals

    fig.autofmt_xdate()
    plt.tight_layout()
    plt.savefig(png_filename)
    plt.close()
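The bars in _gen_project_trend_graph are stacked by hand: each account's daily series is drawn with bottom= set to the running per-day totals of the series drawn before it, and those totals are then bumped up for the next account. The following standalone sketch shows just that technique with fabricated account names and usage numbers, assuming only matplotlib and the standard library.

# Standalone sketch of manual bar stacking with matplotlib; all data below
# is made up for illustration.
import datetime

import matplotlib.pyplot as plt

days = [datetime.date(2024, 1, 1) + datetime.timedelta(days=i) for i in range(7)]
usage_by_account = {
    'alice': [4, 0, 6, 2, 0, 0, 3],  # CPU hours per day (fabricated)
    'bob': [1, 5, 0, 0, 2, 4, 0],
}

fig, ax = plt.subplots(figsize=(6, 4))
totals = [0] * len(days)

for account, hours in usage_by_account.items():
    # Each series sits on top of the totals accumulated so far.
    ax.bar(days, hours, bottom=totals, label=account, align='edge')
    totals = [t + h for t, h in zip(totals, hours)]

ax.set_ylabel('CPU Time (hours)')
ax.set_xlabel('Date')
ax.legend()
fig.autofmt_xdate()
plt.savefig('stacked_usage.png')
plt.close(fig)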