def _gen_institute_graph(start, end, machine_category, force_overwrite=False):
    """Generate a pie chart comparing usage across institutes.

    Writes two files under ``settings.GRAPH_ROOT``: a CSV of
    ``(institute name, hours, jobs)`` rows and a PNG pie chart of CPU
    hours per institute for the given date range and machine category.

    Existing files are reused as a cache unless ``settings.GRAPH_DEBUG``
    is set or *force_overwrite* is True.
    """
    filename = graphs.get_institute_graph_filename(start, end, machine_category)
    csv_filename = os.path.join(settings.GRAPH_ROOT, filename + '.csv')
    png_filename = os.path.join(settings.GRAPH_ROOT, filename + '.png')

    _check_directory_exists(csv_filename)
    _check_directory_exists(png_filename)

    # BUG FIX: the original guard was
    #   ``if not settings.GRAPH_DEBUG or force_overwrite:``
    # which returned early (serving the stale cached files) exactly when
    # force_overwrite was True — the opposite of the parameter's meaning.
    # Serve from cache only when neither debug mode nor an explicit
    # overwrite was requested.
    if not settings.GRAPH_DEBUG and not force_overwrite:
        if os.path.exists(csv_filename) and os.path.exists(png_filename):
            return

    institute_list = Institute.active.all()

    plt.subplots(figsize=(4, 4))
    data = []
    labels = []

    # NOTE(review): 'wb' is the Python 2 idiom for csv; under Python 3
    # this should become open(csv_filename, 'w', newline='') — confirm
    # the interpreter version before changing it.
    with open(csv_filename, 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        for institute in institute_list.iterator():
            hours, jobs = usage.get_institute_usage(
                institute, start, end, machine_category)
            # Only institutes with actual usage appear in the CSV and
            # the pie chart; zero wedges would be meaningless.
            if hours > 0:
                csv_writer.writerow([institute.name, hours, jobs])
                data.append(hours)
                labels.append(institute.name)

    plt.pie(data, labels=labels, autopct='%1.1f%%', shadow=True)
    plt.tight_layout()
    plt.savefig(png_filename)
    plt.close()
def search(request):
    """Search projects and institutes by name and show their usage.

    GET renders the search form pre-filled with the current date range.
    A valid POST with search terms looks up matching projects and
    institutes and computes each one's usage over the selected period;
    a valid POST with no terms redirects to the plain usage list for
    the chosen dates.  Access requires ``settings.USAGE_IS_PUBLIC``.
    """
    if not getattr(settings, 'USAGE_IS_PUBLIC', False):
        return HttpResponseForbidden('<h1>Access Denied</h1>')

    if request.method == 'POST':
        form = UsageSearchForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            project_query = Project.objects.all()
            institute_query = Institute.objects.all()
            #person_list = Person.objects.all()
            terms = data['terms'].lower()
            start = data['start_date']
            end = data['end_date']
            machine_category = data['machine_category']
            start_str = start.strftime('%Y-%m-%d')
            end_str = end.strftime('%Y-%m-%d')
            if terms:
                # search for projects: AND together one Q per term, each
                # matching the project id or the project name.
                query = Q()
                for term in terms.split(' '):
                    q = Q(pid__icontains=term) | Q(name__icontains=term)
                    query = query & q
                project_query = project_query.filter(query)

                # search for institutes: AND together one name-match Q
                # per term.
                query = Q()
                for term in terms.split(' '):
                    q = Q(name__icontains=term)
                    query = query & q
                institute_query = institute_query.filter(query)

                project_list = []
                for p in project_query:
                    time, jobs = usage.get_project_usage(p, start, end, machine_category)
                    project_list.append({
                        'obj': p,
                        'time': time,
                        'jobs': jobs,
                    })
                # The template context below is locals(); presumably the
                # raw querysets are deleted to keep them out of it.
                del project_query

                institute_list = []
                for i in institute_query:
                    time, jobs = usage.get_institute_usage(i, start, end, machine_category)
                    institute_list.append({
                        'obj': i,
                        'time': time,
                        'jobs': jobs,
                    })
                del institute_query
            else:
                # No terms given: hand off to the overall usage list for
                # the selected date range.
                return HttpResponseRedirect('%s?start=%s&end=%s' % (reverse('kg_usage_list'), start_str, end_str))
    else:
        start, end = get_date_range(request)
        initial = {
            'start_date': start,
            'end_date': end,
            'machine_category': request.GET.get('machine_category', None)
        }
        form = UsageSearchForm(initial=initial)

    # locals() is the template context: form, project_list,
    # institute_list, start/end etc. are all exposed to the template by
    # name (so local names here are part of the view's behavior).
    return render_to_response('usage/search.html', locals(), context_instance=RequestContext(request))
def get_usage(self, start, end, machine_category):
    """Return this institute's cached (cpu_time, jobs) usage figures.

    Thin delegate to :func:`karaage.cache.usage.get_institute_usage`
    for the given date range and machine category.
    """
    # Imported at call time rather than module scope — presumably to
    # sidestep a circular import between the models and the cache layer.
    from karaage.cache.usage import get_institute_usage
    usage_figures = get_institute_usage(self, start, end, machine_category)
    return usage_figures
def institute_usage(request, institute_id, machine_category_id):
    """Show usage details for one institute on one machine category.

    Renders per-project usage rows, the institute's top five people by
    CPU time, summary percentages, and a trend graph, for the date range
    taken from the request.
    """
    if not getattr(settings, 'USAGE_IS_PUBLIC', False):
        return HttpResponseForbidden('<h1>Access Denied</h1>')

    # An in-progress cache generation renders a progress response
    # instead of the page.
    result = progress(request)
    if result is not None:
        return result

    machine_category = get_object_or_404(MachineCategory, pk=machine_category_id)
    institute = get_object_or_404(Institute, pk=institute_id)
    start, end = get_date_range(request)

    # Ensure the usage caches are populated; each helper returns a
    # response (e.g. a progress page) while generation is still running.
    result = gen_cache_for_machine_category(request, start, end, machine_category)
    if result is not None:
        return result
    result = gen_cache_for_institute(request, start, end, institute, machine_category)
    if result is not None:
        return result

    project_list = []
    institute_list = Institute.active.all()

    # NOTE(review): the USAGE_IS_PUBLIC guard at the top already
    # returned Forbidden for non-public sites, so by this point
    # ``not getattr(settings, 'USAGE_IS_PUBLIC', False)`` is always
    # False and this check can never deny access — the can_view()
    # restriction is effectively dead code. Verify intent.
    if not institute.can_view(request.user) and not getattr(settings, 'USAGE_IS_PUBLIC', False):
        return HttpResponseForbidden('<h1>Access Denied</h1>')

    mc_cache = usage.get_machine_category_usage(machine_category, start, end)
    available_time = mc_cache.available_time

    quota = get_object_or_404(InstituteQuota, institute=institute, machine_category=machine_category)

    i_usage, i_jobs = usage.get_institute_usage(institute, start, end, machine_category)

    # One cached row per project of this institute for today's cache of
    # the requested date range.
    for p_cache in cache.ProjectCache.objects.filter(project__institute=institute, machine_category=machine_category, date=datetime.date.today(), start=start, end=end):
        p = p_cache.project
        p_usage = p_cache.cpu_time
        p_jobs = p_cache.no_jobs

        try:
            chunk = p.projectquota_set.get(machine_category=machine_category)
        except ProjectQuota.DoesNotExist:
            chunk = None

        # Skip projects that have neither a quota nor any recorded
        # activity in the period.
        if chunk is None and p_usage==0 and p_jobs==0:
            continue

        if chunk is not None:
            mpots = mc_cache.get_project_mpots(chunk, start, end)
            percent = mc_cache.get_project_cap_percent(chunk, start, end)
        else:
            mpots = None
            percent = None

        # quota.quota appears to be a percentage share of
        # available_time; the * 10000 factor then yields a percentage
        # (percent of a percent) — TODO confirm units.
        if available_time > 0 and quota.quota > 0:
            quota_percent = p_usage / (available_time * quota.quota) * 10000
        else:
            quota_percent = 0

        project_list.append(
            {'project': p,
             'usage': p_usage,
             'jobs': p_jobs,
             'percent': percent,
             'quota_percent': quota_percent,
             })

    person_list = []
    person_total, person_total_jobs = 0, 0
    # Top five people by CPU time within this institute for the period.
    for u in cache.PersonCache.objects.order_by('-cpu_time').filter(project__institute=institute, machine_category=machine_category, date=datetime.date.today(), start=start, end=end)[:5]:
        person_total += u.cpu_time
        person_total_jobs += u.no_jobs
        # Share of the institute's total usage, if any was recorded.
        if i_usage > 0:
            i_percent = (u.cpu_time / i_usage) * 100
        else:
            i_percent = None
        # Same quota arithmetic as the per-project loop above.
        if available_time > 0 and quota.quota > 0:
            quota_percent = u.cpu_time / (available_time * quota.quota) * 10000
        else:
            quota_percent = 0
        person_list.append(
            {'person': u.person,
             'project': u.project,
             'usage': u.cpu_time,
             'jobs': u.no_jobs,
             'percent': i_percent,
             'quota_percent': quota_percent,
             })

    # Fraction of the institute's usage accounted for by its top five
    # people.
    if i_usage > 0:
        person_percent = (person_total / i_usage) * 100
    else:
        person_percent = None

    graph = graphs.get_institute_trend_graph_url(institute, start, end, machine_category)

    # Template context is locals(): project_list, person_list, graph,
    # totals and percentages above are all exposed to the template by
    # name.
    return render_to_response('usage/usage_institute_detail.html', locals(), context_instance=RequestContext(request))