def create_summary(addfilter):
    """
    Create summary list.

    The additional filter is used to get data only for the given entity
    (user, group, grid user). Currently the summary contains:
        - Running jobs for the given entity
        - Jobs completed since last midnight up to this moment
        - Week statistics (since last Monday):
            - Number of completed jobs
            - Total CPU time for this week
            - Total wall time
            - Average efficiency
        - Month statistics (since the 1st day of the current month):
            - Number of completed jobs
            - Total CPU time for this month
            - Total wall time
            - Average efficiency
    """
    summary = []

    # Jobs currently in the running state
    summary.append(('Running jobs', Job.objects.filter(
        job_state=getJobState('R'),
        **addfilter
    ).count()))

    dtoday = date.today()

    # Jobs completed since last midnight
    summary.append(('Jobs completed today', Job.objects.filter(
        job_state=getJobState('C'),
        comp_time__gte=dtoday.isoformat(),
        **addfilter
    ).count()))

    # Weekly statistics: aggregate over jobs completed since last Monday
    wsummary = Job.objects.filter(
        job_state=getJobState('C'),
        comp_time__gte=date.fromordinal(dtoday.toordinal() - dtoday.weekday()),
        **addfilter
    ).aggregate(Sum('cput'), Sum('walltime'), Avg('efficiency'), Count('pk'))
    summary.append(('--', ''))
    summary.append(('Jobs completed this week', wsummary['pk__count']))
    summary.append(('CPU time for this week', secondsToHours(wsummary['cput__sum'])))
    summary.append(('Wall time for this week', secondsToHours(wsummary['walltime__sum'])))
    summary.append(('Weekly average efficiency', wsummary['efficiency__avg']))
    summary.append(('--', ''))

    # Monthly statistics: aggregate over jobs completed since the 1st of the month
    msummary = Job.objects.filter(
        job_state=getJobState('C'),
        comp_time__gte=date(dtoday.year, dtoday.month, 1),
        **addfilter
    ).aggregate(Sum('cput'), Sum('walltime'), Avg('efficiency'), Count('pk'))
    summary.append(('Jobs completed this month', msummary['pk__count']))
    summary.append(('CPU time for this month', secondsToHours(msummary['cput__sum'])))
    summary.append(('Wall time for this month', secondsToHours(msummary['walltime__sum'])))
    summary.append(('Monthly average efficiency', msummary['efficiency__avg']))
    return summary
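

# The secondsToHours helper used above is defined elsewhere in the codebase; as
# an illustration only, here is a minimal sketch of the conversion it is assumed
# to perform (aggregate sums can be None when no jobs matched, so that case is
# handled too). The name _seconds_to_hours_sketch is hypothetical.
def _seconds_to_hours_sketch(seconds):
    """Format a seconds total as 'H:MM:SS'; return '0:00:00' for empty sums."""
    if not seconds:
        return '0:00:00'
    hours, rest = divmod(int(seconds), 3600)
    minutes, secs = divmod(rest, 60)
    return '%d:%02d:%02d' % (hours, minutes, secs)

# Example call (the filter keyword is hypothetical and depends on the Job model):
#   summary = create_summary({'job_owner__name': 'jdoe'})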


def suspicious(request):
    """
    Get lists of suspicious jobs (various reasons) and render a page with them.
    """
    sf = SuspicionForm()
    if not request.POST:
        return render_to_response_with_config(
            'trqacc/jobs_suspicious.html',
            {'suspicion_form': sf, 'jobs_page': None, 'paginator': None}
        )

    # Collect running jobs flagged as problematic by the watchlist checks
    jobs = []
    for j in Job.objects.filter(job_state=getJobState('R')):
        if wl.isProblem(j):
            jobs.append(j)

    # Handle pagination controls ('<<' and '>>' submit buttons)
    sf.data['page'] = request.POST['page']
    if request.POST['submit'] == '>>':
        sf.data['page'] = int(sf.data['page']) + 1
    elif request.POST['submit'] == '<<':
        sf.data['page'] = int(sf.data['page']) - 1

    page = int(sf.data['page'])
    paginator = Paginator(jobs, 50)
    # Fall back to the first page when paging runs past either end of the list
    if page > paginator.num_pages or page < 1:
        page = 1
    jobs_page = paginator.page(page)

    return render_to_response_with_config(
        'trqacc/jobs_suspicious.html',
        {'suspicion_form': sf, 'jobs_page': jobs_page, 'paginator': paginator}
    )
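

# The wl.isProblem() check above comes from a watchlist module that is not part
# of this section; as an illustration only, here is a sketch of the kind of
# per-job test such a check might apply. The helper name and the efficiency
# threshold are assumptions, not the actual watchlist logic.
def _looks_suspicious_sketch(job, min_efficiency=0.1):
    """Flag a running job whose reported efficiency is suspiciously low."""
    return job.efficiency is not None and job.efficiency < min_efficiency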


def jobs_running(request):
    """
    Render the list of jobs that were running at a given moment, optionally
    filtered by queue, user, group, grid user and node.
    """
    run_form = RunningForm()
    if not request.POST:
        return render_to_response_with_config(
            'trqacc/jobs_running.html',
            {'jobs_page': [], 'paginator': None, 'run_form': run_form}
        )

    # Copy the submitted filter values into the form
    for field in ('momentdate', 'momenttime', 'queue', 'user',
                  'group', 'griduser', 'node', 'page'):
        run_form.data[field] = request.POST[field]

    # Handle pagination controls ('<<' and '>>' submit buttons)
    if request.POST['submit'] == '>>':
        run_form.data['page'] = int(run_form.data['page']) + 1
    elif request.POST['submit'] == '<<':
        run_form.data['page'] = int(run_form.data['page']) - 1
    run_form.is_bound = True

    # Jobs that started before the chosen moment and completed after it
    args = {
        'start_time__lte': run_form.data['momentdate'],
        'comp_time__gte': run_form.data['momentdate'],
    }
    if run_form.data['queue'] != '0':
        args['queue__pk'] = run_form.data['queue']
    if run_form.data['user'] != '0':
        args['job_owner__pk'] = run_form.data['user']
    if run_form.data['group'] != '0':
        args['job_owner__group__pk'] = run_form.data['group']
    if run_form.data['griduser'] != '0':
        args['job_gridowner__pk'] = run_form.data['griduser']
    if run_form.data['node'] != '0':
        args['jobslots__node__pk'] = run_form.data['node']
    object_list1 = Job.objects.filter(**args)

    # Jobs that started before the chosen moment and are still running
    args.pop('comp_time__gte')
    args['job_state__pk'] = getJobState('R').pk
    object_list2 = Job.objects.filter(**args)
    object_list = object_list1 | object_list2

    page = int(run_form.data['page'])
    paginator = Paginator(object_list, 50)
    # Fall back to the first page when paging runs past either end of the list
    if page > paginator.num_pages or page < 1:
        page = 1
    jobs_page = paginator.page(page)

    return render_to_response_with_config(
        'trqacc/jobs_running.html',
        {'jobs_page': jobs_page, 'paginator': paginator, 'run_form': run_form}
    )
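

# For reference, the moment-in-time selection above reads as: a job was running
# at the chosen moment if it started before that moment and either completed
# after it or is still in the running state. A minimal standalone sketch of the
# same queryset construction (the helper name is hypothetical):
def _jobs_running_at_sketch(moment, **extra_filters):
    """Jobs that were running at `moment`, with optional extra filters applied."""
    started_before = Job.objects.filter(start_time__lte=moment, **extra_filters)
    finished_later = started_before.filter(comp_time__gte=moment)
    still_running = started_before.filter(job_state=getJobState('R'))
    return finished_later | still_running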


def nodes_table_json_detail(request):
    """
    Return the running jobs on the requested node as a JSON document.
    """
    pk = request.POST['pk']
    node = Node.objects.get(pk=pk)
    js = getJobState('R')
    jobs = Job.objects.filter(jobslots__node=pk, job_state=js)

    jdata = {
        'name': node.name,
        'pk': node.pk,
        'jobs': []
    }
    for j in jobs:
        jdata['jobs'].append({
            'jobid': j.jobid,
            'joburl': j.get_absolute_url(),
            'queue': j.queue.name,
            'queueurl': j.queue.get_absolute_url(),
        })

    data = simplejson.dumps(jdata)
    return HttpResponse(data, 'application/javascript')
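

# Example usage (kept as a comment so the module has no import-time side
# effects). Assumptions: the project URLconf routes this view at the path shown
# below and a Node with pk=1 exists.
#
#   from django.test import Client
#   import json
#
#   client = Client()
#   resp = client.post('/trqacc/nodes/json_detail/', {'pk': 1})  # path is an assumption
#   detail = json.loads(resp.content)
#   print(detail['name'], [job['jobid'] for job in detail['jobs']])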