def __call__(self):
    """Render the 'Analysis turnaround times' report.

    Aggregates per-service earliness/lateness of published analyses
    (optionally filtered by client, received date and worksheet
    assignment state), groups rows by analysis category with category
    subtotals and a grand-total footer, and either returns the rendered
    template payload or streams a CSV download depending on the
    requested ``output_format``.
    """
    # get all the data into datalines
    sc = getToolByName(self.context, 'bika_setup_catalog')
    bc = getToolByName(self.context, 'bika_analysis_catalog')
    rc = getToolByName(self.context, 'reference_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analysis turnaround times")
    headings['subheader'] = _("The turnaround time of analyses")
    query = {'portal_type': 'Analysis'}
    # Client filter: an explicit ClientUID in the form wins; otherwise
    # fall back to the client the current user is logged in under.
    client_title = None
    if 'ClientUID' in self.request.form:
        client_uid = self.request.form['ClientUID']
        query['getClientUID'] = client_uid
        client = rc.lookupObject(client_uid)
        client_title = client.Title()
    else:
        client = logged_in_client(self.context)
        if client:
            client_title = client.Title()
            query['getClientUID'] = client.UID()
    if client_title:
        parms.append({
            'title': _('Client'),
            'value': client_title,
            'type': 'text'
        })
    # NOTE(review): the 'Received' date range is applied to the 'created'
    # catalog index, not a 'getDateReceived' index — confirm this is
    # intentional before changing.
    date_query = formatDateQuery(self.context, 'Received')
    if date_query:
        query['created'] = date_query
        received = formatDateParms(self.context, 'Received')
        parms.append({
            'title': _('Received'),
            'value': received,
            'type': 'text'
        })
    # Only published analyses contribute to turnaround statistics.
    query['review_state'] = 'published'
    workflow = getToolByName(self.context, 'portal_workflow')
    if 'bika_worksheetanalysis_workflow' in self.request.form:
        query['worksheetanalysis_review_state'] = self.request.form[
            'bika_worksheetanalysis_workflow']
        ws_review_state = workflow.getTitleForStateOnType(
            self.request.form['bika_worksheetanalysis_workflow'], 'Analysis')
        parms.append({
            'title': _('Assigned to worksheet'),
            'value': ws_review_state,
            'type': 'text'
        })

    # query all the analyses and increment the counts
    # Per-service accumulators; getEarliness() sign decides the bucket:
    # negative = late (minutes added as a positive magnitude),
    # positive = early, zero = undefined.
    count_early = 0
    mins_early = 0
    count_late = 0
    mins_late = 0
    count_undefined = 0
    services = {}
    analyses = bc(query)
    for a in analyses:
        analysis = a.getObject()
        service_uid = analysis.getServiceUID()
        if service_uid not in services:
            services[service_uid] = {
                'count_early': 0,
                'count_late': 0,
                'mins_early': 0,
                'mins_late': 0,
                'count_undefined': 0,
            }
        earliness = analysis.getEarliness()
        if earliness < 0:
            count_late = services[service_uid]['count_late']
            mins_late = services[service_uid]['mins_late']
            count_late += 1
            # earliness is negative here, so subtracting accumulates
            # the positive number of minutes late.
            mins_late -= earliness
            services[service_uid]['count_late'] = count_late
            services[service_uid]['mins_late'] = mins_late
        if earliness > 0:
            count_early = services[service_uid]['count_early']
            mins_early = services[service_uid]['mins_early']
            count_early += 1
            mins_early += earliness
            services[service_uid]['count_early'] = count_early
            services[service_uid]['mins_early'] = mins_early
        if earliness == 0:
            count_undefined = services[service_uid]['count_undefined']
            count_undefined += 1
            services[service_uid]['count_undefined'] = count_undefined

    # calculate averages
    # Empty string when there is nothing to average; otherwise a
    # human-readable duration (Py2 '/' floors for ints).
    for service_uid in services.keys():
        count_early = services[service_uid]['count_early']
        mins_early = services[service_uid]['mins_early']
        if count_early == 0:
            services[service_uid]['ave_early'] = ''
        else:
            avemins = (mins_early) / count_early
            services[service_uid]['ave_early'] = formatDuration(
                self.context, avemins)
        count_late = services[service_uid]['count_late']
        mins_late = services[service_uid]['mins_late']
        if count_late == 0:
            services[service_uid]['ave_late'] = ''
        else:
            avemins = mins_late / count_late
            services[service_uid]['ave_late'] = formatDuration(
                self.context, avemins)

    # and now lets do the actual report lines
    formats = {
        'columns': 7,
        'col_heads': [
            _('Analysis'),
            _('Count'),
            _('Undefined'),
            _('Late'),
            _('Average late'),
            _('Early'),
            _('Average early'),
        ],
        'class': '',
    }
    total_count_early = 0
    total_count_late = 0
    total_mins_early = 0
    total_mins_late = 0
    total_count_undefined = 0
    datalines = []
    for cat in sc(portal_type='AnalysisCategory', sort_on='sortable_title'):
        # Category heading row, only emitted (via first_time) if at
        # least one of the category's services has analyses.
        catline = [
            {
                'value': cat.Title,
                'class': 'category_heading',
                'colspan': 7
            },
        ]
        first_time = True
        cat_count_early = 0
        cat_count_late = 0
        cat_count_undefined = 0
        cat_mins_early = 0
        cat_mins_late = 0
        for service in sc(portal_type="AnalysisService",
                          getCategoryUID=cat.UID,
                          sort_on='sortable_title'):
            dataline = [
                {
                    'value': service.Title,
                    'class': 'testgreen'
                },
            ]
            # Skip services with no matching analyses in this run.
            if service.UID not in services:
                continue
            if first_time:
                datalines.append(catline)
                first_time = False
            # analyses found
            cat_count_early += services[service.UID]['count_early']
            cat_count_late += services[service.UID]['count_late']
            cat_count_undefined += services[service.UID]['count_undefined']
            cat_mins_early += services[service.UID]['mins_early']
            cat_mins_late += services[service.UID]['mins_late']
            count = services[service.UID]['count_early'] + \
                services[service.UID]['count_late'] + \
                services[service.UID]['count_undefined']
            dataline.append({'value': count, 'class': 'number'})
            dataline.append({
                'value': services[service.UID]['count_undefined'],
                'class': 'number'
            })
            dataline.append({
                'value': services[service.UID]['count_late'],
                'class': 'number'
            })
            dataline.append({
                'value': services[service.UID]['ave_late'],
                'class': 'number'
            })
            dataline.append({
                'value': services[service.UID]['count_early'],
                'class': 'number'
            })
            dataline.append({
                'value': services[service.UID]['ave_early'],
                'class': 'number'
            })
            datalines.append(dataline)

        # category totals
        # NOTE(review): subtotal averages are raw minute values, unlike
        # the formatDuration()-formatted service and footer averages —
        # presumably an oversight; confirm before changing.
        dataline = [
            {
                'value': '%s - total' % (cat.Title),
                'class': 'subtotal_label'
            },
        ]
        dataline.append({
            'value': cat_count_early + cat_count_late + cat_count_undefined,
            'class': 'subtotal_number'
        })
        dataline.append({
            'value': cat_count_undefined,
            'class': 'subtotal_number'
        })
        dataline.append({
            'value': cat_count_late,
            'class': 'subtotal_number'
        })
        if cat_count_late:
            dataitem = {
                'value': cat_mins_late / cat_count_late,
                'class': 'subtotal_number'
            }
        else:
            dataitem = {'value': 0, 'class': 'subtotal_number'}
        dataline.append(dataitem)
        dataline.append({
            'value': cat_count_early,
            'class': 'subtotal_number'
        })
        if cat_count_early:
            dataitem = {
                'value': cat_mins_early / cat_count_early,
                'class': 'subtotal_number'
            }
        else:
            dataitem = {'value': 0, 'class': 'subtotal_number'}
        dataline.append(dataitem)
        total_count_early += cat_count_early
        total_count_late += cat_count_late
        total_count_undefined += cat_count_undefined
        total_mins_early += cat_mins_early
        total_mins_late += cat_mins_late

    # footer data
    footlines = []
    footline = []
    footline = [
        {
            'value': _('Total'),
            'class': 'total'
        },
    ]
    footline.append({
        'value': total_count_early + total_count_late + total_count_undefined,
        'class': 'total number'
    })
    footline.append({
        'value': total_count_undefined,
        'class': 'total number'
    })
    footline.append({'value': total_count_late, 'class': 'total number'})
    if total_count_late:
        ave_mins = total_mins_late / total_count_late
        footline.append({
            'value': formatDuration(self.context, ave_mins),
            'class': 'total number'
        })
    else:
        # NOTE(review): this fallback omits the 'total number' class
        # that its 'early' counterpart below includes — likely an
        # oversight; confirm before changing.
        footline.append({'value': ''})
    footline.append({'value': total_count_early, 'class': 'total number'})
    if total_count_early:
        ave_mins = total_mins_early / total_count_early
        footline.append({
            'value': formatDuration(self.context, ave_mins),
            'class': 'total number'
        })
    else:
        footline.append({'value': '', 'class': 'total number'})
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines
    }

    if self.request.get('output_format', '') == 'CSV':
        # CSV export path: Python 2 csv/StringIO; category heading rows
        # (length 1) are skipped.
        import csv
        import StringIO
        import datetime
        fieldnames = [
            'Analysis',
            'Count',
            'Undefined',
            'Late',
            'Average late',
            'Early',
            'Average early',
        ]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction='ignore',
                            fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            if len(row) == 1:
                # category heading thingy
                continue
            dw.writerow({
                'Analysis': row[0]['value'],
                'Count': row[1]['value'],
                'Undefined': row[2]['value'],
                'Late': row[3]['value'],
                'Average late': row[4]['value'],
                'Early': row[5]['value'],
                'Average early': row[6]['value'],
            })
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'text/csv')
        setheader("Content-Disposition",
                  "attachment;filename=\"analysestats_%s.csv\"" % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {
            'report_title': t(headings['header']),
            'report_data': self.template()
        }
def __call__(self):
    """Render the 'Analyses per sample type' report.

    Counts analyses per sample type, applying optional client, request
    date and workflow-state filters, then either returns the rendered
    template payload or streams a CSV download depending on the
    requested ``output_format``.
    """
    setup_catalog = getToolByName(self.context, 'bika_setup_catalog')
    analysis_catalog = getToolByName(self.context, 'bika_analysis_catalog')
    ref_catalog = getToolByName(self.context, 'reference_catalog')

    self.report_content = {}
    parm_lines = {}
    parms = []
    headings = {
        'header': _("Analyses per sample type"),
        'subheader': _("Number of analyses requested per sample type"),
    }

    count_all = 0
    query = {'portal_type': 'Analysis'}
    form = self.request.form

    # Client filter: an explicit ClientUID in the form wins; otherwise
    # fall back to the client the current user is logged in under.
    client_title = None
    if 'ClientUID' in form:
        client_uid = form['ClientUID']
        query['getClientUID'] = client_uid
        client_title = ref_catalog.lookupObject(client_uid).Title()
    else:
        client = logged_in_client(self.context)
        if client:
            client_title = client.Title()
            query['getClientUID'] = client.UID()
    if client_title:
        parms.append(
            {'title': _('Client'), 'value': client_title, 'type': 'text'})

    # Optional 'Requested' date range, applied to the created index and
    # echoed back as a report parameter.
    date_query = formatDateQuery(self.context, 'Requested')
    if date_query:
        query['created'] = date_query
        requested = formatDateParms(self.context, 'Requested')
        parms.append(
            {'title': _('Requested'), 'value': requested, 'type': 'text'})

    workflow = getToolByName(self.context, 'portal_workflow')

    # Optional workflow-state filters; each one selected in the form is
    # echoed in the report header with its human-readable state title.
    if 'bika_analysis_workflow' in form:
        state_id = form['bika_analysis_workflow']
        query['review_state'] = state_id
        parms.append({
            'title': _('Status'),
            'value': workflow.getTitleForStateOnType(state_id, 'Analysis'),
            'type': 'text',
        })
    if 'bika_cancellation_workflow' in form:
        state_id = form['bika_cancellation_workflow']
        query['cancellation_state'] = state_id
        parms.append({
            'title': _('Active'),
            'value': workflow.getTitleForStateOnType(state_id, 'Analysis'),
            'type': 'text',
        })
    if 'bika_worksheetanalysis_workflow' in form:
        state_id = form['bika_worksheetanalysis_workflow']
        query['worksheetanalysis_review_state'] = state_id
        parms.append({
            'title': _('Assigned to worksheet'),
            'value': workflow.getTitleForStateOnType(state_id, 'Analysis'),
            'type': 'text',
        })

    # One report row per sample type: [title, count of analyses].
    formats = {
        'columns': 2,
        'col_heads': [_('Sample type'), _('Number of analyses')],
        'class': '',
    }
    datalines = []
    for sampletype in setup_catalog(portal_type="SampleType",
                                    sort_on='sortable_title'):
        # Re-use the filter query, narrowing it to this sample type.
        query['getSampleTypeUID'] = sampletype.UID
        count_analyses = len(analysis_catalog(query))
        datalines.append(
            [{'value': sampletype.Title}, {'value': count_analyses}])
        count_all += count_analyses

    # Footer carries the grand total.
    footlines = [[
        {'value': _('Total'), 'class': 'total_label'},
        {'value': count_all},
    ]]

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines,
    }

    if self.request.get('output_format', '') == 'CSV':
        import csv
        import StringIO
        import datetime
        fieldnames = [
            'Sample Type',
            'Analyses',
        ]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction='ignore',
                            fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            dw.writerow({
                'Sample Type': row[0]['value'],
                'Analyses': row[1]['value'],
            })
        report_data = output.getvalue()
        output.close()
        stamp = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'text/csv')
        setheader(
            "Content-Disposition",
            "attachment;filename=\"analysespersampletype_%s.csv\"" % stamp)
        self.request.RESPONSE.write(report_data)
    else:
        return {
            'report_title': t(headings['header']),
            'report_data': self.template(),
        }
def __call__(self):
    """Render the 'Analyses per analysis service' report.

    Counts analyses per analysis service, grouped under category
    heading rows, applying optional client, date and workflow-state
    filters; returns the rendered template payload or streams a CSV
    download depending on the requested ``output_format``.
    """
    # get all the data into datalines
    sc = getToolByName(self.context, "bika_setup_catalog")
    bc = getToolByName(self.context, "bika_analysis_catalog")
    rc = getToolByName(self.context, "reference_catalog")
    self.report_content = {}
    parms = []
    headings = {}
    headings["header"] = _("Analyses per analysis service")
    headings["subheader"] = _("Number of analyses requested per analysis service")
    query = {"portal_type": "Analysis"}
    # Client filter: explicit ClientUID wins; otherwise the logged-in
    # client (if any).
    client_title = None
    if "ClientUID" in self.request.form:
        client_uid = self.request.form["ClientUID"]
        query["getClientUID"] = client_uid
        client = rc.lookupObject(client_uid)
        client_title = client.Title()
    else:
        client = logged_in_client(self.context)
        if client:
            client_title = client.Title()
            query["getClientUID"] = client.UID()
    if client_title:
        parms.append({"title": _("Client"), "value": client_title, "type": "text"})
    # Optional 'Requested' date range (applied to the created index).
    date_query = formatDateQuery(self.context, "Requested")
    if date_query:
        query["created"] = date_query
        requested = formatDateParms(self.context, "Requested")
        parms.append({"title": _("Requested"), "value": requested, "type": "text"})
    # Optional 'Published' date range.
    date_query = formatDateQuery(self.context, "Published")
    if date_query:
        query["getDatePublished"] = date_query
        published = formatDateParms(self.context, "Published")
        parms.append({"title": _("Published"), "value": published, "type": "text"})
    workflow = getToolByName(self.context, "portal_workflow")
    # Optional workflow-state filters, each echoed as a report parameter
    # with its human-readable state title.
    if "bika_analysis_workflow" in self.request.form:
        query["review_state"] = self.request.form["bika_analysis_workflow"]
        review_state = workflow.getTitleForStateOnType(self.request.form["bika_analysis_workflow"], "Analysis")
        parms.append({"title": _("Status"), "value": review_state, "type": "text"})
    if "bika_cancellation_workflow" in self.request.form:
        query["cancellation_state"] = self.request.form["bika_cancellation_workflow"]
        cancellation_state = workflow.getTitleForStateOnType(
            self.request.form["bika_cancellation_workflow"], "Analysis"
        )
        parms.append({"title": _("Active"), "value": cancellation_state, "type": "text"})
    if "bika_worksheetanalysis_workflow" in self.request.form:
        query["worksheetanalysis_review_state"] = self.request.form["bika_worksheetanalysis_workflow"]
        ws_review_state = workflow.getTitleForStateOnType(
            self.request.form["bika_worksheetanalysis_workflow"], "Analysis"
        )
        parms.append({"title": _("Assigned to worksheet"), "value": ws_review_state, "type": "text"})
    # and now lets do the actual report lines
    formats = {"columns": 2, "col_heads": [_("Analysis service"), _("Number of analyses")], "class": ""}
    datalines = []
    count_all = 0
    for cat in sc(portal_type="AnalysisCategory", sort_on="sortable_title"):
        # Category heading row (length 1; skipped in the CSV export).
        # NOTE(review): unlike the turnaround report, this heading is
        # appended even when the category ends up with no service rows.
        dataline = [{"value": cat.Title, "class": "category_heading", "colspan": 2}]
        datalines.append(dataline)
        for service in sc(portal_type="AnalysisService", getCategoryUID=cat.UID, sort_on="sortable_title"):
            # Re-use the filter query, narrowing it to this service.
            query["getServiceUID"] = service.UID
            analyses = bc(query)
            count_analyses = len(analyses)
            dataline = []
            dataitem = {"value": service.Title}
            dataline.append(dataitem)
            dataitem = {"value": count_analyses}
            dataline.append(dataitem)
            datalines.append(dataline)
            count_all += count_analyses
    # footer data
    footlines = []
    footline = []
    footitem = {"value": _("Total"), "class": "total_label"}
    footline.append(footitem)
    footitem = {"value": count_all}
    footline.append(footitem)
    footlines.append(footline)
    self.report_content = {
        "headings": headings,
        "parms": parms,
        "formats": formats,
        "datalines": datalines,
        "footings": footlines,
    }
    title = t(headings["header"])
    if self.request.get("output_format", "") == "CSV":
        # CSV export path (Python 2 csv/StringIO).
        import csv
        import StringIO
        import datetime
        fieldnames = ["Analysis Service", "Analyses"]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction="ignore", fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            if len(row) == 1:
                # category heading thingy
                continue
            dw.writerow({"Analysis Service": row[0]["value"], "Analyses": row[1]["value"]})
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader("Content-Type", "text/csv")
        setheader("Content-Disposition", 'attachment;filename="analysesperservice_%s.csv"' % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {"report_title": title, "report_data": self.template()}
def __call__(self):
    """Render the 'Analysis turnaround times' report.

    Aggregates per-service earliness/lateness of published analyses
    (optionally filtered by client, received date and worksheet
    assignment state), groups rows by analysis category with category
    subtotals and a grand-total footer, and either returns the rendered
    template payload or streams a CSV download depending on the
    requested ``output_format``.
    """
    # get all the data into datalines
    sc = getToolByName(self.context, "bika_setup_catalog")
    bc = getToolByName(self.context, "bika_analysis_catalog")
    rc = getToolByName(self.context, "reference_catalog")
    self.report_content = {}
    parms = []
    headings = {}
    headings["header"] = _("Analysis turnaround times")
    headings["subheader"] = _("The turnaround time of analyses")
    query = {"portal_type": "Analysis"}
    # Client filter: explicit ClientUID wins; otherwise the logged-in
    # client (if any).
    client_title = None
    if "ClientUID" in self.request.form:
        client_uid = self.request.form["ClientUID"]
        query["getClientUID"] = client_uid
        client = rc.lookupObject(client_uid)
        client_title = client.Title()
    else:
        client = logged_in_client(self.context)
        if client:
            client_title = client.Title()
            query["getClientUID"] = client.UID()
    if client_title:
        parms.append({"title": _("Client"), "value": client_title, "type": "text"})
    # NOTE(review): the 'Received' date range is applied to the
    # 'created' catalog index — confirm this is intentional.
    date_query = formatDateQuery(self.context, "Received")
    if date_query:
        query["created"] = date_query
        received = formatDateParms(self.context, "Received")
        parms.append({"title": _("Received"), "value": received, "type": "text"})
    # Only published analyses contribute to turnaround statistics.
    query["review_state"] = "published"
    workflow = getToolByName(self.context, "portal_workflow")
    if "bika_worksheetanalysis_workflow" in self.request.form:
        query["worksheetanalysis_review_state"] = self.request.form["bika_worksheetanalysis_workflow"]
        ws_review_state = workflow.getTitleForStateOnType(
            self.request.form["bika_worksheetanalysis_workflow"], "Analysis"
        )
        parms.append({"title": _("Assigned to worksheet"), "value": ws_review_state, "type": "text"})
    # query all the analyses and increment the counts
    # Per-service accumulators; getEarliness() sign decides the bucket:
    # negative = late, positive = early, zero = undefined.
    count_early = 0
    mins_early = 0
    count_late = 0
    mins_late = 0
    count_undefined = 0
    services = {}
    analyses = bc(query)
    for a in analyses:
        analysis = a.getObject()
        service_uid = analysis.getServiceUID()
        if service_uid not in services:
            services[service_uid] = {
                "count_early": 0,
                "count_late": 0,
                "mins_early": 0,
                "mins_late": 0,
                "count_undefined": 0,
            }
        earliness = analysis.getEarliness()
        if earliness < 0:
            count_late = services[service_uid]["count_late"]
            mins_late = services[service_uid]["mins_late"]
            count_late += 1
            # earliness is negative here; subtracting accumulates a
            # positive count of minutes late.
            mins_late -= earliness
            services[service_uid]["count_late"] = count_late
            services[service_uid]["mins_late"] = mins_late
        if earliness > 0:
            count_early = services[service_uid]["count_early"]
            mins_early = services[service_uid]["mins_early"]
            count_early += 1
            mins_early += earliness
            services[service_uid]["count_early"] = count_early
            services[service_uid]["mins_early"] = mins_early
        if earliness == 0:
            count_undefined = services[service_uid]["count_undefined"]
            count_undefined += 1
            services[service_uid]["count_undefined"] = count_undefined
    # calculate averages
    # Empty string when there is nothing to average; otherwise a
    # human-readable duration (Py2 '/' floors for ints).
    for service_uid in services.keys():
        count_early = services[service_uid]["count_early"]
        mins_early = services[service_uid]["mins_early"]
        if count_early == 0:
            services[service_uid]["ave_early"] = ""
        else:
            avemins = (mins_early) / count_early
            services[service_uid]["ave_early"] = formatDuration(self.context, avemins)
        count_late = services[service_uid]["count_late"]
        mins_late = services[service_uid]["mins_late"]
        if count_late == 0:
            services[service_uid]["ave_late"] = ""
        else:
            avemins = mins_late / count_late
            services[service_uid]["ave_late"] = formatDuration(self.context, avemins)
    # and now lets do the actual report lines
    formats = {
        "columns": 7,
        "col_heads": [
            _("Analysis"),
            _("Count"),
            _("Undefined"),
            _("Late"),
            _("Average late"),
            _("Early"),
            _("Average early"),
        ],
        "class": "",
    }
    total_count_early = 0
    total_count_late = 0
    total_mins_early = 0
    total_mins_late = 0
    total_count_undefined = 0
    datalines = []
    for cat in sc(portal_type="AnalysisCategory", sort_on="sortable_title"):
        # Heading row is only emitted (via first_time) when at least one
        # service in the category has matching analyses.
        catline = [{"value": cat.Title, "class": "category_heading", "colspan": 7}]
        first_time = True
        cat_count_early = 0
        cat_count_late = 0
        cat_count_undefined = 0
        cat_mins_early = 0
        cat_mins_late = 0
        for service in sc(portal_type="AnalysisService", getCategoryUID=cat.UID, sort_on="sortable_title"):
            dataline = [{"value": service.Title, "class": "testgreen"}]
            # Skip services with no matching analyses in this run.
            if service.UID not in services:
                continue
            if first_time:
                datalines.append(catline)
                first_time = False
            # analyses found
            cat_count_early += services[service.UID]["count_early"]
            cat_count_late += services[service.UID]["count_late"]
            cat_count_undefined += services[service.UID]["count_undefined"]
            cat_mins_early += services[service.UID]["mins_early"]
            cat_mins_late += services[service.UID]["mins_late"]
            count = (
                services[service.UID]["count_early"]
                + services[service.UID]["count_late"]
                + services[service.UID]["count_undefined"]
            )
            dataline.append({"value": count, "class": "number"})
            dataline.append({"value": services[service.UID]["count_undefined"], "class": "number"})
            dataline.append({"value": services[service.UID]["count_late"], "class": "number"})
            dataline.append({"value": services[service.UID]["ave_late"], "class": "number"})
            dataline.append({"value": services[service.UID]["count_early"], "class": "number"})
            dataline.append({"value": services[service.UID]["ave_early"], "class": "number"})
            datalines.append(dataline)
        # category totals
        # NOTE(review): subtotal averages are raw minute values, unlike
        # the formatDuration()-formatted footer averages — presumably an
        # oversight; confirm before changing.
        dataline = [{"value": "%s - total" % (cat.Title), "class": "subtotal_label"}]
        dataline.append(
            {"value": cat_count_early + cat_count_late + cat_count_undefined, "class": "subtotal_number"}
        )
        dataline.append({"value": cat_count_undefined, "class": "subtotal_number"})
        dataline.append({"value": cat_count_late, "class": "subtotal_number"})
        if cat_count_late:
            dataitem = {"value": cat_mins_late / cat_count_late, "class": "subtotal_number"}
        else:
            dataitem = {"value": 0, "class": "subtotal_number"}
        dataline.append(dataitem)
        dataline.append({"value": cat_count_early, "class": "subtotal_number"})
        if cat_count_early:
            dataitem = {"value": cat_mins_early / cat_count_early, "class": "subtotal_number"}
        else:
            dataitem = {"value": 0, "class": "subtotal_number"}
        dataline.append(dataitem)
        total_count_early += cat_count_early
        total_count_late += cat_count_late
        total_count_undefined += cat_count_undefined
        total_mins_early += cat_mins_early
        total_mins_late += cat_mins_late
    # footer data
    footlines = []
    footline = []
    footline = [{"value": _("Total"), "class": "total"}]
    footline.append(
        {"value": total_count_early + total_count_late + total_count_undefined, "class": "total number"}
    )
    footline.append({"value": total_count_undefined, "class": "total number"})
    footline.append({"value": total_count_late, "class": "total number"})
    if total_count_late:
        ave_mins = total_mins_late / total_count_late
        footline.append({"value": formatDuration(self.context, ave_mins), "class": "total number"})
    else:
        # NOTE(review): this fallback omits the 'total number' class
        # that its 'early' counterpart below includes — likely an
        # oversight; confirm before changing.
        footline.append({"value": ""})
    footline.append({"value": total_count_early, "class": "total number"})
    if total_count_early:
        ave_mins = total_mins_early / total_count_early
        footline.append({"value": formatDuration(self.context, ave_mins), "class": "total number"})
    else:
        footline.append({"value": "", "class": "total number"})
    footlines.append(footline)
    self.report_content = {
        "headings": headings,
        "parms": parms,
        "formats": formats,
        "datalines": datalines,
        "footings": footlines,
    }
    if self.request.get("output_format", "") == "CSV":
        # CSV export path: category heading rows (length 1) are skipped.
        import csv
        import StringIO
        import datetime
        fieldnames = ["Analysis", "Count", "Undefined", "Late", "Average late", "Early", "Average early"]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction="ignore", fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            if len(row) == 1:
                # category heading thingy
                continue
            dw.writerow(
                {
                    "Analysis": row[0]["value"],
                    "Count": row[1]["value"],
                    "Undefined": row[2]["value"],
                    "Late": row[3]["value"],
                    "Average late": row[4]["value"],
                    "Early": row[5]["value"],
                    "Average early": row[6]["value"],
                }
            )
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader("Content-Type", "text/csv")
        setheader("Content-Disposition", 'attachment;filename="analysestats_%s.csv"' % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {"report_title": t(headings["header"]), "report_data": self.template()}
def __call__(self):
    """Render the 'Analysis requests not invoiced' report.

    Lists published AnalysisRequests whose ``getInvoiced`` flag is
    False — one row per request (client, request ID, sample type/point,
    publication date, total price) — with a row count in the footer.
    Returns the rendered template payload.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    # BUGFIX: the original referenced `wf_tool` without ever defining
    # it, raising NameError whenever a cancellation_state filter was
    # submitted. Fetch the workflow tool up front.
    wf_tool = getToolByName(self.context, 'portal_workflow')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analysis requests not invoiced")
    headings['subheader'] = _(
        "Published Analysis Requests which have not been invoiced")

    count_all = 0
    query = {
        'portal_type': 'AnalysisRequest',
        'getInvoiced': False,
        'review_state': 'published',
        'sort_order': 'reverse'
    }
    # Optional publication-date range filter, echoed in the header.
    date_query = formatDateQuery(self.context, 'c_DatePublished')
    if date_query:
        query['getDatePublished'] = date_query
        published = formatDateParms(self.context, 'c_DatePublished')
    else:
        published = 'Undefined'
    parms.append({
        'title': _('Published'),
        'value': published,
        'type': 'text'
    })

    # Optional cancellation-state filter. (`in` replaces the Py2-only
    # dict.has_key for forward compatibility.)
    if 'cancellation_state' in self.request.form:
        query['cancellation_state'] = self.request.form[
            'cancellation_state']
        cancellation_state = wf_tool.getTitleForStateOnType(
            self.request.form['cancellation_state'], 'AnalysisRequest')
    else:
        cancellation_state = 'Undefined'
    parms.append({
        'title': _('Active'),
        'value': cancellation_state,
        'type': 'text'
    })

    # and now lets do the actual report lines
    formats = {
        'columns': 6,
        'col_heads': [_('Client'),
                      _('Request'),
                      _('Sample type'),
                      _('Sample point'),
                      _('Published'),
                      _('Amount'),
                      ],
        'class': '',
    }

    datalines = []
    for ar_proxy in bc(query):
        ar = ar_proxy.getObject()
        dataline = [
            {'value': ar.aq_parent.Title()},  # container is the client
            {'value': ar.getRequestID()},
            {'value': ar.getSampleTypeTitle()},
            {'value': ar.getSamplePointTitle()},
            {'value': self.ulocalized_time(ar.getDatePublished())},
            {'value': ar.getTotalPrice()},
        ]
        datalines.append(dataline)
        count_all += 1

    # table footer data
    # NOTE(review): this label looks copy-pasted from a "retested"
    # report; left unchanged because the msgid is translated elsewhere.
    footlines = []
    footline = []
    footitem = {
        'value': _('Number of analyses retested for period'),
        'colspan': 5,
        'class': 'total_label'
    }
    footline.append(footitem)
    footitem = {'value': count_all}
    footline.append(footitem)
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines
    }

    return {
        'report_title': t(headings['header']),
        'report_data': self.template()
    }
def __call__(self):
    """Render the 'Analysis requests not invoiced' report.

    Lists published AnalysisRequests whose ``getInvoiced`` flag is
    False — one row per request (client, request ID, sample type/point,
    publication date, total price) — with a row count in the footer.
    Returns the rendered template payload.
    """
    bc = getToolByName(self.context, 'bika_catalog')
    # BUGFIX: the original referenced `wf_tool` without ever defining
    # it, raising NameError whenever a cancellation_state filter was
    # submitted. Fetch the workflow tool up front.
    wf_tool = getToolByName(self.context, 'portal_workflow')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analysis requests not invoiced")
    headings['subheader'] = _(
        "Published Analysis Requests which have not been invoiced")

    count_all = 0
    query = {'portal_type': 'AnalysisRequest',
             'getInvoiced': False,
             'review_state': 'published',
             'sort_order': 'reverse'}
    # Optional publication-date range filter, echoed in the header.
    date_query = formatDateQuery(self.context, 'c_DatePublished')
    if date_query:
        query['getDatePublished'] = date_query
        published = formatDateParms(self.context, 'c_DatePublished')
    else:
        published = 'Undefined'
    parms.append(
        {'title': _('Published'), 'value': published, 'type': 'text'})

    # Optional cancellation-state filter. (`in` replaces the Py2-only
    # dict.has_key for forward compatibility.)
    if 'cancellation_state' in self.request.form:
        query['cancellation_state'] = self.request.form['cancellation_state']
        cancellation_state = wf_tool.getTitleForStateOnType(
            self.request.form['cancellation_state'], 'AnalysisRequest')
    else:
        cancellation_state = 'Undefined'
    parms.append(
        {'title': _('Active'), 'value': cancellation_state, 'type': 'text'})

    # and now lets do the actual report lines
    formats = {'columns': 6,
               'col_heads': [_('Client'),
                             _('Request'),
                             _('Sample type'),
                             _('Sample point'),
                             _('Published'),
                             _('Amount'),
                             ],
               'class': '',
               }

    datalines = []
    for ar_proxy in bc(query):
        ar = ar_proxy.getObject()
        dataline = [
            {'value': ar.aq_parent.Title()},  # container is the client
            {'value': ar.getRequestID()},
            {'value': ar.getSampleTypeTitle()},
            {'value': ar.getSamplePointTitle()},
            {'value': self.ulocalized_time(ar.getDatePublished())},
            {'value': ar.getTotalPrice()},
        ]
        datalines.append(dataline)
        count_all += 1

    # table footer data
    # NOTE(review): this label looks copy-pasted from a "retested"
    # report; left unchanged because the msgid is translated elsewhere.
    footlines = []
    footline = []
    footitem = {'value': _('Number of analyses retested for period'),
                'colspan': 5,
                'class': 'total_label'}
    footline.append(footitem)
    footitem = {'value': count_all}
    footline.append(footitem)
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines}

    return {'report_title': t(headings['header']),
            'report_data': self.template()}
def __call__(self):
    """Render the 'Attachments' report.

    Lists every Attachment (optionally filtered by client and load
    date) with its request title, file name, attachment type, content
    type, size and load date; either returns the rendered template
    payload or streams a CSV download depending on the requested
    ``output_format``.
    """
    # get all the data into datalines
    pc = getToolByName(self.context, 'portal_catalog')
    rc = getToolByName(self.context, 'reference_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Attachments")
    headings['subheader'] = _(
        "The attachments linked to analysis requests and analyses")

    count_all = 0
    query = {'portal_type': 'Attachment'}
    # Client filter: explicit form UID wins, then the logged-in client,
    # otherwise all clients.
    if 'ClientUID' in self.request.form:
        client_uid = self.request.form['ClientUID']
        query['getClientUID'] = client_uid
        client = rc.lookupObject(client_uid)
        client_title = client.Title()
    else:
        client = logged_in_client(self.context)
        if client:
            client_title = client.Title()
            query['getClientUID'] = client.UID()
        else:
            client_title = 'All'
    parms.append(
        {'title': _('Client'), 'value': client_title, 'type': 'text'})

    # Optional 'Loaded' date-range filter, echoed in the header.
    date_query = formatDateQuery(self.context, 'Loaded')
    if date_query:
        query['getDateLoaded'] = date_query
        loaded = formatDateParms(self.context, 'Loaded')
        parms.append(
            {'title': _('Loaded'), 'value': loaded, 'type': 'text'})

    # and now lets do the actual report lines
    formats = {'columns': 6,
               'col_heads': [_('Request'),
                             _('File'),
                             _('Attachment type'),
                             _('Content type'),
                             _('Size'),
                             _('Loaded'),
                             ],
               'class': '',
               }

    datalines = []
    attachments = pc(query)
    for a_proxy in attachments:
        attachment = a_proxy.getObject()
        attachment_file = attachment.getAttachmentFile()
        icon = attachment_file.getBestIcon()
        filename = attachment_file.filename
        # Integer KB, promoted to MB above 1024 (Py2 '/' floors ints).
        filesize = attachment_file.get_size()
        filesize = filesize / 1024
        sizeunit = "Kb"
        if filesize > 1024:
            filesize = filesize / 1024
            sizeunit = "Mb"
        dateloaded = attachment.getDateLoaded()
        dataline = []
        dataitem = {'value': attachment.getTextTitle()}
        dataline.append(dataitem)
        dataitem = {'value': filename, 'img_before': icon}
        dataline.append(dataitem)
        # Attachment type may be unset.
        dataitem = {
            'value': attachment.getAttachmentType().Title()
            if attachment.getAttachmentType() else ''}
        dataline.append(dataitem)
        dataitem = {
            'value': self.context.lookupMime(attachment_file.getContentType())}
        dataline.append(dataitem)
        dataitem = {'value': '%s%s' % (filesize, sizeunit)}
        dataline.append(dataitem)
        dataitem = {'value': self.ulocalized_time(dateloaded)}
        dataline.append(dataitem)
        datalines.append(dataline)
        count_all += 1

    # footer data
    footlines = []
    footline = []
    footitem = {'value': _('Total'), 'colspan': 5, 'class': 'total_label'}
    footline.append(footitem)
    footitem = {'value': count_all}
    footline.append(footitem)
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines}

    if self.request.get('output_format', '') == 'CSV':
        import csv
        import StringIO
        import datetime
        fieldnames = [
            _('Request'),
            _('File'),
            _('Attachment type'),
            _('Content type'),
            _('Size'),
            _('Loaded'),
        ]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            # BUGFIX: csv.DictWriter.writerow() expects a mapping keyed
            # by the fieldnames; the original passed the raw list of
            # cell dicts, which breaks the CSV export. Map each cell's
            # 'value' onto its column instead.
            dw.writerow(dict(zip(fieldnames,
                                 [cell['value'] for cell in row])))
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'text/csv')
        setheader("Content-Disposition",
                  "attachment;filename=\"analysesattachments_%s.csv\"" % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {'report_title': t(headings['header']),
                'report_data': self.template()}
def __call__(self):
    """Render the 'Analysis turnaround times over time' report: average
    turnaround time of published analyses bucketed by Day / Week / Month,
    optionally filtered by service, analyst, instrument and date range.
    Returns the rendered template dict, or streams CSV when
    ``output_format=CSV`` is requested.
    """
    # get all the data into datalines
    bc = getToolByName(self.context, 'bika_analysis_catalog')
    rc = getToolByName(self.context, 'reference_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analysis turnaround times over time")
    headings['subheader'] = \
        _("The turnaround time of analyses plotted over time")
    query = {'portal_type': 'Analysis'}
    if 'ServiceUID' in self.request.form:
        service_uid = self.request.form['ServiceUID']
        query['ServiceUID'] = service_uid
        service = rc.lookupObject(service_uid)
        service_title = service.Title()
        parms.append(
            {'title': _('Analysis Service'),
             'value': service_title,
             'type': 'text'})
    if 'Analyst' in self.request.form:
        analyst = self.request.form['Analyst']
        query['getAnalyst'] = analyst
        analyst_title = self.user_fullname(analyst)
        parms.append(
            {'title': _('Analyst'),
             'value': analyst_title,
             'type': 'text'})
    if 'getInstrumentUID' in self.request.form:
        instrument_uid = self.request.form['getInstrumentUID']
        query['getInstrument'] = instrument_uid
        instrument = rc.lookupObject(instrument_uid)
        instrument_title = instrument.Title()
        parms.append(
            {'title': _('Instrument'),
             'value': instrument_title,
             'type': 'text'})
    if 'Period' in self.request.form:
        period = self.request.form['Period']
    else:
        period = 'Day'
    date_query = formatDateQuery(self.context, 'tats_DateReceived')
    if date_query:
        query['created'] = date_query
        received = formatDateParms(self.context, 'tats_DateReceived')
        parms.append(
            {'title': _('Received'), 'value': received, 'type': 'text'})
    query['review_state'] = 'published'

    # query all the analyses and increment the counts
    periods = {}
    total_count = 0
    total_duration = 0
    analyses = bc(query)
    for a in analyses:
        analysis = a.getObject()
        received = analysis.created()
        if period == 'Day':
            datekey = received.strftime('%d %b %Y')
        elif period == 'Week':
            # key period on Monday
            dayofweek = received.strftime('%w')  # Sunday = '0'
            # BUGFIX: strftime returns a *string*; the old `== 0` test
            # never matched, so Sundays were shifted one day forward
            # (received - (0 - 1)) instead of back to the previous Monday.
            if dayofweek == '0':
                firstday = received - 6
            else:
                firstday = received - (int(dayofweek) - 1)
            datekey = firstday.strftime(self.date_format_short)
        elif period == 'Month':
            # NOTE(review): '%m-%d' produces a month+day key, which looks
            # like day-level bucketing rather than monthly -- confirm
            # whether '%b %Y' was intended; behavior kept as-is.
            datekey = received.strftime('%m-%d')
        if datekey not in periods:
            periods[datekey] = {'count': 0,
                                'duration': 0, }
        duration = analysis.getDuration()
        periods[datekey]['count'] += 1
        periods[datekey]['duration'] += duration
        total_count += 1
        # BUGFIX: accumulate this analysis' own duration; the old code
        # added the bucket's *running total* each iteration, inflating
        # the overall average TAT.
        total_duration += duration

    # calculate averages
    for datekey in periods.keys():
        count = periods[datekey]['count']
        duration = periods[datekey]['duration']
        ave_duration = duration / count
        periods[datekey]['duration'] = \
            formatDuration(self.context, ave_duration)

    # and now lets do the actual report lines
    formats = {'columns': 2,
               'col_heads': [_('Date'),
                             _('Turnaround time (h)'),
                             ],
               'class': '',
               }
    datalines = []
    period_keys = periods.keys()
    for period in period_keys:
        dataline = [{'value': period, 'class': ''}, ]
        dataline.append({'value': periods[period]['duration'],
                         'class': 'number'})
        datalines.append(dataline)

    if total_count > 0:
        ave_total_duration = total_duration / total_count
    else:
        ave_total_duration = 0
    ave_total_duration = formatDuration(self.context, ave_total_duration)

    # footer data
    footlines = []
    footline = [{'value': _('Total data points'), 'class': 'total'}, ]
    footline.append({'value': total_count, 'class': 'total number'})
    footlines.append(footline)
    footline = [{'value': _('Average TAT'), 'class': 'total'}, ]
    footline.append({'value': ave_total_duration,
                     'class': 'total number'})
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines}

    if self.request.get('output_format', '') == 'CSV':
        import csv
        import StringIO
        import datetime
        fieldnames = [
            'Date',
            'Turnaround time (h)',
        ]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction='ignore',
                            fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            dw.writerow({
                'Date': row[0]['value'],
                'Turnaround time (h)': row[1]['value'],
            })
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'text/csv')
        setheader("Content-Disposition",
                  "attachment;filename=\"analysesperservice_%s.csv\"" % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {'report_title': t(headings['header']),
                'report_data': self.template()}
def __call__(self):
    """Render the 'Analyses retested' report: all analyses flagged as
    retested, optionally filtered by received date and workflow states.
    Returns the rendered template dict.
    """
    bac = getToolByName(self.context, 'bika_analysis_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analyses retested")
    headings['subheader'] = _("Analyses which have been retested")
    count_all = 0
    query = {'portal_type': 'Analysis',
             'getRetested': True,
             'sort_order': 'reverse'}
    date_query = formatDateQuery(self.context, 'Received')
    if date_query:
        query['getDateReceived'] = date_query
        received = formatDateParms(self.context, 'Received')
    else:
        received = 'Undefined'
    parms.append(
        {'title': _('Received'), 'value': received, 'type': 'text'})

    wf_tool = getToolByName(self.context, 'portal_workflow')
    # has_key() is deprecated; use the `in` operator instead.
    if 'bika_analysis_workflow' in self.request.form:
        query['review_state'] = self.request.form['bika_analysis_workflow']
        review_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_analysis_workflow'], 'Analysis')
    else:
        review_state = 'Undefined'
    parms.append(
        {'title': _('Status'), 'value': review_state, 'type': 'text'})

    if 'bika_cancellation_workflow' in self.request.form:
        query['cancellation_state'] = self.request.form[
            'bika_cancellation_workflow']
        cancellation_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_cancellation_workflow'], 'Analysis')
    else:
        cancellation_state = 'Undefined'
    parms.append(
        {'title': _('Active'), 'value': cancellation_state, 'type': 'text'})

    if 'bika_worksheetanalysis_workflow' in self.request.form:
        query['worksheetanalysis_review_state'] = self.request.form[
            'bika_worksheetanalysis_workflow']
        ws_review_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_worksheetanalysis_workflow'], 'Analysis')
    else:
        ws_review_state = 'Undefined'
    parms.append(
        {'title': _('Assigned to worksheet'),
         'value': ws_review_state,
         'type': 'text'})

    # and now lets do the actual report lines
    formats = {'columns': 8,
               'col_heads': [_('Client'),
                             _('Request'),
                             _('Sample type'),
                             _('Sample point'),
                             _('Category'),
                             _('Analysis'),
                             _('Received'),
                             _('Status'),
                             ],
               'class': '',
               }
    datalines = []
    for a_proxy in bac(query):
        analysis = a_proxy.getObject()
        dataline = []
        dataitem = {'value': analysis.getClientTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getRequestID()}
        dataline.append(dataitem)
        # Sample type / point live on the parent Analysis Request.
        dataitem = {'value': analysis.aq_parent.getSampleTypeTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.aq_parent.getSamplePointTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getCategoryTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getServiceTitle()}
        dataline.append(dataitem)
        dataitem = {'value': self.ulocalized_time(
            analysis.getDateReceived())}
        dataline.append(dataitem)
        state = wf_tool.getInfoFor(analysis, 'review_state', '')
        review_state = wf_tool.getTitleForStateOnType(state, 'Analysis')
        dataitem = {'value': review_state}
        dataline.append(dataitem)
        datalines.append(dataline)
        count_all += 1

    # table footer data
    footlines = []
    footline = []
    footitem = {'value': _('Number of analyses retested for period'),
                'colspan': 7,
                'class': 'total_label'}
    footline.append(footitem)
    footitem = {'value': count_all}
    footline.append(footitem)
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines,
    }
    title = t(headings['header'])
    return {'report_title': title, 'report_data': self.template()}
def __call__(self):
    """Render the 'Analysis requests and analyses per client' report:
    per-client counts of analysis requests and analyses, optionally
    restricted to a single client and filtered by date / workflow
    states.  Returns the rendered template dict, or streams CSV when
    ``output_format=CSV`` is requested.
    """
    # get all the data into datalines
    pc = getToolByName(self.context, 'portal_catalog')
    bac = getToolByName(self.context, 'bika_analysis_catalog')
    bc = getToolByName(self.context, 'bika_catalog')
    rc = getToolByName(self.context, 'reference_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    count_all_ars = 0
    count_all_analyses = 0
    query = {}
    # Client scope: a logged-in client only sees itself; lab users may
    # pick one explicitly via the ClientUID request parameter.
    this_client = logged_in_client(self.context)
    if not this_client and 'ClientUID' in self.request.form:
        client_uid = self.request.form['ClientUID']
        this_client = rc.lookupObject(client_uid)
        parms.append(
            {'title': _('Client'),
             'value': this_client.Title(),
             'type': 'text'})
    if this_client:
        headings['header'] = _("Analysis requests and analyses")
        headings['subheader'] = _("Number of Analysis requests and analyses")
    else:
        headings['header'] = _("Analysis requests and analyses per client")
        headings['subheader'] = _(
            "Number of Analysis requests and analyses per client")

    date_query = formatDateQuery(self.context, 'Requested')
    if date_query:
        query['created'] = date_query
        requested = formatDateParms(self.context, 'Requested')
        parms.append(
            {'title': _('Requested'), 'value': requested, 'type': 'text'})

    workflow = getToolByName(self.context, 'portal_workflow')
    if 'bika_analysis_workflow' in self.request.form:
        query['review_state'] = self.request.form['bika_analysis_workflow']
        review_state = workflow.getTitleForStateOnType(
            self.request.form['bika_analysis_workflow'], 'Analysis')
        parms.append(
            {'title': _('Status'), 'value': review_state, 'type': 'text'})
    if 'bika_cancellation_workflow' in self.request.form:
        query['cancellation_state'] = self.request.form[
            'bika_cancellation_workflow']
        cancellation_state = workflow.getTitleForStateOnType(
            self.request.form['bika_cancellation_workflow'], 'Analysis')
        parms.append({'title': _('Active'),
                      'value': cancellation_state,
                      'type': 'text'})
    # BUGFIX: this branch was duplicated verbatim, which appended the
    # 'Assigned to worksheet' parm twice; the duplicate was removed.
    if 'bika_worksheetanalysis_workflow' in self.request.form:
        query['worksheetanalysis_review_state'] = self.request.form[
            'bika_worksheetanalysis_workflow']
        ws_review_state = workflow.getTitleForStateOnType(
            self.request.form['bika_worksheetanalysis_workflow'], 'Analysis')
        parms.append(
            {'title': _('Assigned to worksheet'),
             'value': ws_review_state,
             'type': 'text'})

    # and now lets do the actual report lines
    formats = {'columns': 3,
               'col_heads': [_('Client'),
                             _('Number of requests'),
                             _('Number of analyses')],
               'class': ''}
    datalines = []
    if this_client:
        c_proxies = pc(portal_type="Client", UID=this_client.UID())
    else:
        c_proxies = pc(portal_type="Client", sort_on='sortable_title')
    for client in c_proxies:
        query['getClientUID'] = client.UID
        dataline = [{'value': client.Title}, ]
        query['portal_type'] = 'AnalysisRequest'
        ars = bc(query)
        count_ars = len(ars)
        dataitem = {'value': count_ars}
        dataline.append(dataitem)
        query['portal_type'] = 'Analysis'
        analyses = bac(query)
        count_analyses = len(analyses)
        dataitem = {'value': count_analyses}
        dataline.append(dataitem)
        datalines.append(dataline)
        count_all_analyses += count_analyses
        count_all_ars += count_ars

    # footer data: only meaningful when listing more than one client
    footlines = []
    if not this_client:
        footline = []
        footitem = {'value': _('Total'), 'class': 'total_label'}
        footline.append(footitem)
        footitem = {'value': count_all_ars}
        footline.append(footitem)
        footitem = {'value': count_all_analyses}
        footline.append(footitem)
        footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines}

    if self.request.get('output_format', '') == 'CSV':
        import csv
        import StringIO
        import datetime
        fieldnames = [
            'Client',
            'Analysis Requests',
            'Analyses',
        ]
        output = StringIO.StringIO()
        dw = csv.DictWriter(output, extrasaction='ignore',
                            fieldnames=fieldnames)
        dw.writerow(dict((fn, fn) for fn in fieldnames))
        for row in datalines:
            dw.writerow({
                'Client': row[0]['value'],
                'Analysis Requests': row[1]['value'],
                'Analyses': row[2]['value'],
            })
        report_data = output.getvalue()
        output.close()
        date = datetime.datetime.now().strftime("%Y%m%d%H%M")
        setheader = self.request.RESPONSE.setHeader
        setheader('Content-Type', 'text/csv')
        setheader("Content-Disposition",
                  "attachment;filename=\"analysesperclient_%s.csv\"" % date)
        self.request.RESPONSE.write(report_data)
    else:
        return {'report_title': t(headings['header']),
                'report_data': self.template()}
def __call__(self):
    """Render the 'Analyses retested' report: all analyses flagged as
    retested, optionally filtered by received date and workflow states.
    Returns the rendered template dict.
    """
    bac = getToolByName(self.context, 'bika_analysis_catalog')
    self.report_content = {}
    parms = []
    headings = {}
    headings['header'] = _("Analyses retested")
    headings['subheader'] = _("Analyses which have been retested")
    count_all = 0
    query = {
        'portal_type': 'Analysis',
        'getRetested': True,
        'sort_order': 'reverse'
    }
    date_query = formatDateQuery(self.context, 'Received')
    if date_query:
        query['getDateReceived'] = date_query
        received = formatDateParms(self.context, 'Received')
    else:
        received = 'Undefined'
    parms.append({
        'title': _('Received'),
        'value': received,
        'type': 'text'
    })

    wf_tool = getToolByName(self.context, 'portal_workflow')
    # has_key() is deprecated; use the `in` operator instead.
    if 'bika_analysis_workflow' in self.request.form:
        query['review_state'] = self.request.form['bika_analysis_workflow']
        review_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_analysis_workflow'], 'Analysis')
    else:
        review_state = 'Undefined'
    parms.append({
        'title': _('Status'),
        'value': review_state,
        'type': 'text'
    })
    if 'bika_cancellation_workflow' in self.request.form:
        query['cancellation_state'] = self.request.form[
            'bika_cancellation_workflow']
        cancellation_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_cancellation_workflow'], 'Analysis')
    else:
        cancellation_state = 'Undefined'
    parms.append({
        'title': _('Active'),
        'value': cancellation_state,
        'type': 'text'
    })
    if 'bika_worksheetanalysis_workflow' in self.request.form:
        query['worksheetanalysis_review_state'] = self.request.form[
            'bika_worksheetanalysis_workflow']
        ws_review_state = wf_tool.getTitleForStateOnType(
            self.request.form['bika_worksheetanalysis_workflow'],
            'Analysis')
    else:
        ws_review_state = 'Undefined'
    parms.append({
        'title': _('Assigned to worksheet'),
        'value': ws_review_state,
        'type': 'text'
    })

    # and now lets do the actual report lines
    formats = {
        'columns': 8,
        'col_heads': [
            _('Client'),
            _('Request'),
            _('Sample type'),
            _('Sample point'),
            _('Category'),
            _('Analysis'),
            _('Received'),
            _('Status'),
        ],
        'class': '',
    }
    datalines = []
    for a_proxy in bac(query):
        analysis = a_proxy.getObject()
        dataline = []
        dataitem = {'value': analysis.getClientTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getRequestID()}
        dataline.append(dataitem)
        # Sample type / point live on the parent Analysis Request.
        dataitem = {'value': analysis.aq_parent.getSampleTypeTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.aq_parent.getSamplePointTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getCategoryTitle()}
        dataline.append(dataitem)
        dataitem = {'value': analysis.getServiceTitle()}
        dataline.append(dataitem)
        dataitem = {
            'value': self.ulocalized_time(analysis.getDateReceived())
        }
        dataline.append(dataitem)
        state = wf_tool.getInfoFor(analysis, 'review_state', '')
        review_state = wf_tool.getTitleForStateOnType(state, 'Analysis')
        dataitem = {'value': review_state}
        dataline.append(dataitem)
        datalines.append(dataline)
        count_all += 1

    # table footer data
    footlines = []
    footline = []
    footitem = {
        'value': _('Number of analyses retested for period'),
        'colspan': 7,
        'class': 'total_label'
    }
    footline.append(footitem)
    footitem = {'value': count_all}
    footline.append(footitem)
    footlines.append(footline)

    self.report_content = {
        'headings': headings,
        'parms': parms,
        'formats': formats,
        'datalines': datalines,
        'footings': footlines
    }
    title = t(headings['header'])
    return {'report_title': title, 'report_data': self.template()}