Example #1
    def __call__(self):
        this_client = logged_in_client(self.context)
        if this_client:
            self.contentFilter = {
                'portal_type': 'Report',
                'getClientUID': this_client.UID(),
                'sort_order': 'reverse'}
            self.columns = {
                'Title': {'title': _('Title')},
                'FileSize': {'title': _('Size')},
                'Created': {'title': _('Created')},
                'By': {'title': _('By')}, }
            self.review_states = [
                {'id': 'default',
                 'title': 'All',
                 'contentFilter': {},
                 'columns': ['Title',
                             'FileSize',
                             'Created',
                             'By']},
            ]
        else:
            self.contentFilter = {
                'portal_type': 'Report',
                'sort_order': 'reverse'}

            self.columns = {
                'Client': {'title': _('Client')},
                'Title': {'title': _('Report Type')},
                'FileSize': {'title': _('Size')},
                'Created': {'title': _('Created')},
                'By': {'title': _('By')},
            }
            self.review_states = [
                {'id': 'default',
                 'title': 'All',
                 'contentFilter': {},
                 'columns': ['Client',
                             'Title',
                             'FileSize',
                             'Created',
                             'By']},
            ]

        return super(ReportHistoryView, self).__call__()
Example #2
    def __call__(self):
        """Create and render selected report
        """

        # if there's an error, we return productivity.pt which requires these.
        self.selection_macros = SelectionMacrosView(self.context, self.request)
        self.additional_reports = []
        adapters = getAdapters((self.context, ), IProductivityReport)
        for name, adapter in adapters:
            report_dict = adapter(self.context, self.request)
            report_dict['id'] = name
            self.additional_reports.append(report_dict)

        report_id = self.request.get('report_id', '')
        if not report_id:
            message = _("No report specified in request")
            self.logger.error(message)
            self.context.plone_utils.addPortalMessage(message, 'error')
            return self.template()

        self.date = DateTime()
        username = self.context.portal_membership.getAuthenticatedMember().getUserName()
        self.reporter = self.user_fullname(username)
        self.reporter_email = self.user_email(username)

        # signature image
        self.reporter_signature = ""
        c = [x for x in self.bika_setup_catalog(portal_type='LabContact')
             if x.getObject().getUsername() == username]
        if c:
            sf = c[0].getObject().getSignature()
            if sf:
                self.reporter_signature = sf.absolute_url() + "/Signature"

        lab = self.context.bika_setup.laboratory
        self.laboratory = lab
        self.lab_title = lab.getName()
        self.lab_address = lab.getPrintAddress()
        self.lab_email = lab.getEmailAddress()
        self.lab_url = lab.getLabURL()

        client = logged_in_client(self.context)
        if client:
            clientuid = client.UID()
            self.client_title = client.Title()
            self.client_address = client.getPrintAddress()
        else:
            clientuid = None
            self.client_title = None
            self.client_address = None

        # Render form output

        # the report can add file names to this list; they will be deleted
        # once the PDF has been generated (temporary plot image files, etc.)
        self.request['to_remove'] = []

        if "report_module" in self.request:
            module = self.request["report_module"]
        else:
            module = "bika.lims.browser.reports.%s" % report_id
        try:
            exec ("from %s import Report" % module)
            # required during error redirect: the report must have a copy of
            # additional_reports, because it is used as a surrogate view.
            Report.additional_reports = self.additional_reports
        except ImportError:
            message = "Report %s.Report not found (shouldn't happen)" % module
            self.logger.error(message)
            self.context.plone_utils.addPortalMessage(message, 'error')
            return self.template()

        # Report must return dict with:
        # - report_title - title string for pdf/history listing
        # - report_data - rendered report
        output = Report(self.context, self.request)()

        # if CSV output is chosen, report returns None
        if not output:
            return

        if type(output) in (str, unicode, bytes):
            # remove temporary files
            for f in self.request['to_remove']:
                os.remove(f)
            return output

        # The report output gets pulled through report_frame.pt
        self.reportout = output['report_data']
        framed_output = self.frame_template()

        # render the framed report output to PDF
        result = createPdf(framed_output)

        # remove temporary files
        for f in self.request['to_remove']:
            os.remove(f)

        if result:
            # Create new report object
            reportid = self.aq_parent.generateUniqueId('Report')
            report = _createObjectByType("Report", self.aq_parent, reportid)
            report.edit(Client=clientuid)
            report.processForm()

            # write pdf to report object
            report.edit(title=output['report_title'], ReportFile=result)
            report.reindexObject()

            fn = "%s - %s" % (self.date.strftime(self.date_format_short),
                              _u(output['report_title']))

            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'application/pdf')
            setheader("Content-Disposition",
                      "attachment;filename=\"%s\"" % _c(fn))
            self.request.RESPONSE.write(result)

        return
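As the comments in the method above spell out, each report module is expected to expose a Report class whose __call__ returns either None (when the report has already written CSV output itself), a raw string, or a dict carrying report_title and report_data; it may also register temporary files in request['to_remove'] for cleanup once the PDF has been built. A minimal sketch of that contract follows; the class body and the temporary plot file are illustrative assumptions, and only the returned keys and the to_remove convention come from the calling code above.

# Hypothetical report module sketch. Only the dict keys ('report_title',
# 'report_data') and the request['to_remove'] cleanup list are taken from the
# calling code above; everything else is an assumption for illustration.
import tempfile

from Products.Five.browser import BrowserView


class Report(BrowserView):

    def __call__(self):
        # a report may create temporary artifacts (e.g. plot images) and
        # register them so the caller deletes them after the PDF is generated
        tmp = tempfile.NamedTemporaryFile(suffix=".png", delete=False)
        tmp.close()
        self.request['to_remove'].append(tmp.name)

        return {
            'report_title': 'Example report',         # shown in the PDF/history listing
            'report_data': '<h1>rendered HTML</h1>',  # pulled through report_frame.pt
        }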
    def __call__(self):
        # get all the data into datalines

        sc = getToolByName(self.context, 'bika_setup_catalog')
        bc = getToolByName(self.context, 'bika_analysis_catalog')
        rc = getToolByName(self.context, 'reference_catalog')
        self.report_content = {}
        parms = []
        headings = {}
        headings['header'] = _("Analysis turnaround times")
        headings['subheader'] = _("The turnaround time of analyses")

        query = {'portal_type': 'Analysis'}
        client_title = None
        if 'ClientUID' in self.request.form:
            client_uid = self.request.form['ClientUID']
            query['getClientUID'] = client_uid
            client = rc.lookupObject(client_uid)
            client_title = client.Title()
        else:
            client = logged_in_client(self.context)
            if client:
                client_title = client.Title()
                query['getClientUID'] = client.UID()
        if client_title:
            parms.append({
                'title': _('Client'),
                'value': client_title,
                'type': 'text'
            })

        date_query = formatDateQuery(self.context, 'Received')
        if date_query:
            query['created'] = date_query
            received = formatDateParms(self.context, 'Received')
            parms.append({
                'title': _('Received'),
                'value': received,
                'type': 'text'
            })

        query['review_state'] = 'published'

        workflow = getToolByName(self.context, 'portal_workflow')
        if 'bika_worksheetanalysis_workflow' in self.request.form:
            query['worksheetanalysis_review_state'] = self.request.form[
                'bika_worksheetanalysis_workflow']
            ws_review_state = workflow.getTitleForStateOnType(
                self.request.form['bika_worksheetanalysis_workflow'],
                'Analysis')
            parms.append({
                'title': _('Assigned to worksheet'),
                'value': ws_review_state,
                'type': 'text'
            })

        # query all the analyses and increment the counts
        count_early = 0
        mins_early = 0
        count_late = 0
        mins_late = 0
        count_undefined = 0
        services = {}

        analyses = bc(query)
        for a in analyses:
            analysis = a.getObject()
            service_uid = analysis.getServiceUID()
            if service_uid not in services:
                services[service_uid] = {
                    'count_early': 0,
                    'count_late': 0,
                    'mins_early': 0,
                    'mins_late': 0,
                    'count_undefined': 0,
                }
            earliness = analysis.getEarliness()
            if earliness < 0:
                count_late = services[service_uid]['count_late']
                mins_late = services[service_uid]['mins_late']
                count_late += 1
                mins_late -= earliness
                services[service_uid]['count_late'] = count_late
                services[service_uid]['mins_late'] = mins_late
            if earliness > 0:
                count_early = services[service_uid]['count_early']
                mins_early = services[service_uid]['mins_early']
                count_early += 1
                mins_early += earliness
                services[service_uid]['count_early'] = count_early
                services[service_uid]['mins_early'] = mins_early
            if earliness == 0:
                count_undefined = services[service_uid]['count_undefined']
                count_undefined += 1
                services[service_uid]['count_undefined'] = count_undefined

        # calculate averages
        for service_uid in services.keys():
            count_early = services[service_uid]['count_early']
            mins_early = services[service_uid]['mins_early']
            if count_early == 0:
                services[service_uid]['ave_early'] = ''
            else:
                avemins = (mins_early) / count_early
                services[service_uid]['ave_early'] = formatDuration(
                    self.context, avemins)
            count_late = services[service_uid]['count_late']
            mins_late = services[service_uid]['mins_late']
            if count_late == 0:
                services[service_uid]['ave_late'] = ''
            else:
                avemins = mins_late / count_late
                services[service_uid]['ave_late'] = formatDuration(
                    self.context, avemins)

        # and now let's do the actual report lines
        formats = {
            'columns': 7,
            'col_heads': [
                _('Analysis'),
                _('Count'),
                _('Undefined'),
                _('Late'),
                _('Average late'),
                _('Early'),
                _('Average early'),
            ],
            'class': '',
        }

        total_count_early = 0
        total_count_late = 0
        total_mins_early = 0
        total_mins_late = 0
        total_count_undefined = 0
        datalines = []

        for cat in sc(portal_type='AnalysisCategory',
                      sort_on='sortable_title'):
            catline = [
                {
                    'value': cat.Title,
                    'class': 'category_heading',
                    'colspan': 7
                },
            ]
            first_time = True
            cat_count_early = 0
            cat_count_late = 0
            cat_count_undefined = 0
            cat_mins_early = 0
            cat_mins_late = 0
            for service in sc(portal_type="AnalysisService",
                              getCategoryUID=cat.UID,
                              sort_on='sortable_title'):

                dataline = [
                    {
                        'value': service.Title,
                        'class': 'testgreen'
                    },
                ]
                if service.UID not in services:
                    continue

                if first_time:
                    datalines.append(catline)
                    first_time = False

                # analyses found
                cat_count_early += services[service.UID]['count_early']
                cat_count_late += services[service.UID]['count_late']
                cat_count_undefined += services[service.UID]['count_undefined']
                cat_mins_early += services[service.UID]['mins_early']
                cat_mins_late += services[service.UID]['mins_late']

                count = services[service.UID]['count_early'] + \
                        services[service.UID]['count_late'] + \
                        services[service.UID]['count_undefined']

                dataline.append({'value': count, 'class': 'number'})
                dataline.append({
                    'value': services[service.UID]['count_undefined'],
                    'class': 'number'
                })
                dataline.append({
                    'value': services[service.UID]['count_late'],
                    'class': 'number'
                })
                dataline.append({
                    'value': services[service.UID]['ave_late'],
                    'class': 'number'
                })
                dataline.append({
                    'value': services[service.UID]['count_early'],
                    'class': 'number'
                })
                dataline.append({
                    'value': services[service.UID]['ave_early'],
                    'class': 'number'
                })

                datalines.append(dataline)

            # category totals
            dataline = [
                {
                    'value': '%s - total' % (cat.Title),
                    'class': 'subtotal_label'
                },
            ]

            dataline.append({
                'value': cat_count_early + cat_count_late + cat_count_undefined,
                'class': 'subtotal_number'
            })

            dataline.append({
                'value': cat_count_undefined,
                'class': 'subtotal_number'
            })

            dataline.append({
                'value': cat_count_late,
                'class': 'subtotal_number'
            })

            if cat_count_late:
                dataitem = {
                    'value': cat_mins_late / cat_count_late,
                    'class': 'subtotal_number'
                }
            else:
                dataitem = {'value': 0, 'class': 'subtotal_number'}

            dataline.append(dataitem)

            dataline.append({
                'value': cat_count_early,
                'class': 'subtotal_number'
            })

            if cat_count_early:
                dataitem = {
                    'value': cat_mins_early / cat_count_early,
                    'class': 'subtotal_number'
                }
            else:
                dataitem = {'value': 0, 'class': 'subtotal_number'}

            dataline.append(dataitem)

            total_count_early += cat_count_early
            total_count_late += cat_count_late
            total_count_undefined += cat_count_undefined
            total_mins_early += cat_mins_early
            total_mins_late += cat_mins_late

        # footer data
        footlines = []
        footline = [
            {
                'value': _('Total'),
                'class': 'total'
            },
        ]

        footline.append({
            'value': total_count_early + total_count_late + total_count_undefined,
            'class': 'total number'
        })

        footline.append({
            'value': total_count_undefined,
            'class': 'total number'
        })

        footline.append({'value': total_count_late, 'class': 'total number'})

        if total_count_late:
            ave_mins = total_mins_late / total_count_late
            footline.append({
                'value': formatDuration(self.context, ave_mins),
                'class': 'total number'
            })
        else:
            footline.append({'value': ''})

        footline.append({'value': total_count_early, 'class': 'total number'})

        if total_count_early:
            ave_mins = total_mins_early / total_count_early
            footline.append({
                'value': formatDuration(self.context, ave_mins),
                'class': 'total number'
            })
        else:
            footline.append({'value': '', 'class': 'total number'})

        footlines.append(footline)

        self.report_content = {
            'headings': headings,
            'parms': parms,
            'formats': formats,
            'datalines': datalines,
            'footings': footlines
        }

        if self.request.get('output_format', '') == 'CSV':
            import csv
            import StringIO
            import datetime

            fieldnames = [
                'Analysis',
                'Count',
                'Undefined',
                'Late',
                'Average late',
                'Early',
                'Average early',
            ]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output,
                                extrasaction='ignore',
                                fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                if len(row) == 1:
                    # single-cell category heading row; skip it in the CSV
                    continue
                dw.writerow({
                    'Analysis': row[0]['value'],
                    'Count': row[1]['value'],
                    'Undefined': row[2]['value'],
                    'Late': row[3]['value'],
                    'Average late': row[4]['value'],
                    'Early': row[5]['value'],
                    'Average early': row[6]['value'],
                })
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'text/csv')
            setheader("Content-Disposition",
                      "attachment;filename=\"analysestats_%s.csv\"" % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {
                'report_title': t(headings['header']),
                'report_data': self.template()
            }
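To make the earliness bucketing and averaging above concrete, here is a small standalone sketch of the same arithmetic, detached from the catalog; the input values are invented for illustration (negative earliness means the analysis was late).

# Standalone illustration of the per-service counting used above.
def summarise(earliness_values):
    stats = {'count_early': 0, 'mins_early': 0,
             'count_late': 0, 'mins_late': 0,
             'count_undefined': 0}
    for earliness in earliness_values:
        if earliness < 0:
            stats['count_late'] += 1
            stats['mins_late'] -= earliness   # lateness stored as positive minutes
        elif earliness > 0:
            stats['count_early'] += 1
            stats['mins_early'] += earliness
        else:
            stats['count_undefined'] += 1
    return stats

stats = summarise([-30, -90, 45, 0])
# -> 2 late (120 min total, 60 min average), 1 early (45 min), 1 undefined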
    def __call__(self):

        # get all the data into datalines
        sc = getToolByName(self.context, 'bika_setup_catalog')
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        rc = getToolByName(self.context, 'reference_catalog')
        self.report_content = {}
        parm_lines = {}
        parms = []
        headings = {}
        headings['header'] = _("Analyses per sample type")
        headings['subheader'] = _(
            "Number of analyses requested per sample type")

        count_all = 0
        query = {'portal_type': 'Analysis'}
        client_title = None
        if 'ClientUID' in self.request.form:
            client_uid = self.request.form['ClientUID']
            query['getClientUID'] = client_uid
            client = rc.lookupObject(client_uid)
            client_title = client.Title()
        else:
            client = logged_in_client(self.context)
            if client:
                client_title = client.Title()
                query['getClientUID'] = client.UID()
        if client_title:
            parms.append({
                'title': _('Client'),
                'value': client_title,
                'type': 'text'
            })

        date_query = formatDateQuery(self.context, 'Requested')
        if date_query:
            query['created'] = date_query
            requested = formatDateParms(self.context, 'Requested')
            parms.append({
                'title': _('Requested'),
                'value': requested,
                'type': 'text'
            })

        workflow = getToolByName(self.context, 'portal_workflow')
        if 'bika_analysis_workflow' in self.request.form:
            query['review_state'] = self.request.form['bika_analysis_workflow']
            review_state = workflow.getTitleForStateOnType(
                self.request.form['bika_analysis_workflow'], 'Analysis')
            parms.append({
                'title': _('Status'),
                'value': review_state,
                'type': 'text'
            })

        if 'bika_cancellation_workflow' in self.request.form:
            query['cancellation_state'] = self.request.form[
                'bika_cancellation_workflow']
            cancellation_state = workflow.getTitleForStateOnType(
                self.request.form['bika_cancellation_workflow'], 'Analysis')
            parms.append({
                'title': _('Active'),
                'value': cancellation_state,
                'type': 'text'
            })

        if 'bika_worksheetanalysis_workflow' in self.request.form:
            query['worksheetanalysis_review_state'] = self.request.form[
                'bika_worksheetanalysis_workflow']
            ws_review_state = workflow.getTitleForStateOnType(
                self.request.form['bika_worksheetanalysis_workflow'],
                'Analysis')
            parms.append({
                'title': _('Assigned to worksheet'),
                'value': ws_review_state,
                'type': 'text'
            })

        # and now let's do the actual report lines
        formats = {
            'columns': 2,
            'col_heads': [_('Sample type'),
                          _('Number of analyses')],
            'class': '',
        }

        datalines = []
        for sampletype in sc(portal_type="SampleType",
                             sort_on='sortable_title'):
            query['getSampleTypeUID'] = sampletype.UID
            analyses = bac(query)
            count_analyses = len(analyses)

            dataline = []
            dataitem = {'value': sampletype.Title}
            dataline.append(dataitem)
            dataitem = {'value': count_analyses}

            dataline.append(dataitem)

            datalines.append(dataline)

            count_all += count_analyses

        # footer data
        footlines = []
        footline = []
        footitem = {'value': _('Total'), 'class': 'total_label'}
        footline.append(footitem)
        footitem = {'value': count_all}
        footline.append(footitem)
        footlines.append(footline)

        self.report_content = {
            'headings': headings,
            'parms': parms,
            'formats': formats,
            'datalines': datalines,
            'footings': footlines
        }

        if self.request.get('output_format', '') == 'CSV':
            import csv
            import StringIO
            import datetime

            fieldnames = [
                'Sample Type',
                'Analyses',
            ]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output,
                                extrasaction='ignore',
                                fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                dw.writerow({
                    'Sample Type': row[0]['value'],
                    'Analyses': row[1]['value'],
                })
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'text/csv')
            setheader(
                "Content-Disposition",
                "attachment;filename=\"analysespersampletype_%s.csv\"" % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {
                'report_title': t(headings['header']),
                'report_data': self.template()
            }
    def __call__(self):
        # get all the data into datalines

        sc = getToolByName(self.context, "bika_setup_catalog")
        bc = getToolByName(self.context, "bika_analysis_catalog")
        rc = getToolByName(self.context, "reference_catalog")
        self.report_content = {}
        parms = []
        headings = {}
        headings["header"] = _("Analysis turnaround times")
        headings["subheader"] = _("The turnaround time of analyses")

        query = {"portal_type": "Analysis"}
        client_title = None
        if "ClientUID" in self.request.form:
            client_uid = self.request.form["ClientUID"]
            query["getClientUID"] = client_uid
            client = rc.lookupObject(client_uid)
            client_title = client.Title()
        else:
            client = logged_in_client(self.context)
            if client:
                client_title = client.Title()
                query["getClientUID"] = client.UID()
        if client_title:
            parms.append({"title": _("Client"), "value": client_title, "type": "text"})

        date_query = formatDateQuery(self.context, "Received")
        if date_query:
            query["created"] = date_query
            received = formatDateParms(self.context, "Received")
            parms.append({"title": _("Received"), "value": received, "type": "text"})

        query["review_state"] = "published"

        workflow = getToolByName(self.context, "portal_workflow")
        if "bika_worksheetanalysis_workflow" in self.request.form:
            query["worksheetanalysis_review_state"] = self.request.form["bika_worksheetanalysis_workflow"]
            ws_review_state = workflow.getTitleForStateOnType(
                self.request.form["bika_worksheetanalysis_workflow"], "Analysis"
            )
            parms.append({"title": _("Assigned to worksheet"), "value": ws_review_state, "type": "text"})

        # query all the analyses and increment the counts
        count_early = 0
        mins_early = 0
        count_late = 0
        mins_late = 0
        count_undefined = 0
        services = {}

        analyses = bc(query)
        for a in analyses:
            analysis = a.getObject()
            service_uid = analysis.getServiceUID()
            if service_uid not in services:
                services[service_uid] = {
                    "count_early": 0,
                    "count_late": 0,
                    "mins_early": 0,
                    "mins_late": 0,
                    "count_undefined": 0,
                }
            earliness = analysis.getEarliness()
            if earliness < 0:
                count_late = services[service_uid]["count_late"]
                mins_late = services[service_uid]["mins_late"]
                count_late += 1
                mins_late -= earliness
                services[service_uid]["count_late"] = count_late
                services[service_uid]["mins_late"] = mins_late
            if earliness > 0:
                count_early = services[service_uid]["count_early"]
                mins_early = services[service_uid]["mins_early"]
                count_early += 1
                mins_early += earliness
                services[service_uid]["count_early"] = count_early
                services[service_uid]["mins_early"] = mins_early
            if earliness == 0:
                count_undefined = services[service_uid]["count_undefined"]
                count_undefined += 1
                services[service_uid]["count_undefined"] = count_undefined

        # calculate averages
        for service_uid in services.keys():
            count_early = services[service_uid]["count_early"]
            mins_early = services[service_uid]["mins_early"]
            if count_early == 0:
                services[service_uid]["ave_early"] = ""
            else:
                avemins = (mins_early) / count_early
                services[service_uid]["ave_early"] = formatDuration(self.context, avemins)
            count_late = services[service_uid]["count_late"]
            mins_late = services[service_uid]["mins_late"]
            if count_late == 0:
                services[service_uid]["ave_late"] = ""
            else:
                avemins = mins_late / count_late
                services[service_uid]["ave_late"] = formatDuration(self.context, avemins)

        # and now let's do the actual report lines
        formats = {
            "columns": 7,
            "col_heads": [
                _("Analysis"),
                _("Count"),
                _("Undefined"),
                _("Late"),
                _("Average late"),
                _("Early"),
                _("Average early"),
            ],
            "class": "",
        }

        total_count_early = 0
        total_count_late = 0
        total_mins_early = 0
        total_mins_late = 0
        total_count_undefined = 0
        datalines = []

        for cat in sc(portal_type="AnalysisCategory", sort_on="sortable_title"):
            catline = [{"value": cat.Title, "class": "category_heading", "colspan": 7}]
            first_time = True
            cat_count_early = 0
            cat_count_late = 0
            cat_count_undefined = 0
            cat_mins_early = 0
            cat_mins_late = 0
            for service in sc(portal_type="AnalysisService", getCategoryUID=cat.UID, sort_on="sortable_title"):

                dataline = [{"value": service.Title, "class": "testgreen"}]
                if service.UID not in services:
                    continue

                if first_time:
                    datalines.append(catline)
                    first_time = False

                # analyses found
                cat_count_early += services[service.UID]["count_early"]
                cat_count_late += services[service.UID]["count_late"]
                cat_count_undefined += services[service.UID]["count_undefined"]
                cat_mins_early += services[service.UID]["mins_early"]
                cat_mins_late += services[service.UID]["mins_late"]

                count = (
                    services[service.UID]["count_early"]
                    + services[service.UID]["count_late"]
                    + services[service.UID]["count_undefined"]
                )

                dataline.append({"value": count, "class": "number"})
                dataline.append({"value": services[service.UID]["count_undefined"], "class": "number"})
                dataline.append({"value": services[service.UID]["count_late"], "class": "number"})
                dataline.append({"value": services[service.UID]["ave_late"], "class": "number"})
                dataline.append({"value": services[service.UID]["count_early"], "class": "number"})
                dataline.append({"value": services[service.UID]["ave_early"], "class": "number"})

                datalines.append(dataline)

            # category totals
            dataline = [{"value": "%s - total" % (cat.Title), "class": "subtotal_label"}]

            dataline.append(
                {"value": cat_count_early + cat_count_late + cat_count_undefined, "class": "subtotal_number"}
            )

            dataline.append({"value": cat_count_undefined, "class": "subtotal_number"})

            dataline.append({"value": cat_count_late, "class": "subtotal_number"})

            if cat_count_late:
                dataitem = {"value": cat_mins_late / cat_count_late, "class": "subtotal_number"}
            else:
                dataitem = {"value": 0, "class": "subtotal_number"}

            dataline.append(dataitem)

            dataline.append({"value": cat_count_early, "class": "subtotal_number"})

            if cat_count_early:
                dataitem = {"value": cat_mins_early / cat_count_early, "class": "subtotal_number"}
            else:
                dataitem = {"value": 0, "class": "subtotal_number"}

            dataline.append(dataitem)

            total_count_early += cat_count_early
            total_count_late += cat_count_late
            total_count_undefined += cat_count_undefined
            total_mins_early += cat_mins_early
            total_mins_late += cat_mins_late

        # footer data
        footlines = []
        footline = [{"value": _("Total"), "class": "total"}]

        footline.append(
            {"value": total_count_early + total_count_late + total_count_undefined, "class": "total number"}
        )

        footline.append({"value": total_count_undefined, "class": "total number"})

        footline.append({"value": total_count_late, "class": "total number"})

        if total_count_late:
            ave_mins = total_mins_late / total_count_late
            footline.append({"value": formatDuration(self.context, ave_mins), "class": "total number"})
        else:
            footline.append({"value": ""})

        footline.append({"value": total_count_early, "class": "total number"})

        if total_count_early:
            ave_mins = total_mins_early / total_count_early
            footline.append({"value": formatDuration(self.context, ave_mins), "class": "total number"})
        else:
            footline.append({"value": "", "class": "total number"})

        footlines.append(footline)

        self.report_content = {
            "headings": headings,
            "parms": parms,
            "formats": formats,
            "datalines": datalines,
            "footings": footlines,
        }

        if self.request.get("output_format", "") == "CSV":
            import csv
            import StringIO
            import datetime

            fieldnames = ["Analysis", "Count", "Undefined", "Late", "Average late", "Early", "Average early"]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output, extrasaction="ignore", fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                if len(row) == 1:
                    # single-cell category heading row; skip it in the CSV
                    continue
                dw.writerow(
                    {
                        "Analysis": row[0]["value"],
                        "Count": row[1]["value"],
                        "Undefined": row[2]["value"],
                        "Late": row[3]["value"],
                        "Average late": row[4]["value"],
                        "Early": row[5]["value"],
                        "Average early": row[6]["value"],
                    }
                )
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader("Content-Type", "text/csv")
            setheader("Content-Disposition", 'attachment;filename="analysestats_%s.csv"' % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {"report_title": t(headings["header"]), "report_data": self.template()}
    def __call__(self):
        # get all the data into datalines

        sc = getToolByName(self.context, "bika_setup_catalog")
        bc = getToolByName(self.context, "bika_analysis_catalog")
        rc = getToolByName(self.context, "reference_catalog")
        self.report_content = {}
        parms = []
        headings = {}
        headings["header"] = _("Analyses per analysis service")
        headings["subheader"] = _("Number of analyses requested per analysis service")

        query = {"portal_type": "Analysis"}
        client_title = None
        if "ClientUID" in self.request.form:
            client_uid = self.request.form["ClientUID"]
            query["getClientUID"] = client_uid
            client = rc.lookupObject(client_uid)
            client_title = client.Title()
        else:
            client = logged_in_client(self.context)
            if client:
                client_title = client.Title()
                query["getClientUID"] = client.UID()
        if client_title:
            parms.append({"title": _("Client"), "value": client_title, "type": "text"})

        date_query = formatDateQuery(self.context, "Requested")
        if date_query:
            query["created"] = date_query
            requested = formatDateParms(self.context, "Requested")
            parms.append({"title": _("Requested"), "value": requested, "type": "text"})

        date_query = formatDateQuery(self.context, "Published")
        if date_query:
            query["getDatePublished"] = date_query
            published = formatDateParms(self.context, "Published")
            parms.append({"title": _("Published"), "value": published, "type": "text"})

        workflow = getToolByName(self.context, "portal_workflow")
        if "bika_analysis_workflow" in self.request.form:
            query["review_state"] = self.request.form["bika_analysis_workflow"]
            review_state = workflow.getTitleForStateOnType(self.request.form["bika_analysis_workflow"], "Analysis")
            parms.append({"title": _("Status"), "value": review_state, "type": "text"})

        if "bika_cancellation_workflow" in self.request.form:
            query["cancellation_state"] = self.request.form["bika_cancellation_workflow"]
            cancellation_state = workflow.getTitleForStateOnType(
                self.request.form["bika_cancellation_workflow"], "Analysis"
            )
            parms.append({"title": _("Active"), "value": cancellation_state, "type": "text"})

        if "bika_worksheetanalysis_workflow" in self.request.form:
            query["worksheetanalysis_review_state"] = self.request.form["bika_worksheetanalysis_workflow"]
            ws_review_state = workflow.getTitleForStateOnType(
                self.request.form["bika_worksheetanalysis_workflow"], "Analysis"
            )
            parms.append({"title": _("Assigned to worksheet"), "value": ws_review_state, "type": "text"})

        # and now let's do the actual report lines
        formats = {"columns": 2, "col_heads": [_("Analysis service"), _("Number of analyses")], "class": ""}

        datalines = []
        count_all = 0
        for cat in sc(portal_type="AnalysisCategory", sort_on="sortable_title"):
            dataline = [{"value": cat.Title, "class": "category_heading", "colspan": 2}]
            datalines.append(dataline)
            for service in sc(portal_type="AnalysisService", getCategoryUID=cat.UID, sort_on="sortable_title"):
                query["getServiceUID"] = service.UID
                analyses = bc(query)
                count_analyses = len(analyses)

                dataline = []
                dataitem = {"value": service.Title}
                dataline.append(dataitem)
                dataitem = {"value": count_analyses}

                dataline.append(dataitem)

                datalines.append(dataline)

                count_all += count_analyses

        # footer data
        footlines = []
        footline = []
        footitem = {"value": _("Total"), "class": "total_label"}
        footline.append(footitem)
        footitem = {"value": count_all}
        footline.append(footitem)
        footlines.append(footline)

        self.report_content = {
            "headings": headings,
            "parms": parms,
            "formats": formats,
            "datalines": datalines,
            "footings": footlines,
        }

        title = t(headings["header"])

        if self.request.get("output_format", "") == "CSV":
            import csv
            import StringIO
            import datetime

            fieldnames = ["Analysis Service", "Analyses"]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output, extrasaction="ignore", fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                if len(row) == 1:
                    # single-cell category heading row; skip it in the CSV
                    continue
                dw.writerow({"Analysis Service": row[0]["value"], "Analyses": row[1]["value"]})
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader("Content-Type", "text/csv")
            setheader("Content-Disposition", 'attachment;filename="analysesperservice_%s.csv"' % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {"report_title": title, "report_data": self.template()}
Example #7
    def folderitems(self):
        self.categories = []

        analyses = self.context.getAnalyses(full_objects=True)
        self.analyses = dict([(a.getServiceUID(), a) for a in analyses])
        self.selected = self.analyses.keys()
        self.show_categories = \
            self.context.bika_setup.getCategoriseAnalysisServices()
        self.expand_all_categories = False

        wf = getToolByName(self.context, 'portal_workflow')
        mtool = getToolByName(self.context, 'portal_membership')

        self.allow_edit = mtool.checkPermission('Modify portal content',
                                                self.context)

        items = BikaListingView.folderitems(self)

        parts = self.context.getSample().objectValues('SamplePartition')
        partitions = [{'ResultValue': o.Title(),
                       'ResultText': o.getId()}
                      for o in parts
                      if wf.getInfoFor(o, 'cancellation_state', '') == 'active']
        for x in range(len(items)):
            if 'obj' not in items[x]:
                continue
            obj = items[x]['obj']

            cat = obj.getCategoryTitle()
            items[x]['category'] = cat
            if cat not in self.categories:
                self.categories.append(cat)

            items[x]['selected'] = items[x]['uid'] in self.selected

            items[x]['class']['Title'] = 'service_title'

            # js checks in row_data if an analysis may be removed.
            row_data = {}
            # keyword = obj.getKeyword()
            # if keyword in review_states.keys() \
            #    and review_states[keyword] not in ['sample_due',
            #                                       'to_be_sampled',
            #                                       'to_be_preserved',
            #                                       'sample_received',
            #                                       ]:
            #     row_data['disabled'] = True
            items[x]['row_data'] = json.dumps(row_data)

            calculation = obj.getCalculation()
            items[x]['Calculation'] = calculation and calculation.Title()

            locale = locales.getLocale('en')
            currency = self.context.bika_setup.getCurrency()
            symbol = locale.numbers.currencies[currency].symbol
            items[x]['before']['Price'] = symbol
            items[x]['Price'] = obj.getPrice()
            items[x]['class']['Price'] = 'nowrap'
            items[x]['Priority'] = ''

            if items[x]['selected']:
                items[x]['allow_edit'] = ['Partition', 'min', 'max', 'error']
                if not logged_in_client(self.context):
                    items[x]['allow_edit'].append('Price')

            items[x]['required'].append('Partition')
            items[x]['choices']['Partition'] = partitions

            if obj.UID() in self.analyses:
                analysis = self.analyses[obj.UID()]
                part = analysis.getSamplePartition()
                part = part or obj
                items[x]['Partition'] = part.Title()
                spec = self.get_spec_from_ar(self.context,
                                             analysis.getService().getKeyword())
                items[x]["min"] = spec["min"]
                items[x]["max"] = spec["max"]
                items[x]["error"] = spec["error"]
                # Add priority premium
                items[x]['Price'] = analysis.getPrice()
                priority = analysis.getPriority()
                items[x]['Priority'] = priority and priority.Title() or ''
            else:
                items[x]['Partition'] = ''
                items[x]["min"] = ''
                items[x]["max"] = ''
                items[x]["error"] = ''
                items[x]["Priority"] = ''

            after_icons = ''
            if obj.getAccredited():
                after_icons += (
                    "<img src='%s/++resource++bika.lims.images/accredited.png'"
                    " title='%s'>" % (self.portal_url, t(_("Accredited"))))
            if obj.getReportDryMatter():
                after_icons += (
                    "<img src='%s/++resource++bika.lims.images/dry.png'"
                    " title='%s'>" % (self.portal_url,
                                      t(_("Can be reported as dry matter"))))
            if obj.getAttachmentOption() == 'r':
                after_icons += (
                    "<img src='%s/++resource++bika.lims.images/attach_reqd.png'"
                    " title='%s'>" % (self.portal_url,
                                      t(_("Attachment required"))))
            if obj.getAttachmentOption() == 'n':
                after_icons += (
                    "<img src='%s/++resource++bika.lims.images/attach_no.png'"
                    " title='%s'>" % (self.portal_url,
                                      t(_('Attachment not permitted'))))
            if after_icons:
                items[x]['after']['Title'] = after_icons

            # Display analyses for this Analysis Service in results?
            ser = self.context.getAnalysisServiceSettings(obj.UID())
            items[x]['allow_edit'] = ['Hidden', ]
            items[x]['Hidden'] = ser.get('hidden', obj.getHidden())

        self.categories.sort()
        return items
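For orientation, this is roughly what a single entry of the returned items list looks like after the loop above; the keys mirror the assignments in the method, while every value below is invented for the sketch. Note that the final allow_edit assignment leaves only 'Hidden' editable, regardless of the earlier per-selection assignment.

# Illustrative folderitems() entry (all values invented for the sketch).
example_item = {
    'category': 'Metals',
    'selected': True,
    'class': {'Title': 'service_title', 'Price': 'nowrap'},
    'row_data': '{}',
    'Calculation': 'Titration',
    'before': {'Price': 'R'},          # currency symbol from bika_setup
    'Price': '120.00',
    'Partition': 'P-1',
    'min': '5',
    'max': '10',
    'error': '10',
    'Priority': '',
    'required': ['Partition'],
    'choices': {'Partition': [{'ResultValue': 'P-1', 'ResultText': 'P-1'}]},
    'allow_edit': ['Hidden'],
    'Hidden': False,
    'after': {'Title': "<img src='.../accredited.png' title='Accredited'>"},
}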
Example #8
    def __call__(self):
        # get all the data into datalines

        pc = getToolByName(self.context, 'portal_catalog')
        rc = getToolByName(self.context, 'reference_catalog')
        self.report_content = {}
        parms = []
        headings = {}
        headings['header'] = _("Attachments")
        headings['subheader'] = _(
            "The attachments linked to analysis requests and analyses")

        count_all = 0
        query = {'portal_type': 'Attachment'}
        if 'ClientUID' in self.request.form:
            client_uid = self.request.form['ClientUID']
            query['getClientUID'] = client_uid
            client = rc.lookupObject(client_uid)
            client_title = client.Title()
        else:
            client = logged_in_client(self.context)
            if client:
                client_title = client.Title()
                query['getClientUID'] = client.UID()
            else:
                client_title = 'All'
        parms.append(
            {'title': _('Client'),
             'value': client_title,
             'type': 'text'})

        date_query = formatDateQuery(self.context, 'Loaded')
        if date_query:
            query['getDateLoaded'] = date_query
            loaded = formatDateParms(self.context, 'Loaded')
            parms.append(
                {'title': _('Loaded'),
                 'value': loaded,
                 'type': 'text'})

        # and now let's do the actual report lines
        formats = {
            'columns': 6,
            'col_heads': [
                _('Request'),
                _('File'),
                _('Attachment type'),
                _('Content type'),
                _('Size'),
                _('Loaded'),
            ],
            'class': '',
        }

        datalines = []
        attachments = pc(query)
        for a_proxy in attachments:
            attachment = a_proxy.getObject()
            attachment_file = attachment.getAttachmentFile()
            icon = attachment_file.getBestIcon()
            filename = attachment_file.filename
            filesize = attachment_file.get_size()
            filesize = filesize / 1024
            sizeunit = "Kb"
            if filesize > 1024:
                filesize = filesize / 1024
                sizeunit = "Mb"
            dateloaded = attachment.getDateLoaded()
            dataline = []
            dataitem = {'value': attachment.getTextTitle()}
            dataline.append(dataitem)
            dataitem = {'value': filename,
                        'img_before': icon}
            dataline.append(dataitem)
            attachment_type = attachment.getAttachmentType()
            dataitem = {'value': attachment_type.Title() if attachment_type else ''}
            dataline.append(dataitem)
            dataitem = {'value': self.context.lookupMime(
                attachment_file.getContentType())}
            dataline.append(dataitem)
            dataitem = {'value': '%s%s' % (filesize, sizeunit)}
            dataline.append(dataitem)
            dataitem = {'value': self.ulocalized_time(dateloaded)}
            dataline.append(dataitem)

            datalines.append(dataline)

            count_all += 1

        # footer data
        footlines = []
        footline = []
        footitem = {'value': _('Total'),
                    'colspan': 5,
                    'class': 'total_label'}
        footline.append(footitem)
        footitem = {'value': count_all}
        footline.append(footitem)
        footlines.append(footline)

        self.report_content = {
            'headings': headings,
            'parms': parms,
            'formats': formats,
            'datalines': datalines,
            'footings': footlines}

        if self.request.get('output_format', '') == 'CSV':
            import csv
            import StringIO
            import datetime

            fieldnames = [
                'Request',
                'File',
                'Attachment type',
                'Content type',
                'Size',
                'Loaded',
            ]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output,
                                extrasaction='ignore',
                                fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                # each row is a list of cell dicts; map the cell values onto
                # the CSV columns (same order as formats['col_heads'] above)
                dw.writerow({
                    'Request': row[0]['value'],
                    'File': row[1]['value'],
                    'Attachment type': row[2]['value'],
                    'Content type': row[3]['value'],
                    'Size': row[4]['value'],
                    'Loaded': row[5]['value'],
                })
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'text/csv')
            setheader("Content-Disposition",
                      "attachment;filename=\"analysesattachments_%s.csv\"" % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {'report_title': t(headings['header']),
                    'report_data': self.template()}
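The size column above is computed by dividing the byte count by 1024 and, past 1024 KB, once more. A tiny standalone sketch of that formatting (the function name and test values are illustrative only):

def human_size(num_bytes):
    # mirrors the attachment report: integer KB, switching to MB past 1024 KB
    size = num_bytes // 1024
    unit = "KB"
    if size > 1024:
        size = size // 1024
        unit = "MB"
    return "%s%s" % (size, unit)

print(human_size(350 * 1024))        # 350KB
print(human_size(5 * 1024 * 1024))   # 5MB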
    def __call__(self):
        # get all the data into datalines

        pc = getToolByName(self.context, 'portal_catalog')
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        bc = getToolByName(self.context, 'bika_catalog')
        rc = getToolByName(self.context, 'reference_catalog')

        self.report_content = {}
        parm_lines = {}
        parms = []
        headings = {}
        count_all_ars = 0
        count_all_analyses = 0
        query = {}

        this_client = logged_in_client(self.context)

        if not this_client and 'ClientUID' in self.request.form:
            client_uid = self.request.form['ClientUID']
            this_client = rc.lookupObject(client_uid)
            parms.append(
                {'title': _('Client'),
                 'value': this_client.Title(),
                 'type': 'text'})

        if this_client:
            headings['header'] = _("Analysis requests and analyses")
            headings['subheader'] = _("Number of Analysis requests and analyses")
        else:
            headings['header'] = _("Analysis requests and analyses per client")
            headings['subheader'] = _(
                "Number of Analysis requests and analyses per client")

        date_query = formatDateQuery(self.context, 'Requested')
        if date_query:
            query['created'] = date_query
            requested = formatDateParms(self.context, 'Requested')
            parms.append(
                {'title': _('Requested'),
                 'value': requested,
                 'type': 'text'})

        workflow = getToolByName(self.context, 'portal_workflow')
        if 'bika_analysis_workflow' in self.request.form:
            query['review_state'] = self.request.form['bika_analysis_workflow']
            review_state = workflow.getTitleForStateOnType(
                self.request.form['bika_analysis_workflow'], 'Analysis')
            parms.append(
                {'title': _('Status'), 'value': review_state, 'type': 'text'})

        if 'bika_cancellation_workflow' in self.request.form:
            query['cancellation_state'] = self.request.form[
                'bika_cancellation_workflow']
            cancellation_state = workflow.getTitleForStateOnType(
                self.request.form['bika_cancellation_workflow'], 'Analysis')
            parms.append({'title': _('Active'), 'value': cancellation_state,
                          'type': 'text'})

        if 'bika_worksheetanalysis_workflow' in self.request.form:
            query['worksheetanalysis_review_state'] = self.request.form[
                'bika_worksheetanalysis_workflow']
            ws_review_state = workflow.getTitleForStateOnType(
                self.request.form['bika_worksheetanalysis_workflow'], 'Analysis')
            parms.append(
                {'title': _('Assigned to worksheet'), 'value': ws_review_state,
                 'type': 'text'})

        # and now let's do the actual report lines
        formats = {'columns': 3,
                   'col_heads': [_('Client'),
                                 _('Number of requests'),
                                 _('Number of analyses')],
                   'class': ''}

        datalines = []

        if this_client:
            c_proxies = pc(portal_type="Client", UID=this_client.UID())
        else:
            c_proxies = pc(portal_type="Client", sort_on='sortable_title')

        for client in c_proxies:
            query['getClientUID'] = client.UID
            dataline = [{'value': client.Title}, ]
            query['portal_type'] = 'AnalysisRequest'
            ars = bc(query)
            count_ars = len(ars)
            dataitem = {'value': count_ars}
            dataline.append(dataitem)

            query['portal_type'] = 'Analysis'
            analyses = bac(query)
            count_analyses = len(analyses)
            dataitem = {'value': count_analyses}
            dataline.append(dataitem)

            datalines.append(dataline)

            count_all_analyses += count_analyses
            count_all_ars += count_ars

        # footer data
        footlines = []
        if not this_client:
            footline = []
            footitem = {'value': _('Total'),
                        'class': 'total_label'}
            footline.append(footitem)

            footitem = {'value': count_all_ars}
            footline.append(footitem)
            footitem = {'value': count_all_analyses}
            footline.append(footitem)

            footlines.append(footline)

        self.report_content = {
            'headings': headings,
            'parms': parms,
            'formats': formats,
            'datalines': datalines,
            'footings': footlines}

        if self.request.get('output_format', '') == 'CSV':
            import csv
            import StringIO
            import datetime

            fieldnames = [
                'Client',
                'Analysis Requests',
                'Analyses',
            ]
            output = StringIO.StringIO()
            dw = csv.DictWriter(output, extrasaction='ignore',
                                fieldnames=fieldnames)
            dw.writerow(dict((fn, fn) for fn in fieldnames))
            for row in datalines:
                dw.writerow({
                    'Client': row[0]['value'],
                    'Analysis Requests': row[1]['value'],
                    'Analyses': row[2]['value'],
                })
            report_data = output.getvalue()
            output.close()
            date = datetime.datetime.now().strftime("%Y%m%d%H%M")
            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'text/csv')
            setheader("Content-Disposition",
                      "attachment;filename=\"analysesperclient_%s.csv\"" % date)
            self.request.RESPONSE.write(report_data)
        else:
            return {'report_title': t(headings['header']),
                    'report_data': self.template()}