Пример #1
0
    def set(self, instance, value, **kwargs):
        """
        Check if value is an actual date/time value. If not, attempt
        to convert it to one; otherwise, set to None. Assign all
        properties passed as kwargs to object.
        """
        val = value
        if not value:
            val = None
        elif not isinstance(value, DateTime):
            for fmt in ['date_format_long', 'date_format_short']:
                fmtstr = instance.translate(fmt, domain='bika', mapping={})
                # i18n formats use ${...} placeholders; turn them into
                # strftime %-codes.
                fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
                try:
                    val = strptime(value, fmtstr)
                except ValueError:
                    continue
                try:
                    # struct_time[:-6] keeps (year, month, day) only
                    val = DateTime(*list(val)[:-6])
                except DateTimeError:
                    val = None
                # BUGFIX: val can be None here (DateTime() failed above);
                # guard before calling timezoneNaive() to avoid an
                # AttributeError on None.
                if val and val.timezoneNaive():
                    # Use local timezone for tz naive strings
                    # see http://dev.plone.org/plone/ticket/10141
                    zone = val.localZone(safelocaltime(val.timeTime()))
                    parts = val.parts()[:-1] + (zone, )
                    val = DateTime(*parts)
                break
            else:
                logger.warning("DateTimeField failed to format date "
                               "string '%s' with '%s'" % (value, fmtstr))

        super(DateTimeField, self).set(instance, val, **kwargs)
Пример #2
0
    def _ws_data(self, ws):
        """ Creates an ws dict, accessible from the view and from each
            specific template.
            Keys: obj, id, url, template_title, remarks, date_printed,
                ars, createdby, analyst, printedby, analyses_titles,
                portal, laboratory
        """
        data = {
            'obj': ws,
            'id': ws.id,
            'url': ws.absolute_url(),
            'template_title': ws.getWorksheetTemplateTitle(),
            'remarks': ws.getRemarks(),
            'date_printed': self.ulocalized_time(DateTime(), long_format=1),
            'date_created': self.ulocalized_time(ws.created(), long_format=1),
            # Sub-objects
            'ars': self._analyses_data(ws),
            'createdby': self._createdby_data(ws),
            'analyst': self._analyst_data(ws),
            'printedby': self._printedby_data(ws),
        }

        # Unique analysis titles across all ARs on this worksheet
        titles = set()
        for ar in data['ars']:
            titles.update(an['title'] for an in ar['analyses'])
        data['analyses_titles'] = list(titles)

        portal = self.context.portal_url.getPortalObject()
        data['portal'] = {'obj': portal, 'url': portal.absolute_url()}
        data['laboratory'] = self._lab_data()
        return data
Пример #3
0
 def createInvoice(self, client_uid, items):
     """ Creates and invoice for a client and a set of items
     """
     invoice_id = self.generateUniqueId('Invoice')
     invoice = _createObjectByType("Invoice", self, invoice_id)
     invoice.edit(Client=client_uid, InvoiceDate=DateTime())
     invoice.processForm()
     invoice.invoice_lineitems = []
     for item in items:
         lineitem = InvoiceLineItem()
         item_type = item.portal_type
         # Date/order-number fields depend on the item's type
         if item_type == 'AnalysisRequest':
             lineitem['ItemDate'] = item.getDatePublished()
             lineitem['OrderNumber'] = item.getRequestID()
             lineitem['AnalysisRequest'] = item
             lineitem['ItemDescription'] = get_invoice_item_description(item)
         elif item_type == 'SupplyOrder':
             lineitem['ItemDate'] = item.getDateDispatched()
             lineitem['OrderNumber'] = item.getOrderNumber()
             lineitem['ItemDescription'] = get_invoice_item_description(item)
         # Amounts are common to all item types
         lineitem['Subtotal'] = item.getSubtotal()
         lineitem['VATAmount'] = item.getVATAmount()
         lineitem['Total'] = item.getTotal()
         invoice.invoice_lineitems.append(lineitem)
     invoice.reindexObject()
     return invoice
Пример #4
0
def changeWorkflowState(content,
                        wf_id,
                        state_id,
                        acquire_permissions=False,
                        portal_workflow=None,
                        **kw):
    """Change the workflow state of an object
    @param content: Content obj which state will be changed
    @param wf_id: id of the workflow the new state belongs to
    @param state_id: name of the state to put on content
    @param acquire_permissions: True->All permissions unchecked and on roles
                                and acquired
                                False->Applies new state security map
    @param portal_workflow: Provide workflow tool (optimisation) if known
    @param kw: change the values of same name of the state mapping
    @return: None
    """

    if portal_workflow is None:
        portal_workflow = getToolByName(content, 'portal_workflow')

    # Find the workflow definition. BUGFIX: the original only logged a
    # missing workflow and carried on, which could later hit an unbound or
    # stale wf_def when updating role mappings; bail out early instead.
    wf_def = None
    for wf in portal_workflow.getWorkflowsFor(content):
        if wf_id == wf.getId():
            wf_def = wf
            break
    if wf_def is None:
        logger.error("%s: Cannot find workflow id %s" % (content, wf_id))
        return

    wf_state = {
        'action': None,
        'actor': None,
        'comments': "Setting state to %s" % state_id,
        'review_state': state_id,
        'time': DateTime(),
    }

    # Update wf_state from keyword args: drop unknown keys, and never let
    # callers override the target review_state. list() makes the deletion
    # safe while iterating.
    for k in list(kw.keys()):
        if k not in wf_state or k == 'review_state':
            del kw[k]
    wf_state.update(kw)

    portal_workflow.setStatusOf(wf_id, content, wf_state)

    if acquire_permissions:
        # Acquire all permissions
        for permission in content.possible_permissions():
            content.manage_permission(permission, acquire=1)
    else:
        # Setting new state permissions
        wf_def.updateRoleMappingsFor(content)

    # Map changes to the catalogs
    content.reindexObject(idxs=['allowedRolesAndUsers', 'review_state'])
Пример #5
0
    def __call__(self, analyses):
        """Export this worksheet's layout as a semicolon-delimited CSV and
        stream it to the browser, one row per parent (AR) slot.
        """
        tray = 1
        now = DateTime().strftime('%Y%m%d-%H%M')
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        uc = getToolByName(self.context, 'uid_catalog')
        instrument = self.context.getInstrument()
        norm = getUtility(IIDNormalizer).normalize
        filename = '%s-%s.csv' % (self.context.getId(),
                                  norm(instrument.getDataInterface()))
        listname = '%s_%s_%s' % (self.context.getId(), norm(
            instrument.Title()), now)
        options = {'dilute_factor': 1, 'method': 'F SO2 & T SO2'}
        for key, value in instrument.getDataInterfaceOptions():
            options[key] = value

        # for looking up "cup" number (= slot) of ARs
        parent_to_slot = {}
        layout = self.context.getLayout()
        for entry in layout:
            a_uid = entry['analysis_uid']
            p_uid = uc(UID=a_uid)[0].getObject().aq_parent.UID()
            entry['parent_uid'] = p_uid
            if p_uid not in parent_to_slot:
                parent_to_slot[p_uid] = int(entry['position'])

        # write rows, one per PARENT
        rows = [[listname, options['method']]]
        tmprows = []
        exported_parents = []
        for entry in layout:
            # create batch header row
            p_uid = entry['parent_uid']
            if p_uid in exported_parents:
                continue
            tmprows.append([tray, parent_to_slot[p_uid], p_uid,
                            entry['container_uid'],
                            options['dilute_factor'], ""])
            exported_parents.append(p_uid)
        tmprows.sort(key=lambda row: row[1])
        rows.extend(tmprows)

        buf = StringIO()
        csv.writer(buf, delimiter=';').writerows(rows)
        result = buf.getvalue()
        buf.close()

        # stream file to browser
        response = self.request.RESPONSE
        response.setHeader('Content-Length', len(result))
        response.setHeader('Content-Type', 'text/comma-separated-values')
        response.setHeader('Content-Disposition',
                           'inline; filename=%s' % filename)
        response.write(result)
Пример #6
0
 def getCurrentState(self):
     """Return the task status constant for this maintenance task."""
     workflow = getToolByName(self, 'portal_workflow')
     if self.getClosed():
         return InstrumentMaintenanceTaskStatuses.CLOSED
     cancellation = workflow.getInfoFor(self, 'cancellation_state', '')
     if cancellation == 'cancelled':
         return InstrumentMaintenanceTaskStatuses.CANCELLED
     now = DateTime()
     dfrom = self.getDownFrom()
     # No down-to date means "open-ended": treat it as far future
     dto = self.getDownTo() or DateTime(9999, 12, 31)
     if now > dto:
         return InstrumentMaintenanceTaskStatuses.OVERDUE
     if now >= dfrom:
         return InstrumentMaintenanceTaskStatuses.PENDING
     return InstrumentMaintenanceTaskStatuses.INQUEUE
Пример #7
0
def strptime(context, value):
    """given a string, this function tries to return a DateTime.DateTime object
    with the date formats from i18n translations

    :param context: object providing ``translate`` for the 'bika' domain
    :param value: date string to parse
    :return: a DateTime on success, or "" when no format matches / parse fails
    """
    val = ""
    for fmt in ['date_format_long', 'date_format_short']:
        fmtstr = context.translate(fmt, domain='bika', mapping={})
        # i18n formats use ${...} placeholders; turn them into strftime %-codes
        fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
        try:
            val = _strptime(value, fmtstr)
        except ValueError:
            continue
        try:
            # struct_time[:-6] keeps (year, month, day) only
            val = DateTime(*list(val)[:-6])
        except DateTimeError:
            val = ""
        # BUGFIX: val is "" here when DateTime() failed above; guard before
        # calling timezoneNaive() to avoid an AttributeError on str.
        if val and val.timezoneNaive():
            # Use local timezone for tz naive strings
            # see http://dev.plone.org/plone/ticket/10141
            zone = val.localZone(safelocaltime(val.timeTime()))
            parts = val.parts()[:-1] + (zone,)
            val = DateTime(*parts)
        break
    else:
        logger.warning("DateTimeField failed to format date "
                       "string '%s' with '%s'" % (value, fmtstr))
    return val
Пример #8
0
    def set(self, instance, value, **kwargs):
        """
        Check if value is an actual date/time value. If not, attempt
        to convert it to one; otherwise, set to None. Assign all
        properties passed as kwargs to object.
        """
        val = value
        if not value:
            val = None
        elif not isinstance(value, DateTime):
            for fmt in ['date_format_long', 'date_format_short']:
                fmtstr = instance.translate(fmt, domain='bika', mapping={})
                # i18n formats use ${...} placeholders; turn them into
                # strftime %-codes.
                fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
                try:
                    val = strptime(value, fmtstr)
                except ValueError:
                    continue
                try:
                    # struct_time[:-6] keeps (year, month, day) only
                    val = DateTime(*list(val)[:-6])
                except DateTimeError:
                    val = None
                # BUGFIX: val can be None here (DateTime() failed above);
                # guard before calling timezoneNaive() to avoid an
                # AttributeError on None.
                if val and val.timezoneNaive():
                    # Use local timezone for tz naive strings
                    # see http://dev.plone.org/plone/ticket/10141
                    zone = val.localZone(safelocaltime(val.timeTime()))
                    parts = val.parts()[:-1] + (zone,)
                    val = DateTime(*parts)
                break
            else:
                logger.warning("DateTimeField failed to format date "
                               "string '%s' with '%s'" % (value, fmtstr))

        super(DateTimeField, self).set(instance, val, **kwargs)
Пример #9
0
    def __call__(self):
        """On submission, mail the rendered pricelist to the selected
        contact (with a copy to the laboratory) and redirect back with a
        status message; otherwise render the form template.
        """
        if 'submitted' in self.request:
            self.items = self.context.objectValues()
            self.pricelist_content = self.lineitems_pt()

            # BUGFIX: the original referenced a bare ``context`` name that
            # is undefined in this method's scope; bind it to self.context.
            context = self.context
            portal = context.portal_url.getPortalObject()
            lab = context.bika_labinfo.laboratory
            request = context.REQUEST

            # NOTE(review): result is unused; kept for any side effect of
            # the template lookup -- confirm whether it can be dropped.
            ar_query_results = portal.portal_mailtemplates.getTemplate(
                'bika', request.mail_template)

            headers = {}
            headers['Date'] = DateTime().rfc822()
            from_addr = headers['From'] = formataddr(
                (encode_header(lab.Title()), lab.getEmailAddress())
            )

            if 'Contact_email_address' in request:
                contact_address = request.Contact_email_address
                msg = 'portal_status_message=Pricelist sent to %s' % (
                    contact_address)
            else:
                contact = context.reference_catalog.lookupObject(request.Contact_uid)
                contact_address = formataddr(
                    (encode_header(contact.getFullname()),
                      contact.getEmailAddress())
                )
                msg = 'portal_status_message=Pricelist sent to %s at %s' % (
                    contact.Title(), contact.getEmailAddress())

            # Deliver to the contact and copy the lab itself
            to_addrs = []
            to_addr = headers['To'] = contact_address
            to_addrs.append(to_addr)
            # send copy to lab
            to_addrs.append(from_addr)

            to_addrs = tuple(to_addrs)
            info = {'request': request,
                    'pricelist': context,
                    'portal': portal}

            # NOTE(review): ``pmt`` is not defined in this block -- presumably
            # a module-level alias for portal_mailtemplates; verify.
            message = pmt.createMessage(
                'bika', request.mail_template, info, headers, text_format='html')
            sendmail(portal, from_addr, to_addrs, message)

            request.RESPONSE.redirect('%s?%s' % (context.absolute_url(), msg))

            return self.template()
        else:
            return self.form_template()
Пример #10
0
 def workflow_script_receive(self):
     """Receive this sample: stamp the date and cascade the transition."""
     workflow = getToolByName(self, 'portal_workflow')
     self.setDateReceived(DateTime())
     self.reindexObject(idxs=["review_state", "getDateReceived"])
     # Receive all self partitions that are still 'sample_due'
     partitions = self.objectValues('SamplePartition')
     still_due = [part for part in partitions
                  if workflow.getInfoFor(part, 'review_state') == 'sample_due']
     for part in still_due:
         workflow.doActionFor(part, 'receive')
     # when a self is received, all associated
     # AnalysisRequests are also transitioned
     for ar in self.getAnalysisRequests():
         doActionFor(ar, "receive")
Пример #11
0
    def __init__(self, context, request):
        """Configure the listing view for Pricelist objects."""
        super(PricelistsView, self).__init__(context, request)
        self.catalog = 'portal_catalog'
        self.contentFilter = {
            'portal_type': 'Pricelist',
            'sort_on': 'sortable_title',
        }
        self.context_actions = {}
        self.title = self.context.translate(_("Pricelists"))
        self.icon = (self.portal_url +
                     "/++resource++bika.lims.images/pricelist_big.png")
        self.description = ""
        self.show_sort_column = False
        self.show_select_row = False
        self.show_select_column = True
        self.pagesize = 25

        self.columns = {
            'Title': {'title': _('Title'),
                      'index': 'sortable_title'},
            'getEffectiveDate': {'title': _('Start Date'),
                                 'index': 'getEffectiveDate',
                                 'toggle': True},
            'getExpirationDate': {'title': _('End Date'),
                                  'index': 'getExpirationDate',
                                  'toggle': True},
        }

        now = DateTime()
        # Active: already effective, not yet expired
        active_filter = {
            'getEffectiveDate': {'query': now, 'range': 'max'},
            'getExpirationDate': {'query': now, 'range': 'min'},
            'inactive_state': 'active',
        }
        # Dormant: not yet effective and already expired, or deactivated
        dormant_filter = {
            'getEffectiveDate': {'query': now, 'range': 'min'},
            'getExpirationDate': {'query': now, 'range': 'max'},
            'inactive_state': 'inactive',
        }
        self.review_states = [
            {'id': 'default',
             'title': _('Active'),
             'contentFilter': active_filter,
             'transitions': [{'id': 'deactivate'}, ],
             'columns': ['Title', 'getExpirationDate']},
            {'id': 'inactive',
             'title': _('Dormant'),
             'contentFilter': dormant_filter,
             'transitions': [{'id': 'activate'}, ],
             'columns': ['Title', 'getExpirationDate']},
            {'id': 'all',
             'title': _('All'),
             'contentFilter': {},
             'columns': ['Title', 'getEffectiveDate', 'getExpirationDate']},
        ]
Пример #12
0
    def folderitems(self):
        """Decorate listing rows with AR/client/contact links and a
        human-readable lateness string.

        Client links are hidden from regulatory inspectors who hold no
        lab/manager role.
        """
        items = super(LateAnalysesView, self).folderitems()
        mtool = getToolByName(self.context, 'portal_membership')
        member = mtool.getAuthenticatedMember()
        roles = member.getRoles()
        hideclientlink = 'RegulatoryInspector' in roles \
            and 'Manager' not in roles \
            and 'LabManager' not in roles \
            and 'LabClerk' not in roles

        for x in range(len(items)):
            # Idiom fix: dict.has_key() is deprecated; use 'in' (matches
            # the other folderitems implementations in this codebase).
            if 'obj' not in items[x]:
                continue
            obj = items[x]['obj']
            ar = obj.aq_parent
            sample = ar.getSample()
            client = ar.aq_parent
            contact = ar.getContact()
            items[x]['Analysis'] = obj.Title()
            items[x]['RequestID'] = ''
            items[x]['replace']['RequestID'] = "<a href='%s'>%s</a>" % \
                 (ar.absolute_url(), ar.Title())
            items[x]['Client'] = ''
            # Idiom fix: '== False' replaced with 'not'.
            if not hideclientlink:
                items[x]['replace']['Client'] = "<a href='%s'>%s</a>" % \
                     (client.absolute_url(), client.Title())
            items[x]['Contact'] = ''
            if contact:
                items[x]['replace']['Contact'] = "<a href='mailto:%s'>%s</a>" % \
                                                 (contact.getEmailAddress(),
                                                  contact.getFullname())
            items[x]['DateReceived'] = self.ulocalized_time(
                sample.getDateReceived())
            items[x]['DueDate'] = self.ulocalized_time(obj.getDueDate())

            # DateTime subtraction yields a float number of days; split it
            # into whole days / hours / minutes for display.
            late = DateTime() - obj.getDueDate()
            days = int(late / 1)
            hours = int((late % 1) * 24)
            mins = int((((late % 1) * 24) % 1) * 60)
            late_str = days and "%s day%s" % (days, days > 1 and 's'
                                              or '') or ""
            if days < 2:
                late_str += hours and " %s hour%s" % (hours, hours > 1 and 's'
                                                      or '') or ""
            if not days and not hours:
                late_str = "%s min%s" % (mins, mins > 1 and 's' or '')

            items[x]['Late'] = late_str
        return items
Пример #13
0
 def workflow_action_save_partitions_button(self):
     """Save the sample partition table from the request form.

     Adds missing partitions, removes excess ones (detaching their
     analyses first), updates container/preservation on each partition,
     and redirects back to the relevant tab when nothing was selected.
     """
     form = self.request.form
     # Sample Partitions or AR Manage Analyses: save Partition Table
     sample = self.context.portal_type == 'Sample' and self.context or\
         self.context.getSample()
     part_prefix = sample.getId() + "-P"
     nr_existing = len(sample.objectIds())
     nr_parts = len(form['PartTitle'][0])
     # add missing parts
     if nr_parts > nr_existing:
         for i in range(nr_parts - nr_existing):
             part = _createObjectByType("SamplePartition", sample, tmpID())
             # BUGFIX: was ``part.setDateReceived = DateTime()`` which
             # replaced the mutator attribute instead of calling it
             # (compare setDateReceived usage elsewhere in this module).
             part.setDateReceived(DateTime())
             part.processForm()
     # remove excess parts
     if nr_existing > nr_parts:
         for i in range(nr_existing - nr_parts):
             part = sample['%s%s' % (part_prefix, nr_existing - i)]
             # Detach analyses before deleting the partition
             for a in part.getBackReferences("AnalysisSamplePartition"):
                 a.setSamplePartition(None)
             sample.manage_delObjects([
                 '%s%s' % (part_prefix, nr_existing - i),
             ])
     # modify part container/preservation
     for part_uid, part_id in form['PartTitle'][0].items():
         part = sample["%s%s" %
                       (part_prefix, part_id.split(part_prefix)[1])]
         part.edit(
             Container=form['getContainer'][0][part_uid],
             Preservation=form['getPreservation'][0][part_uid],
         )
         part.reindexObject()
     objects = WorkflowAction._get_selected_items(self)
     if not objects:
         message = _("No items have been selected")
         self.context.plone_utils.addPortalMessage(message, 'info')
         if self.context.portal_type == 'Sample':
             # in samples his table is on 'Partitions' tab
             self.destination_url = self.context.absolute_url() +\
                 "/partitions"
         else:
             # in ar context this table is on 'ManageAnalyses' tab
             self.destination_url = self.context.absolute_url() +\
                 "/analyses"
         self.request.response.redirect(self.destination_url)
Пример #14
0
    def __call__(self):
        """Prepend a timestamped remark to the Remarks field and return
        the full remarks history rendered as HTML.
        """
        CheckAuthenticator(self.request)
        field = self.context.Schema()["Remarks"]
        new_remark = self.request['value'].strip() + "\n\n"
        existing = self.context.getRemarks(
            mimetype='text/x-web-intelligent').strip()

        # Divider line identifying when and by whom the remark was added
        divider = "=== %s (%s)\n" % (DateTime().rfc822(),
                                     getSecurityManager().getUser())

        self.context.setRemarks(divider + new_remark + existing,
                                mimetype='text/x-web-intelligent')

        rendered = "".join([
            convertWebIntelligentPlainTextToHtml(divider),
            convertWebIntelligentPlainTextToHtml(new_remark),
            convertWebIntelligentPlainTextToHtml(existing),
        ])
        return rendered.strip()
Пример #15
0
 def workflow_script_receive(self):
     """Receive this partition: stamp the date, cascade to analyses, and
     promote the parent sample once all siblings are received.
     """
     if skip(self, "receive"):
         return
     sample = self.aq_parent
     workflow = getToolByName(self, 'portal_workflow')
     sample_state = workflow.getInfoFor(sample, 'review_state')
     self.setDateReceived(DateTime())
     self.reindexObject(idxs=["getDateReceived"])
     # Transition our analyses
     for analysis in self.getBackReferences('AnalysisSamplePartition'):
         doActionFor(analysis, "receive")
     # if all sibling partitions are received, promote sample
     if not skip(sample, "receive", peek=True):
         still_due = [
             part for part in sample.objectValues("SamplePartition")
             if workflow.getInfoFor(part, 'review_state') == 'sample_due'
         ]
         if sample_state == 'sample_due' and not still_due:
             doActionFor(sample, 'receive')
Пример #16
0
 def __call__(self):
     """Render the pricelist listing; non-managers only see active lists."""
     mtool = getToolByName(self.context, 'portal_membership')
     if mtool.checkPermission(AddPricelist, self.context):
         self.context_actions[_('Add')] = {
             'url': 'createObject?type_name=Pricelist',
             'icon': '++resource++bika.lims.images/add.png'
         }
     if not mtool.checkPermission(ManageBika, self.context):
         now = DateTime()
         self.show_select_column = False
         # Restrict to currently-effective, unexpired, active pricelists
         active_only = {
             'getEffectiveDate': {'query': now, 'range': 'max'},
             'getExpirationDate': {'query': now, 'range': 'min'},
             'inactive_state': 'active',
         }
         self.review_states = [
             {'id': 'default',
              'title': _('Active'),
              'contentFilter': active_only,
              'transitions': [{'id': 'deactivate'}, ],
              'columns': ['Title', 'getExpirationDate']},
         ]
     return super(PricelistsView, self).__call__()
Пример #17
0
    def applyWorksheetTemplate(self, wst):
        """ Add analyses to worksheet according to wst's layout.
            Will not overwrite slots which are filled already.
            If the selected template has an instrument assigned, it will
            only be applied to those analyses for which the instrument
            is allowed

            :param wst: WorksheetTemplate whose layout and selected
                services drive what gets placed on this worksheet.
        """
        rc = getToolByName(self, REFERENCE_CATALOG)
        bac = getToolByName(self, "bika_analysis_catalog")
        bc = getToolByName(self, 'bika_catalog')

        layout = self.getLayout()
        wstlayout = wst.getLayout()
        services = wst.getService()
        wst_service_uids = [s.UID() for s in services]

        # Candidate analyses: received, unassigned, not cancelled, and
        # limited to the services selected on the template.
        analyses = bac(portal_type='Analysis',
                       getServiceUID=wst_service_uids,
                       review_state='sample_received',
                       worksheetanalysis_review_state='unassigned',
                       cancellation_state='active')
        # Sort candidates by due date (earliest first); analyses without a
        # due date sort last (one year from now).
        sortedans = []
        for an in analyses:
            sortedans.append({
                'uid':
                an.UID,
                'duedate':
                an.getObject().getDueDate() or (DateTime() + 365),
                'brain':
                an
            })
        sortedans.sort(key=itemgetter('duedate'), reverse=False)
        # collect analyses from the first X ARs.
        ar_analyses = {}  # ar_uid : [analyses]
        ars = []  # for sorting

        # Free 'a' (analysis) slots = template slots minus already-used ones
        wst_slots = [row['pos'] for row in wstlayout if row['type'] == 'a']
        ws_slots = [row['position'] for row in layout if row['type'] == 'a']
        nr_slots = len(wst_slots) - len(ws_slots)
        # Prefer the worksheet's own instrument; fall back to the template's
        instr = self.getInstrument() if self.getInstrument(
        ) else wst.getInstrument()
        for analysis in sortedans:
            analysis = analysis['brain']
            if instr and analysis.getObject().isInstrumentAllowed(
                    instr) == False:
                # Exclude those analyses for which the ws selected
                # instrument is not allowed
                continue
            ar = analysis.getRequestID
            if ar in ar_analyses:
                ar_analyses[ar].append(analysis.getObject())
            else:
                # Only admit a new AR while free slots remain
                if len(ar_analyses.keys()) < nr_slots:
                    ars.append(ar)
                    ar_analyses[ar] = [
                        analysis.getObject(),
                    ]

        # Assign each admitted AR to the next free template slot, in order
        positions = [pos for pos in wst_slots if pos not in ws_slots]
        for ar in ars:
            for analysis in ar_analyses[ar]:
                self.addAnalysis(analysis, position=positions[ars.index(ar)])

        # find best maching reference samples for Blanks and Controls
        for t in ('b', 'c'):
            form_key = t == 'b' and 'blank_ref' or 'control_ref'
            ws_slots = [row['position'] for row in layout if row['type'] == t]
            for row in [
                    r for r in wstlayout
                    if r['type'] == t and r['pos'] not in ws_slots
            ]:
                reference_definition_uid = row[form_key]
                samples = bc(
                    portal_type='ReferenceSample',
                    review_state='current',
                    inactive_state='active',
                    getReferenceDefinitionUID=reference_definition_uid)
                if not samples:
                    break
                samples = [s.getObject() for s in samples]
                # Blanks ('b') must be blank references; controls must not
                if t == 'b':
                    samples = [s for s in samples if s.getBlank()]
                else:
                    samples = [s for s in samples if not s.getBlank()]
                # Count how many of the template's services each candidate
                # reference covers; stop at the first complete match.
                complete_reference_found = False
                references = {}
                for reference in samples:
                    reference_uid = reference.UID()
                    references[reference_uid] = {}
                    references[reference_uid]['services'] = []
                    references[reference_uid]['count'] = 0
                    specs = reference.getResultsRangeDict()
                    for service_uid in wst_service_uids:
                        if service_uid in specs:
                            references[reference_uid]['services'].append(
                                service_uid)
                            references[reference_uid]['count'] += 1
                    if references[reference_uid]['count'] == len(
                            wst_service_uids):
                        complete_reference_found = True
                        break
                if complete_reference_found:
                    supported_uids = wst_service_uids
                    self.addReferences(int(row['pos']), reference,
                                       supported_uids)
                else:
                    # find the most complete reference sample instead
                    reference_keys = references.keys()
                    no_of_services = 0
                    reference = None
                    for key in reference_keys:
                        if references[key]['count'] > no_of_services:
                            no_of_services = references[key]['count']
                            reference = key
                    if reference:
                        reference = rc.lookupObject(reference)
                        supported_uids = [
                            s.UID() for s in reference.getServices()
                            if s.UID() in wst_service_uids
                        ]
                        self.addReferences(int(row['pos']), reference,
                                           supported_uids)

        # fill duplicate positions
        # (re-read layout: analyses/references were added above)
        layout = self.getLayout()
        ws_slots = [row['position'] for row in layout if row['type'] == 'd']
        for row in [
                r for r in wstlayout
                if r['type'] == 'd' and r['pos'] not in ws_slots
        ]:
            dest_pos = int(row['pos'])
            src_pos = int(row['dup'])
            # Only duplicate from a slot that actually exists on this ws
            if src_pos in [int(slot['position']) for slot in layout]:
                self.addDuplicateAnalyses(src_pos, dest_pos)

        # Apply the wst instrument to all analyses and ws
        if instr:
            self.setInstrument(instr, True)
Пример #18
0
    def folderitems(self, full_objects=False):
        """Build the listing rows for Analysis Requests.

        Extends ``BikaListingView.folderitems`` by decorating each row dict
        with AR-specific columns (client, batch, sample, dates, sampler,
        preserver, state icons) and inline-edit metadata, then hides the
        'sample'/'preserve' workflow transitions when their edit columns
        are toggled off.

        :param full_objects: accepted for API compatibility with the base
            class; not used in this override.
        :returns: the decorated list of row dicts.
        """
        workflow = getToolByName(self.context, "portal_workflow")
        items = BikaListingView.folderitems(self)
        mtool = getToolByName(self.context, 'portal_membership')
        member = mtool.getAuthenticatedMember()
        roles = member.getRoles()
        # Regulatory inspectors without any lab role see the client name as
        # plain text instead of a link into the client area.
        hideclientlink = 'RegulatoryInspector' in roles \
            and 'Manager' not in roles \
            and 'LabManager' not in roles \
            and 'LabClerk' not in roles

        for x in range(len(items)):
            # Rows without 'obj' are display-only (no content object behind
            # them); skip decoration.
            if 'obj' not in items[x]:
                continue
            obj = items[x]['obj']
            sample = obj.getSample()

            # Link straight to the results-entry view when the user may
            # edit results; otherwise link to the plain AR view.
            if getSecurityManager().checkPermission(EditResults, obj):
                url = obj.absolute_url() + "/manage_results"
            else:
                url = obj.absolute_url()

            items[x]['Client'] = obj.aq_parent.Title()
            if (hideclientlink is False):
                items[x]['replace']['Client'] = "<a href='%s'>%s</a>" % \
                    (obj.aq_parent.absolute_url(), obj.aq_parent.Title())
            items[x]['Creator'] = self.user_fullname(obj.Creator())
            items[x]['getRequestID'] = obj.getRequestID()
            items[x]['replace']['getRequestID'] = "<a href='%s'>%s</a>" % \
                 (url, items[x]['getRequestID'])
            items[x]['getSample'] = sample
            items[x]['replace']['getSample'] = \
                "<a href='%s'>%s</a>" % (sample.absolute_url(), sample.Title())

            # Render the two counters from getAnalysesNum() as "a/b"
            # (empty string when no counts are available).
            if obj.getAnalysesNum():
                items[x]['getAnalysesNum'] = str(
                    obj.getAnalysesNum()[0]) + '/' + str(
                        obj.getAnalysesNum()[1])
            else:
                items[x]['getAnalysesNum'] = ''

            batch = obj.getBatch()
            if batch:
                items[x]['BatchID'] = batch.getBatchID()
                items[x]['replace']['BatchID'] = "<a href='%s'>%s</a>" % \
                     (batch.absolute_url(), items[x]['BatchID'])
            else:
                items[x]['BatchID'] = ''

            val = obj.Schema().getField('SubGroup').get(obj)
            items[x]['SubGroup'] = val.Title() if val else ''

            samplingdate = obj.getSample().getSamplingDate()
            items[x]['SamplingDate'] = self.ulocalized_time(samplingdate,
                                                            long_format=1)
            items[x]['getDateReceived'] = self.ulocalized_time(
                obj.getDateReceived())
            items[x]['getDatePublished'] = self.ulocalized_time(
                obj.getDatePublished())

            deviation = sample.getSamplingDeviation()
            items[x]['SamplingDeviation'] = deviation and deviation.Title(
            ) or ''
            # NOTE(review): priority is fetched but never rendered; the
            # Title() call is commented out, so the column is always empty.
            priority = obj.getPriority()
            items[x]['Priority'] = ''  # priority.Title()

            items[x]['getStorageLocation'] = sample.getStorageLocation(
            ) and sample.getStorageLocation().Title() or ''
            items[x]['AdHoc'] = sample.getAdHoc() and True or ''

            # Build the row of status icons shown after the request ID.
            after_icons = ""
            state = workflow.getInfoFor(obj, 'worksheetanalysis_review_state')
            if state == 'assigned':
                after_icons += "<img src='%s/++resource++bika.lims.images/worksheet.png' title='%s'/>" % \
                    (self.portal_url, t(_("All analyses assigned")))
            if workflow.getInfoFor(obj, 'review_state') == 'invalid':
                after_icons += "<img src='%s/++resource++bika.lims.images/delete.png' title='%s'/>" % \
                    (self.portal_url, t(_("Results have been withdrawn")))
            if obj.getLate():
                after_icons += "<img src='%s/++resource++bika.lims.images/late.png' title='%s'>" % \
                    (self.portal_url, t(_("Late Analyses")))
            # NOTE(review): samplingdate may be None here; this comparison
            # relies on Python 2 ordering of None against DateTime.
            if samplingdate > DateTime():
                after_icons += "<img src='%s/++resource++bika.lims.images/calendar.png' title='%s'>" % \
                    (self.portal_url, t(_("Future dated sample")))
            if obj.getInvoiceExclude():
                after_icons += "<img src='%s/++resource++bika.lims.images/invoice_exclude.png' title='%s'>" % \
                    (self.portal_url, t(_("Exclude from invoice")))
            if sample.getSampleType().getHazardous():
                after_icons += "<img src='%s/++resource++bika.lims.images/hazardous.png' title='%s'>" % \
                    (self.portal_url, t(_("Hazardous")))
            if after_icons:
                items[x]['after']['getRequestID'] = after_icons

            items[x]['Created'] = self.ulocalized_time(obj.created())

            contact = obj.getContact()
            if contact:
                items[x]['ClientContact'] = contact.Title()
                items[x]['replace']['ClientContact'] = "<a href='%s'>%s</a>" % \
                    (contact.absolute_url(), contact.Title())
            else:
                items[x]['ClientContact'] = ""

            # Sampler / date-sampled only apply once the sampling date has
            # passed and the sampling workflow is enabled for this sample.
            SamplingWorkflowEnabled = sample.getSamplingWorkflowEnabled()
            if SamplingWorkflowEnabled and not samplingdate > DateTime():
                datesampled = self.ulocalized_time(sample.getDateSampled())
                if not datesampled:
                    # No date recorded yet: show "now" as a provisional value.
                    datesampled = self.ulocalized_time(DateTime(),
                                                       long_format=1)
                    items[x]['class']['getDateSampled'] = 'provisional'
                sampler = sample.getSampler().strip()
                if sampler:
                    items[x]['replace']['getSampler'] = self.user_fullname(
                        sampler)
                if 'Sampler' in member.getRoles() and not sampler:
                    # Default the sampler to the current user, provisionally.
                    sampler = member.id
                    items[x]['class']['getSampler'] = 'provisional'
            else:
                datesampled = ''
                sampler = ''
            items[x]['getDateSampled'] = datesampled
            items[x]['getSampler'] = sampler

            # sampling workflow - inline edits for Sampler and Date Sampled
            checkPermission = self.context.portal_membership.checkPermission
            state = workflow.getInfoFor(obj, 'review_state')
            if state == 'to_be_sampled' \
                    and checkPermission(SampleSample, obj) \
                    and not samplingdate > DateTime():
                items[x]['required'] = ['getSampler', 'getDateSampled']
                items[x]['allow_edit'] = ['getSampler', 'getDateSampled']
                samplers = getUsers(sample,
                                    ['Sampler', 'LabManager', 'Manager'])
                username = member.getUserName()
                users = [({
                    'ResultValue': u,
                    'ResultText': samplers.getValue(u)
                }) for u in samplers]
                items[x]['choices'] = {'getSampler': users}
                # Prefer the recorded sampler, else the current user when
                # they are in the samplers vocabulary, else empty.
                Sampler = sampler and sampler or \
                    (username in samplers.keys() and username) or ''
                items[x]['getSampler'] = Sampler

            # These don't exist on ARs
            # XXX This should be a list of preservers...
            items[x]['getPreserver'] = ''
            items[x]['getDatePreserved'] = ''

            # inline edits for Preserver and Date Preserved
            checkPermission = self.context.portal_membership.checkPermission
            if checkPermission(PreserveSample, obj):
                items[x]['required'] = ['getPreserver', 'getDatePreserved']
                items[x]['allow_edit'] = ['getPreserver', 'getDatePreserved']
                preservers = getUsers(obj,
                                      ['Preserver', 'LabManager', 'Manager'])
                username = member.getUserName()
                users = [({
                    'ResultValue': u,
                    'ResultText': preservers.getValue(u)
                }) for u in preservers]
                items[x]['choices'] = {'getPreserver': users}
                preserver = username in preservers.keys() and username or ''
                items[x]['getPreserver'] = preserver
                items[x]['getDatePreserved'] = self.ulocalized_time(
                    DateTime(), long_format=1)
                items[x]['class']['getPreserver'] = 'provisional'
                items[x]['class']['getDatePreserved'] = 'provisional'

            # Submitting user may not verify results
            if items[x]['review_state'] == 'to_be_verified' and \
               not checkPermission(VerifyOwnResults, obj):
                self_submitted = False
                try:
                    # Walk the history newest-first; the most recent
                    # 'submit' action decides who submitted.
                    review_history = list(
                        workflow.getInfoFor(obj, 'review_history'))
                    review_history.reverse()
                    for event in review_history:
                        if event.get('action') == 'submit':
                            if event.get('actor') == member.getId():
                                self_submitted = True
                            break
                    if self_submitted:
                        items[x]['after']['state_title'] = \
                             "<img src='++resource++bika.lims.images/submitted-by-current-user.png' title='%s'/>" % \
                             t(_("Cannot verify: Submitted by current user"))
                except Exception:
                    # Best-effort decoration only; never break the listing.
                    pass

        # Hide Preservation/Sampling workflow actions if the edit columns
        # are not displayed.
        toggle_cols = self.get_toggle_cols()
        new_states = []
        for i, state in enumerate(self.review_states):
            if state['id'] == self.review_state:
                if 'getSampler' not in toggle_cols \
                   or 'getDateSampled' not in toggle_cols:
                    if 'hide_transitions' in state:
                        state['hide_transitions'].append('sample')
                    else:
                        state['hide_transitions'] = [
                            'sample',
                        ]
                if 'getPreserver' not in toggle_cols \
                   or 'getDatePreserved' not in toggle_cols:
                    if 'hide_transitions' in state:
                        state['hide_transitions'].append('preserve')
                    else:
                        state['hide_transitions'] = [
                            'preserve',
                        ]
            new_states.append(state)
        self.review_states = new_states

        return items
Пример #19
0
    def export_file(self, info):
        """Build a CSV export of published analysis results.

        Groups the analyses of ``info['analysis_requests']`` by service and
        category, writes batch header rows, per-category/per-service column
        headers, one detail row per AR, and a totals row, and returns the
        CSV as a string.

        :param info: dict with at least 'analysis_requests' (non-empty list
            of AR objects) and 'contact' (the client contact).
        :returns: dict with 'file' (CSV text) and 'file_name'.
        """
        plone_view = self.restrictedTraverse('@@plone')

        """ create the output file """
        delimiter = ','

        # make filename unique
        now = DateTime()
        filename = 'BikaResults%s.csv' % (now.strftime('%Y%m%d-%H%M%S'))

        # Attachment columns depend on the lab's setup flags ('n' = none).
        if self.bika_setup.getARAttachmentOption() == 'n':
            allow_ar_attach = False
        else:
            allow_ar_attach = True

        if self.bika_setup.getAnalysisAttachmentOption() == 'n':
            allow_analysis_attach = False
        else:
            allow_analysis_attach = True

        # group the analyses
        analysisrequests = info['analysis_requests']
        ars = {}          # per-AR: Analyses, Price, Count, DM flag
        services = {}     # per-service: unit, DM flags, attach flag
        categories = {}   # category title -> list of service titles
        dry_matter = 0
        for ar in analysisrequests:
            ar_id = ar.getId()
            ars[ar_id] = {}
            ars[ar_id]['Analyses'] = {}
            ars[ar_id]['Price'] = 0
            ars[ar_id]['Count'] = 0
            if ar.getReportDryMatter():
                dry_matter = 1
                ars[ar_id]['DM'] = True
            else:
                ars[ar_id]['DM'] = False


            analyses = {}
            # extract the list of analyses in this batch
            for analysis in ar.getPublishedAnalyses():
                ars[ar_id]['Price'] += analysis.getPrice()
                ars[ar_id]['Count'] += 1
                service = analysis.Title()
                analyses[service] = {}
                analyses[service]['AsIs'] = analysis.getResult()
                analyses[service]['DM'] = analysis.getResultDM() or None
                analyses[service]['attach'] = analysis.getAttachment() or []
                # First time we see this service: record its metadata and
                # file it under its category.
                if not services.has_key(service):
                    service_obj = analysis.getService()
                    category = service_obj.getCategoryTitle()
                    category_uid = service_obj.getCategoryUID()

                    if not categories.has_key(category):
                        categories[category] = []
                    categories[category].append(service)
                    services[service] = {}
                    services[service]['unit'] = service_obj.getUnit()
                    services[service]['DM'] = service_obj.getReportDryMatter()
                    services[service]['DMOn'] = False
                    if allow_analysis_attach:
                        if service_obj.getAttachmentOption() == 'n':
                            services[service]['attach'] = False
                        else:
                            services[service]['attach'] = True
                # A dry-matter column is emitted once any AR in the batch
                # reports dry matter for this service.
                if services[service]['DM'] == True \
                and ar.getReportDryMatter():
                    services[service]['DMOn'] = True

            ars[ar_id]['Analyses'] = analyses

        # sort by category and title
        c_array = categories.keys()
        c_array.sort(lambda x, y:cmp(x.lower(), y.lower()))

        client = analysisrequests[0].aq_parent
        client_id = client.getClientID()
        client_name = client.Title()

        contact = info['contact']
        contact_id = contact.getUsername()
        contact_name = contact.getFullname()

        rows = []

        # header labels
        header = ['Header', 'Import/Export', 'Filename', 'Client', \
                  'Client ID', 'Contact', 'Contact ID', 'CC Recipients', 'CCEmails']
        rows.append(header)

        # header values
        # NOTE(review): 'ar' here is the loop variable left over from the
        # grouping loop above, i.e. the *last* AR in the batch supplies the
        # CC contacts/emails for the whole export — confirm this is intended.
        cc_contacts = [cc.getUsername() for cc in ar.getCCContact()]
        ccs = ', '.join(cc_contacts)
        header = ['Header Data', 'Export', filename, client_name, \
                  client_id, contact_name, contact_id, ccs, ar.getCCEmails(), \
                  '']
        rows.append(header)

        # category headers
        s_array = []
        header = ['', '', '', '', '', '', '', '', '', '', '']
        for cat_name in c_array:
            service_array = categories[cat_name]
            service_array.sort(lambda x, y:cmp(x.lower(), y.lower()))
            for service_name in service_array:
                header.append(cat_name)
                # Extra blank cells pad the category row so it lines up with
                # the optional [Dry] and Attachments columns below.
                if services[service_name]['DMOn']:
                    header.append('')
                if services[service_name]['attach']:
                    header.append('')
            s_array.extend(service_array)
        rows.append(header)

        # column headers
        header = ['Samples', 'Order ID', 'Client Reference', 'Client SID', 'Sample Type', \
                  'Sample Point', 'Sampling Date', 'Bika Sample ID', \
                  'Bika AR ID', 'Date Received', 'Date Published']

        for service_name in s_array:
            if services[service_name]['unit']:
                analysis_service = '%s (%s)' % (service_name, services[service_name]['unit'])
            else:
                analysis_service = service_name
            if services[service_name]['DMOn']:
                analysis_service = '%s [As Fed]' % (analysis_service)
            header.append(analysis_service)
            if services[service_name]['DMOn']:
                analysis_dm = '%s [Dry]' % (service_name)
                header.append(analysis_dm)
            if services[service_name]['attach']:
                header.append('Attachments')
        # Remember where the trailing summary columns start so detail rows
        # can be padded to the same width.
        count_cell = len(header)
        header.append('Total number of analyses')
        header.append('Price excl VAT')
        if allow_ar_attach:
            header.append('Attachments')


        rows.append(header)


        # detail lines
        total_count = 0
        total_price = 0
        count = 1
        for ar in analysisrequests:
            sample_num = 'Sample %s' % count
            ar_id = ar.getId()
            sample = ar.getSample()
            sample_id = sample.getId()
            sampletype = sample.getSampleType().Title()
            samplepoint = sample.getSamplePoint() and sample.getSamplePoint().Title() or ''
            datereceived = plone_view.toLocalizedTime(ar.getDateReceived(), \
                           long_format = 1)
            datepublished = plone_view.toLocalizedTime(ar.getDatePublished(), \
                           long_format = 1)
            if sample.getDateSampled():
                datesampled = plone_view.toLocalizedTime(sample.getDateSampled(), long_format = 1)
            else:
                datesampled = None

            # create detail line
            detail = [sample_num, ar.getClientOrderNumber(), \
                      sample.getClientReference(), sample.getClientSampleID(), sampletype, \
                      samplepoint, datesampled, sample_id, ar_id, \
                      datereceived, datepublished]

            for service_name in s_array:
                if ars[ar_id]['Analyses'].has_key(service_name):
                    detail.append(ars[ar_id]['Analyses'][service_name]['AsIs'])
                    if services[service_name]['DMOn']:
                        detail.append(ars[ar_id]['Analyses'][service_name]['DM'])
                    if allow_analysis_attach:
                        if services[service_name]['attach'] == True:
                            attachments = ''
                            for attach in ars[ar_id]['Analyses'][service_name]['attach']:
                                file = attach.getAttachmentFile()
                                fname = getattr(file, 'filename')
                                attachments += fname
                            detail.append(attachments)
                else:
                    # AR has no result for this service: blank cells keep
                    # the columns aligned.
                    detail.append(' ')
                    if services[service_name]['DMOn']:
                        detail.append(' ')
                    if services[service_name]['attach'] == True:
                        detail.append(' ')

            # Pad up to the summary columns, then append the totals.
            for i in range(len(detail), count_cell):
                detail.append('')
            detail.append(ars[ar_id]['Count'])
            detail.append(ars[ar_id]['Price'])
            total_count += ars[ar_id]['Count']
            total_price += ars[ar_id]['Price']

            if allow_ar_attach:
                attachments = ''
                for attach in ar.getAttachment():
                    file = attach.getAttachmentFile()
                    fname = getattr(file, 'filename')
                    if attachments:
                        attachments += ', '
                    attachments += fname
                detail.append(attachments)

            rows.append(detail)
            count += 1

        # Grand-total row.
        detail = []
        for i in range(count_cell - 1):
            detail.append('')
        detail.append('Total')
        detail.append(total_count)
        detail.append(total_price)
        rows.append(detail)

        #convert lists to csv string
        ramdisk = StringIO()
        writer = csv.writer(ramdisk, delimiter = delimiter, \
                quoting = csv.QUOTE_NONNUMERIC)
        assert(writer)

        writer.writerows(rows)
        result = ramdisk.getvalue()
        ramdisk.close()

        file_data = {}
        file_data['file'] = result
        file_data['file_name'] = filename
        return file_data
Пример #20
0
    def import_file(self, csvfile, filename, client_id, state):
        """Import a client AR batch from a CSV file.

        Parses the batch header rows, creates an ``ARImport`` object under
        the client, creates one ``ARImportItem`` per sample row that has at
        least one analysis ticked, validates the result and finally
        redirects the browser to the client's ARImports listing. Progress
        is streamed to the response while rows are processed.

        :param csvfile: open file/iterable of CSV rows.
        :param filename: client-side filename (may contain a Windows path).
        :param client_id: id of the Client to import into.
        :param state: controller state; used to report failures.
        """
        # Strip any Windows-style directory prefix and the extension to get
        # the bare name entered in the file for cross-checking.
        slash = filename.rfind('\\')
        full_name = filename[slash + 1:]
        ext = full_name.rfind('.')
        if ext == -1:
            actual_name = full_name
        else:
            actual_name = full_name[:ext]
        log = []
        r = self.portal_catalog(portal_type = 'Client', id = client_id)
        if len(r) == 0:
            log.append('   Could not find Client %s' % client_id)
            return '\n'.join(log)
        client = r[0].getObject()
        workflow = getToolByName(self, 'portal_workflow')
        # NOTE(review): updateable_states is assigned but not used here.
        updateable_states = ['sample_received', 'assigned']
        reader = csv.reader(csvfile)
        samples = []
        sample_headers = None
        batch_headers = None
        row_count = 0
        sample_count = 0
        batch_remarks = []

        # Rows 1-2 are batch headers, row 3 carries the analysis keywords
        # (from column 10 on), rows 4-6 are ignored, the rest are samples.
        for row in reader:
            row_count = row_count + 1
            if not row: continue
            # a new batch starts
            if row_count == 1:
                if row[0] == 'Header':
                    continue
                else:
                    msg = '%s invalid batch header' % row
#                    transaction_note(msg)
                    return state.set(status = 'failure', portal_status_message = msg)
            if row_count == 2:
                msg = None
                if row[1] != 'Import':
                    msg = 'Invalid batch header - Import required in cell B2'
#                    transaction_note(msg)
                    return state.set(status = 'failure', portal_status_message = msg)
                # The filename recorded inside the file must match the
                # uploaded file's name (extension ignored, case-insensitive).
                full_name = row[2]
                ext = full_name.rfind('.')
                if ext == -1:
                    entered_name = full_name
                else:
                    entered_name = full_name[:ext]
                if entered_name.lower() != actual_name.lower():
                    msg = 'Actual filename, %s, does not match entered filename, %s' % (actual_name, row[2])
#                    transaction_note(msg)
                    return state.set(status = 'failure', portal_status_message = msg)

                batch_headers = row[0:]
                arimport_id = self.generateUniqueId('ARImport')
                client.invokeFactory(id = arimport_id, type_name = 'ARImport')
                arimport = client._getOb(arimport_id)
                arimport.processForm()
                continue
            if row_count == 3:
                sample_count = sample_count + 1
                sample_headers = row[9:]
                continue
            if row_count == 4:
                continue
            if row_count == 5:
                continue
            if row_count == 6:
                continue

            samples.append(row)

        # Stream a progress bar to the browser while items are created;
        # the padding forces intermediate flushes.
        pad = 8192 * ' '
        REQUEST = self.REQUEST
        REQUEST.RESPONSE.write(self.progress_bar(REQUEST = REQUEST))
        REQUEST.RESPONSE.write('<input style="display: none;" id="progressType" value="Analysis request import">')
        REQUEST.RESPONSE.write('<input style="display: none;" id="progressDone" value="Validating...">')
        REQUEST.RESPONSE.write(pad + '<input style="display: none;" id="inputTotal" value="%s">' % len(samples))

        row_count = 0
        # Pre-reserve a run of sequential ARImportItem ids.
        next_id = self.generateUniqueId('ARImportItem', batch_size = len(samples))
        (prefix, next_num) = next_id.split('_')
        next_num = int(next_num)
        for sample in samples:
            row_count = row_count + 1
            REQUEST.RESPONSE.write(pad + '<input style="display: none;" name="inputProgress" value="%s">' % row_count)
            item_remarks = []
            analyses = []
            # Columns 10+ hold a '1' for each requested analysis keyword.
            for i in range(9, len(sample)):
                if sample[i] != '1':
                    continue
                analyses.append(sample_headers[(i - 9)])
            if len(analyses) > 0:
                aritem_id = '%s_%s' % (prefix, (str(next_num)))
                arimport.invokeFactory(id = aritem_id, type_name = 'ARImportItem')
                aritem = arimport._getOb(aritem_id)
                aritem.processForm()
                aritem.edit(
                    SampleName = sample[0],
                    ClientRef = sample[1],
                    ClientSid = sample[2],
                    SampleDate = sample[3],
                    SampleType = sample[4],
                    PickingSlip = sample[5],
                    ReportDryMatter = sample[6],
                    )

                aritem.setRemarks(item_remarks)
                aritem.setAnalyses(analyses)
                next_num += 1

        # Record the batch metadata from the header rows ('c' = classic
        # import layout).
        arimport.edit(
            ImportOption = 'c',
            FileName = batch_headers[2],
            ClientTitle = batch_headers[3],
            ClientID = batch_headers[4],
            ContactID = batch_headers[5],
            CCContactID = batch_headers[6],
            CCEmails = batch_headers[7],
            OrderID = batch_headers[8],
            QuoteID = batch_headers[9],
            SamplePoint = batch_headers[10],
            Remarks = batch_remarks,
            Analyses = sample_headers,
            DateImported = DateTime(),
            )

        # NOTE(review): the validation result is discarded; the redirect
        # always reports "imported".
        valid = self.validate_arimport_c(arimport)
        REQUEST.RESPONSE.write('<script>document.location.href="%s/client_arimports?portal_status_message=%s%%20imported"</script>' % (client.absolute_url(), arimport_id))
Пример #21
0
 def workflow_script_dispatch(self):
     """Workflow event handler for the 'dispatch' transition.

     Stamps this order's DateDispatched field with the current time.
     """
     dispatched_at = DateTime()
     self.setDateDispatched(dispatched_at)
Пример #22
0
 def current_date(self):
     """Return the current date and time as a Zope ``DateTime``."""
     now = DateTime()
     return now
Пример #23
0
    def validateIt(self):
        """Validate this ARImport batch and its items.

        Checks for duplicate orders in the same client, validates the
        client id, contact and CC contact, and validates every
        ``ARImportItem`` (sample type, container type, sample date,
        priority, and — depending on the import option — analysis keywords
        with their calculation dependencies, or a single known profile).
        Remarks are accumulated on the batch and on each item, and the
        batch Status is set to the overall result.

        :returns: True when the whole batch is valid, else False.
        """
        rc = getToolByName(self, 'reference_catalog')
        pc = getToolByName(self, 'portal_catalog')
        bsc = getToolByName(self, 'bika_setup_catalog')
        client = self.aq_parent
        batch_remarks = []
        valid_batch = True
        uid = self.UID()
        # Look for another valid ARImport in this client with the same
        # order id: that makes this batch a duplicate.
        batches = pc({
                    'portal_type': 'ARImport',
                    'path': {'query': '/'.join(client.getPhysicalPath())},
                    })
        for brain in batches:
            if brain.UID == uid:
                continue
            batch = brain.getObject()
            if batch.getOrderID() != self.getOrderID():
                continue
            if batch.getStatus():
                # then a previous valid batch exists
                batch_remarks.append(
                    '\n' + 'Duplicate order %s' % self.getOrderID())
                valid_batch = False
                break

        # validate client
        if self.getClientID() != client.getClientID():
            batch_remarks.append(
                '\n' + 'Client ID should be %s' %client.getClientID())
            valid_batch = False

        # validate contact
        contact_found = False
        cc_contact_found = False

        # Resolve the contact by username if not already referenced.
        if self.getContact():
            contact_found = True
        else:
            contactid = self.getContactID()
            for contact in client.objectValues('Contact'):
                if contact.getUsername() == contactid:
                    self.edit(Contact=contact)
                    contact_found = True
                    #break

        if self.getCCContact():
            cc_contact_found = True
        else:
            if self.getCCContactID():
                cccontact_uname = self.getCCContactID()
                for contact in client.objectValues('Contact'):
                    if contact.getUsername() == cccontact_uname:
                        self.edit(CCContact=contact)
                        cc_contact_found = True
                        break

        cccontact_uname = self.getCCContactID()

        if not contact_found:
            batch_remarks.append('\n' + 'Contact invalid')
            valid_batch = False
        # CC contact is only required when a CC contact id was given.
        if cccontact_uname != None and \
           cccontact_uname != '':
            if not cc_contact_found:
                batch_remarks.append('\n' + 'CC contact invalid')
                valid_batch = False

        # validate sample point
        # NOTE(review): 'points' is computed but never checked, so an
        # unknown sample point does not currently invalidate the batch.
        samplepoint = self.getSamplePoint()
        if samplepoint != None:
            points = pc(portal_type='SamplePoint',
                Title=samplepoint)

        sampletypes = \
            [p.Title for p in pc(portal_type="SampleType")]
        containertypes = \
            [p.Title for p in bsc(portal_type="ContainerType")]
        service_keys = []
        dependant_services = {}

        # Collect active service keywords and their calculation
        # dependencies for per-item analysis validation.
        services = bsc(portal_type = "AnalysisService",
                       inactive_state = 'active')
        for brain in services:
            service = brain.getObject()
            service_keys.append(service.getKeyword())
            calc = service.getCalculation()
            if calc:
                dependencies = calc.getDependentServices()
                if dependencies:
                    dependant_services[service.getKeyword()] = dependencies
        aritems = self.objectValues('ARImportItem')
        for aritem in aritems:
            item_remarks = []
            valid_item = True
            #validate sample type
            if aritem.getSampleType() not in sampletypes:
                batch_remarks.append('\n%s: Sample type %s invalid' %(
                    aritem.getSampleName(), aritem.getSampleType()))
                item_remarks.append(
                    '\nSample type %s invalid' %(aritem.getSampleType()))
                valid_item = False
            #validate container type
            if aritem.getContainerType() not in containertypes:
                batch_remarks.append(
                    '\n%s: Container type %s invalid' %(
                        aritem.getSampleName(), aritem.getContainerType()))
                item_remarks.append(
                    '\nContainer type %s invalid' %(aritem.getContainerType()))
                valid_item = False
            #validate Sample Date
            # Expected format is day/month/year; any parse failure marks
            # the item invalid.
            try:
                date_items = aritem.getSampleDate().split('/')
                test_date = DateTime(int(date_items[2]), int(date_items[1]), int(date_items[0]))
            except:
                valid_item = False
                batch_remarks.append('\n' + '%s: Sample date %s invalid' %(aritem.getSampleName(), aritem.getSampleDate()))
                item_remarks.append('\n' + 'Sample date %s invalid' %(aritem.getSampleDate()))

            #validate Priority
            invalid_priority = False
            try:
                priorities = self.bika_setup_catalog(
                    portal_type = 'ARPriority',
                    sortable_title = aritem.Priority.lower(),
                    )
                if len(priorities) < 1:
                    invalid_priority = True
            except:
                # Missing/odd Priority attribute counts as invalid too.
                invalid_priority = True

            if invalid_priority:
                valid_item = False
                batch_remarks.append('\n' + '%s: Priority %s invalid' % (
                    aritem.getSampleName(), aritem.Priority))
                item_remarks.append('\n' + 'Priority %s invalid' % (
                    aritem.Priority))

            #Validate option specific fields
            if self.getImportOption() == 'c':
                # Classic layout: analyses are service keywords.
                analyses = aritem.getAnalyses()
                for analysis in analyses:
                    if analysis not in service_keys:
                        batch_remarks.append('\n' + '%s: Analysis %s invalid' %(aritem.getSampleName(), analysis))
                        item_remarks.append('\n' + 'Analysis %s invalid' %(analysis))
                        valid_item = False
                    # validate analysis dependancies
                    reqd_analyses = []
                    if dependant_services.has_key(analysis):
                        reqd_analyses = \
                            [s.getKeyword() for s in dependant_services[analysis]]
                    reqd_titles = ''
                    for reqd in reqd_analyses:
                        if (reqd not in analyses):
                            if reqd_titles != '':
                                reqd_titles += ', '
                            reqd_titles += reqd
                    if reqd_titles != '':
                        valid_item = False
                        batch_remarks.append('\n' + '%s: %s needs %s' \
                            %(aritem.getSampleName(), analysis, reqd_titles))
                        item_remarks.append('\n' + '%s needs %s' \
                            %(analysis, reqd_titles))

                # validate analysisrequest dependancies
                if aritem.getReportDryMatter().lower() == 'y':
                    required = self.get_analysisrequest_dependancies('DryMatter')
                    reqd_analyses = required['keys']
                    reqd_titles = ''
                    for reqd in reqd_analyses:
                        if reqd not in analyses:
                            if reqd_titles != '':
                                reqd_titles += ', '
                            reqd_titles += reqd

                    if reqd_titles != '':
                        valid_item = False
                        batch_remarks.append('\n' + '%s: Report as Dry Matter needs %s' \
                            %(aritem.getSampleName(), reqd_titles))
                        item_remarks.append('\n' + 'Report as Dry Matter needs %s' \
                            %(reqd_titles))
            elif self.getImportOption() == 'p':
                # Profile layout: exactly one known profile is required.
                analyses = aritem.getAnalysisProfile()
                if len(analyses) == 0:
                    valid_item = False
                    item_remarks.append('\n%s: No Profile provided' \
                        % aritem.getSampleName())
                    batch_remarks.append('\n%s: No Profile provided' \
                        % aritem.getSampleName())
                elif len(analyses) > 1:
                    valid_item = False
                    item_remarks.append('\n%s: Only one Profile allowed' \
                        % aritem.getSampleName())
                    batch_remarks.append('\n%s: Only one Profile allowed' \
                        % aritem.getSampleName())
                else:
                    if not self._findProfileKey(analyses[0]):
                        valid_item = False
                        item_remarks.append('\n%s: unknown Profile %s' \
                            % (aritem.getSampleName(), analyses[0]))
                        batch_remarks.append('\n%s: unknown Profile %s' \
                            % (aritem.getSampleName(), analyses[0]))

            aritem.setRemarks(item_remarks)
            #print item_remarks
            if not valid_item:
                valid_batch = False
        # The declared sample count must match the number of items created.
        if self.getNumberSamples() != len(aritems):
            valid_batch = False
            batch_remarks.append('\nNumber of samples specified (%s) does no match number listed (%s)' % (self.getNumberSamples(), len(aritems)))
        self.edit(
            Remarks=batch_remarks,
            Status=valid_batch)

        #print batch_remarks
        return valid_batch
Пример #24
0
    def _submit_arimport_c(self):
        """Create Samples and AnalysisRequests for a 'classic' layout import.

        Walks every ARImportItem contained in this ARImport: resolves its
        analysis keywords to service "UID:price" strings, creates a Sample
        and an AnalysisRequest per item, links them together, and drives
        the progress bar.  Stamps DateApplied when the batch completes.

        Returns None.  `valid_batch` is tracked but, as in the original,
        only influences early termination when a SampleType is missing.
        """
        ars = []
        samples = []
        valid_batch = True
        client = self.aq_parent
        contact_obj = None
        cc_contact_obj = None

        # validate contact: locate the Contact (and optional CC contact)
        # whose usernames were captured on the import form.
        for contact in client.objectValues('Contact'):
            if contact.getUsername() == self.getContactID():
                contact_obj = contact
            if self.getCCContactID() is None:
                if contact_obj is not None:
                    break
            else:
                if contact.getUsername() == self.getCCContactID():
                    cc_contact_obj = contact
                    if contact_obj is not None:
                        break

        if contact_obj is None:
            valid_batch = False

        # map analysis service Keyword -> "UID:price" string, the format
        # expected by AnalysisRequest.edit(Analyses=...)
        services = {}
        for service in self.bika_setup_catalog(
                portal_type='AnalysisService'):
            obj = service.getObject()
            keyword = obj.getKeyword()
            if keyword:
                services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())

        samplepoints = self.bika_setup_catalog(
            portal_type='SamplePoint',
            Title=self.getSamplePoint())
        if not samplepoints:
            valid_batch = False

        aritems = self.objectValues('ARImportItem')
        request = self.REQUEST
        title = 'Submitting AR Import'
        bar = ProgressBar(
                self, request, title, description='')
        event.notify(InitialiseProgressBar(bar))

        row_count = 0
        item_count = len(aritems)
        prefix = 'Sample'
        for aritem in aritems:
            row_count += 1
            # resolve the item's analysis keywords to "UID:price" strings
            analyses = []
            for analysis in aritem.getAnalyses(full_objects=True):
                if analysis in services:
                    analyses.append(services[analysis])
                else:
                    valid_batch = False

            sampletypes = self.portal_catalog(
                portal_type='SampleType',
                sortable_title=aritem.getSampleType().lower(),
                )
            if not sampletypes:
                # should have been caught by validation; abort the batch
                # (same early-return behaviour as the original)
                valid_batch = False
                return
            sampletypeuid = sampletypes[0].getObject().UID()
            if aritem.getSampleDate():
                # sample dates are captured as dd/mm/yyyy here.
                # NOTE(review): _submit_arimport_p parses mm/dd/yyyy --
                # confirm which format the source files actually use.
                date_items = aritem.getSampleDate().split('/')
                sample_date = DateTime(
                    int(date_items[2]), int(date_items[1]), int(date_items[0]))
            else:
                sample_date = None

            sample_id = '%s-%s' % (prefix, tmpID())
            sample = _createObjectByType("Sample", client, sample_id)
            sample.unmarkCreationFlag()
            sample.edit(
                SampleID=sample_id,
                ClientReference=aritem.getClientRef(),
                ClientSampleID=aritem.getClientSid(),
                SampleType=aritem.getSampleType(),
                DateSampled=sample_date,
                SamplingDate=sample_date,
                DateReceived=DateTime(),
                )
            sample._renameAfterCreation()
            sample.setSamplePoint(self.getSamplePoint())
            sample.setSampleID(sample.getId())
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            sample_uid = sample.UID()
            samples.append(sample_id)
            aritem.setSample(sample_uid)

            priorities = self.bika_setup_catalog(
                portal_type='ARPriority',
                sortable_title=aritem.Priority.lower(),
                )
            if len(priorities) < 1:
                logger.warning(
                    'Invalid Priority: validation should have prevented this')
                # fall back to an empty priority instead of crashing with an
                # IndexError below (same handling as _submit_arimport_p)
                priority = ''
            else:
                priority = priorities[0].getObject()

            # Create the AR itself
            ar_id = tmpID()
            ar = _createObjectByType("AnalysisRequest", client, ar_id)
            report_dry_matter = aritem.getReportDryMatter().lower() == 'y'
            ar.unmarkCreationFlag()
            ar.edit(
                RequestID=ar_id,
                Contact=self.getContact(),
                CCContact=self.getCCContact(),
                CCEmails=self.getCCEmailsInvoice(),
                ClientOrderNumber=self.getOrderID(),
                ReportDryMatter=report_dry_matter,
                Analyses=analyses,
                Priority=priority,
                )
            ar.setSample(sample_uid)
            sample = ar.getSample()
            ar.setSampleType(sampletypeuid)
            ar_uid = ar.UID()
            aritem.setAnalysisRequest(ar_uid)
            ars.append(ar_id)
            ar._renameAfterCreation()

            self._add_services_to_ar(ar, analyses)

            progress_index = float(row_count) / float(item_count) * 100.0
            progress = ProgressState(request, progress_index)
            event.notify(UpdateProgressEvent(progress))
        self.setDateApplied(DateTime())
        self.reindexObject()
Пример #25
0
 def workflow_transition_expire(self):
     """Record the expiry timestamp and refresh the affected indexes."""
     now = DateTime()
     self.setDateExpired(now)
     affected_indexes = ["review_state", "getDateExpired"]
     self.reindexObject(idxs=affected_indexes)
Пример #26
0
 def workflow_script_expire(self):
     """Expire this sample: stamp the expiry date and reindex."""
     expired_at = DateTime()
     self.setDateExpired(expired_at)
     self.reindexObject()
Пример #27
0
 def workflow_script_dispose(self):
     """Dispose of this sample: stamp the disposal date and reindex."""
     disposed_at = DateTime()
     self.setDateDisposed(disposed_at)
     self.reindexObject()
Пример #28
0
 def current_date(self):
     """Return the current date/time as a Zope DateTime instance."""
     now = DateTime()
     return now
Пример #29
0
    def import_file_s(self, csvfile, client_id, state):
        """Import a 'special' fixed-layout CSV of analysis requests.

        Layout (as read by the code below): client order id on row 5 col 10;
        client details on rows 7-10; column headers on row 11; sample rows
        follow; footer rows (blank first column after row 11) carry the
        'Temperature on Arrival:' label with its value on the next row.
        Creates an ARImport holding one ARImportItem per sample row, then
        runs validate_arimport_s and redirects the browser.

        :param csvfile: open file / iterable of CSV rows
        :param client_id: id of the Client to import into
        :param state: unused here; kept for interface compatibility
        :returns: a log string on early failure, otherwise None (progress
            markup and a redirect are written straight to the response)
        """
        log = []
        r = self.portal_catalog(portal_type = 'Client', id = client_id)
        if len(r) == 0:
            log.append('   Could not find Client %s' % client_id)
            return '\n'.join(log)
        client = r[0].getObject()
        reader = csv.reader(csvfile)
        samples = []
        row_count = 0
        batch_remarks = []
        in_footers = False
        last_rows = False
        temp_row = False
        temperature = ''

        for row in reader:
            row_count = row_count + 1
            if not row: continue

            if last_rows:
                continue
            if in_footers:
                # Footer section: capture the temperature value from the row
                # following the 'Temperature on Arrival:' label.  (The
                # original code `continue`d before these checks, so this
                # branch was unreachable and `temperature` stayed ''.)
                # NOTE(review): if the label can sit on the very row that
                # switches us into the footer section, it is still missed --
                # confirm against a sample file.
                if temp_row:
                    temperature = row[8] if len(row) > 8 else ''
                    temp_row = False
                    last_rows = True
                elif len(row) > 8 and row[8] == 'Temperature on Arrival:':
                    temp_row = True
                continue

            # a blank first column after the sample rows marks the footers
            if row_count > 11:
                if row[0] == '':
                    in_footers = True

            if row_count == 5:
                client_orderid = row[10]
                continue

            if row_count < 7:
                continue

            if row_count == 7:
                # sanity check that this really is the expected layout
                if row[0] != 'Client Name':
                    log.append('  Invalid file')
                    return '\n'.join(log)
                arimport_id = self.generateUniqueId('ARImport')
                client.invokeFactory(id = arimport_id, type_name = 'ARImport')
                arimport = client._getOb(arimport_id)
                clientname = row[1]
                clientphone = row[5]
                continue

            if row_count == 8:
                clientaddress = row[1]
                clientfax = row[5]
                continue
            if row_count == 9:
                clientcity = row[1]
                clientemail = row[5]
                continue
            if row_count == 10:
                contact = row[1]
                ccemail = row[5]
                continue
            if row_count == 11:
                # column header row for the sample section; nothing to keep
                continue

            if not in_footers:
                samples.append(row)

        # padding forces the browser to flush/render progress immediately
        pad = 8192 * ' '
        REQUEST = self.REQUEST
        REQUEST.RESPONSE.write(self.progress_bar(REQUEST = REQUEST))
        REQUEST.RESPONSE.write('<input style="display: none;" id="progressType" value="Analysis request import">')
        REQUEST.RESPONSE.write('<input style="display: none;" id="progressDone" value="Validating...">')
        REQUEST.RESPONSE.write(pad + '<input style="display: none;" id="inputTotal" value="%s">' % len(samples))

        row_count = 0
        for sample in samples:
            row_count = row_count + 1
            REQUEST.RESPONSE.write(pad + '<input style="display: none;" name="inputProgress" value="%s">' % row_count)

            # profiles occupy columns 6-7, analyses columns 8-10
            profiles = [p.strip() for p in sample[6:8] if p is not None]
            analyses = [a.strip() for a in sample[8:11] if a is not None]

            aritem_id = self.generateUniqueId('ARImportItem')
            arimport.invokeFactory(id = aritem_id, type_name = 'ARImportItem')
            aritem = arimport._getOb(aritem_id)
            aritem.edit(
                ClientRef = sample[0],
                ClientRemarks = sample[1],
                ClientSid = sample[2],
                SampleDate = sample[3],
                SampleType = sample[4],
                NoContainers = sample[5],
                AnalysisProfile = profiles,
                Analyses = analyses,
                )
            aritem.processForm()

        arimport.edit(
            ImportOption = 's',
            ClientTitle = clientname,
            ClientID = client_id,
            ClientPhone = clientphone,
            ClientFax = clientfax,
            ClientAddress = clientaddress,
            ClientCity = clientcity,
            ClientEmail = clientemail,
            ContactName = contact,
            CCEmails = ccemail,
            Remarks = batch_remarks,
            OrderID = client_orderid,
            Temperature = temperature,
            DateImported = DateTime(),
            )
        arimport.processForm()

        # validation annotates the ARImport itself; the return value is not
        # needed here
        self.validate_arimport_s(arimport)
        REQUEST.RESPONSE.write('<script>document.location.href="%s/client_arimports?portal_status_message=%s%%20imported"</script>' % (client.absolute_url(), arimport_id))
Пример #30
0
    def _submit_arimport_p(self):
        """Create Samples and AnalysisRequests for a 'profiles' layout import.

        For each ARImportItem: expands its AnalysisProfile(s) and analysis
        keywords into service "UID:price" strings, creates a Sample and an
        AnalysisRequest, links them, and drives the progress bar.  Stamps
        DateApplied when the batch completes.
        """

        ars = []
        samples = []
        # NOTE(review): starts False (the classic importer starts True) and
        # is only ever *set* False below -- confirm this is intentional.
        valid_batch = False
        client = self.aq_parent
        contact_obj = None
        cc_contact_obj = None

        # validate contact
        for contact in client.objectValues('Contact'):
            if contact.getUsername() == self.getContactID():
                contact_obj = contact
            if self.getCCContactID() == None:
                if contact_obj != None:
                    break
            else:
                if contact.getUsername() == self.getCCContactID():
                    cc_contact_obj = contact
                    if contact_obj != None:
                        break

        if contact_obj == None:
            valid_batch = False

        # get Keyword to ServiceId Map
        # services: Keyword -> "UID:price"; service_uids: UID -> "UID:price"
        services = {}
        service_uids = {}

        for service in self.bika_setup_catalog(
                portal_type = 'AnalysisService'):
            obj = service.getObject()
            keyword = obj.getKeyword()
            if keyword:
                services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())
            service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())

        samplepoints = self.bika_setup_catalog(
            portal_type = 'SamplePoint',
            Title = self.getSamplePoint())
        if not samplepoints:
            valid_batch = False

        # cache of profilekey -> list of service UIDs, shared across items
        profiles = {}
        aritems = self.objectValues('ARImportItem')

        request = self.REQUEST
        title = 'Submitting AR Import'
        bar = ProgressBar(
                self, request, title, description='')
        event.notify(InitialiseProgressBar(bar))

        row_count = 0
        item_count = len(aritems)
        prefix = 'Sample'
        for aritem in aritems:
            # set up analyses
            ar_profile = None
            analyses = []
            row_count += 1

            for profilekey in aritem.getAnalysisProfile():
                this_profile = None
                if not profiles.has_key(profilekey):
                    profiles[profilekey] = []
                    # there is no profilekey index
                    l_prox = self._findProfileKey(profilekey)
                    if l_prox:
                        profiles[profilekey] = \
                                [s.UID() for s in l_prox.getService()]
                        this_profile = l_prox
                    else:
                        #TODO This will not find it!!
                        # there is no profilekey index
                        c_prox = self.bika_setup_catalog(
                                    portal_type = 'AnalysisProfile',
                                    getClientUID = client.UID(),
                                    getProfileKey = profilekey)
                        if c_prox:
                            obj = c_prox[0].getObject()
                            profiles[profilekey] = \
                                    [s.UID() for s in obj.getService()]
                            this_profile = obj

                # NOTE(review): this uses `obj` -- whatever was last bound in
                # the service loop or the c_prox branch above -- not
                # `this_profile`; looks like it should be `this_profile`.
                if ar_profile is None:
                    ar_profile = obj
                else:
                    ar_profile = None
                profile = profiles[profilekey]
                for analysis in profile:
                    if not service_uids.has_key(analysis):
                        # NOTE(review): `tool` is not defined anywhere in this
                        # scope -- this branch raises NameError if reached;
                        # presumably a uid_catalog/reference-catalog lookup
                        # was intended.  TODO confirm and fix.
                        service = tool.lookupObject(analysis)
                        keyword = service.getKeyword()
                        # NOTE(review): keyed and priced from the stale `obj`
                        # rather than `service`/`analysis` -- looks like a
                        # copy/paste bug.
                        service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())
                        if keyword:
                            services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())

                    if service_uids.has_key(analysis):
                        if not service_uids[analysis] in analyses:
                            analyses.append(service_uids[analysis])
                    else:
                        valid_batch = False

            # individually-listed analyses (by keyword), on top of profiles
            for analysis in aritem.getAnalyses(full_objects=True):
                if not services.has_key(analysis):
                    for service in self.bika_setup_catalog(
                            portal_type = 'AnalysisService',
                            getKeyword = analysis):
                        obj = service.getObject()
                        services[analysis] = '%s:%s' % (obj.UID(), obj.getPrice())
                        service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())

                if services.has_key(analysis):
                    analyses.append(services[analysis])
                else:
                    valid_batch = False

            sampletypes = self.portal_catalog(
                portal_type = 'SampleType',
                sortable_title = aritem.getSampleType().lower(),
                )
            if not sampletypes:
                # missing SampleType aborts the whole batch mid-way
                valid_batch = False
                return
            sampletypeuid = sampletypes[0].getObject().UID()

            if aritem.getSampleDate():
                # NOTE(review): parses mm/dd/yyyy whereas _submit_arimport_c
                # parses dd/mm/yyyy -- confirm which format is correct.
                date_items = aritem.getSampleDate().split('/')
                sample_date = DateTime(
                    int(date_items[2]), int(date_items[0]), int(date_items[1]))
            else:
                sample_date = None

            sample_id = '%s-%s' % (prefix, tmpID())
            sample = _createObjectByType("Sample", client, sample_id)
            sample.unmarkCreationFlag()
            sample.edit(
                SampleID = sample_id,
                ClientReference = aritem.getClientRef(),
                ClientSampleID = aritem.getClientSid(),
                SampleType = aritem.getSampleType(),
                DateSampled = sample_date,
                SamplingDate = sample_date,
                DateReceived = DateTime(),
                Remarks = aritem.getClientRemarks(),
                )
            sample._renameAfterCreation()
            sample.setSamplePoint(self.getSamplePoint())
            sample.setSampleID(sample.getId())
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            sample_uid = sample.UID()
            samples.append(sample_id)
            aritem.setSample(sample_uid)

            priorities = self.bika_setup_catalog(
                portal_type = 'ARPriority',
                sortable_title = aritem.Priority.lower(),
                )
            if len(priorities) < 1:
                logger.warning(
                    'Invalid Priority: validation should have prevented this')
                priority = ''
            else:
                priority = priorities[0].getObject()

            # Create the AR and link it to the sample
            ar_id = tmpID()
            ar = _createObjectByType("AnalysisRequest", client, ar_id)
            report_dry_matter = False

            ar.unmarkCreationFlag()
            ar.edit(
                RequestID = ar_id,
                Contact = self.getContact(),
                CCContact = self.getCCContact(),
                CCEmails = self.getCCEmailsInvoice(),
                ClientOrderNumber = self.getOrderID(),
                ReportDryMatter = report_dry_matter,
                Profile = ar_profile,
                Analyses = analyses,
                Remarks = aritem.getClientRemarks(),
                Priority = priority,
                )
            ar.setSample(sample_uid)
            sample = ar.getSample()
            ar.setSampleType(sampletypeuid)
            ar_uid = ar.UID()
            aritem.setAnalysisRequest(ar_uid)
            ars.append(ar_id)
            ar._renameAfterCreation()
            progress_index = float(row_count)/float(item_count)*100.0
            progress = ProgressState(request, progress_index)
            event.notify(UpdateProgressEvent(progress))
            self._add_services_to_ar(ar, analyses)

        self.setDateApplied(DateTime())
        self.reindexObject()
Пример #31
0
    def _ar_data(self, ar, excludearuids=None):
        """Create an ar dict, accessible from the view and from each
        specific template.

        :param ar: the AnalysisRequest to serialize
        :param excludearuids: UIDs of ARs already serialized; used to stop
            the parent/child recursion from looping.  A fresh list is
            created when omitted.
        :returns: dict of AR fields, related objects and (categorized)
            analyses data for use by the report templates.
        """
        # A fresh list per call: the previous signature used the mutable
        # default `excludearuids=[]`, which was mutated below and so
        # accumulated UIDs across calls.
        if excludearuids is None:
            excludearuids = []
        data = {
            'obj': ar,
            'id': ar.getRequestID(),
            'client_order_num': ar.getClientOrderNumber(),
            'client_reference': ar.getClientReference(),
            'client_sampleid': ar.getClientSampleID(),
            'adhoc': ar.getAdHoc(),
            'composite': ar.getComposite(),
            'report_drymatter': ar.getReportDryMatter(),
            'invoice_exclude': ar.getInvoiceExclude(),
            'date_received': self.ulocalized_time(ar.getDateReceived(),
                                                  long_format=1),
            'member_discount': ar.getMemberDiscount(),
            'date_sampled': self.ulocalized_time(ar.getDateSampled(),
                                                 long_format=1),
            'date_published': self.ulocalized_time(DateTime(), long_format=1),
            'invoiced': ar.getInvoiced(),
            'late': ar.getLate(),
            'subtotal': ar.getSubtotal(),
            'vat_amount': ar.getVATAmount(),
            'totalprice': ar.getTotalPrice(),
            'invalid': ar.isInvalid(),
            'url': ar.absolute_url(),
            # 'remarks' appeared twice in the original dict literal; only
            # this utf-8 encoded value ever took effect, so keep just it.
            'remarks': to_utf8(ar.getRemarks()),
            'footer': to_utf8(self.context.bika_setup.getResultFooter()),
            'prepublish': False,
            'child_analysisrequest': None,
            'parent_analysisrequest': None,
            'resultsinterpretation': ar.getResultsInterpretation(),
        }

        # Sub-objects: recurse into parent/child ARs unless they were
        # already serialized (prevents infinite recursion on cycles).
        excludearuids.append(ar.UID())
        puid = ar.getRawParentAnalysisRequest()
        if puid and puid not in excludearuids:
            data['parent_analysisrequest'] = self._ar_data(
                ar.getParentAnalysisRequest(), excludearuids)
        cuid = ar.getRawChildAnalysisRequest()
        if cuid and cuid not in excludearuids:
            data['child_analysisrequest'] = self._ar_data(
                ar.getChildAnalysisRequest(), excludearuids)

        # pre-publish is only offered while the AR is not yet
        # verified/published
        wf = getToolByName(ar, 'portal_workflow')
        allowed_states = ['verified', 'published']
        data['prepublish'] = wf.getInfoFor(
            ar, 'review_state') not in allowed_states

        data['contact'] = self._contact_data(ar)
        data['client'] = self._client_data(ar)
        data['sample'] = self._sample_data(ar)
        data['batch'] = self._batch_data(ar)
        data['specifications'] = self._specs_data(ar)
        data['analyses'] = self._analyses_data(ar, ['verified', 'published'])
        data['qcanalyses'] = self._qcanalyses_data(ar,
                                                   ['verified', 'published'])
        data['points_of_capture'] = sorted(
            set(an['point_of_capture'] for an in data['analyses']))
        data['categories'] = sorted(
            set(an['category'] for an in data['analyses']))
        data['haspreviousresults'] = any(
            an['previous_results'] for an in data['analyses'])
        data['hasblanks'] = any(
            an['reftype'] == 'b' for an in data['qcanalyses'])
        data['hascontrols'] = any(
            an['reftype'] == 'c' for an in data['qcanalyses'])
        data['hasduplicates'] = any(
            an['reftype'] == 'd' for an in data['qcanalyses'])

        # Categorize analyses by point-of-capture -> category, and again by
        # department -> point-of-capture -> category.
        data['categorized_analyses'] = {}
        data['department_analyses'] = {}
        for an in data['analyses']:
            poc = an['point_of_capture']
            cat = an['category']
            pocdict = data['categorized_analyses'].get(poc, {})
            catlist = pocdict.get(cat, [])
            catlist.append(an)
            pocdict[cat] = catlist
            data['categorized_analyses'][poc] = pocdict

            # Group by department too
            anobj = an['obj']
            dept = anobj.getService().getDepartment() if anobj.getService(
            ) else None
            if dept:
                dept = dept.UID()
                dep = data['department_analyses'].get(dept, {})
                dep_pocdict = dep.get(poc, {})
                dep_catlist = dep_pocdict.get(cat, [])
                dep_catlist.append(an)
                dep_pocdict[cat] = dep_catlist
                dep[poc] = dep_pocdict
                data['department_analyses'][dept] = dep

        # Categorize qcanalyses by reference type -> poc -> category
        data['categorized_qcanalyses'] = {}
        for an in data['qcanalyses']:
            qct = an['reftype']
            poc = an['point_of_capture']
            cat = an['category']
            qcdict = data['categorized_qcanalyses'].get(qct, {})
            pocdict = qcdict.get(poc, {})
            catlist = pocdict.get(cat, [])
            catlist.append(an)
            pocdict[cat] = catlist
            qcdict[poc] = pocdict
            data['categorized_qcanalyses'][qct] = qcdict

        data['reporter'] = self._reporter_data(ar)
        data['managers'] = self._managers_data(ar)

        portal = self.context.portal_url.getPortalObject()
        data['portal'] = {'obj': portal, 'url': portal.absolute_url()}
        data['laboratory'] = self._lab_data()

        # results interpretation, keyed by department title ('' = general)
        ri = {}
        if (ar.getResultsInterpretationByDepartment(None)):
            ri[''] = ar.getResultsInterpretationByDepartment(None)
        depts = ar.getDepartments()
        for dept in depts:
            ri[dept.Title()] = ar.getResultsInterpretationByDepartment(dept)
        data['resultsinterpretationdepts'] = ri

        return data
Пример #32
0
    def __call__(self):
        """Create and render the selected productivity report.

        Loads the Report class named by `report_id` (or `report_module`)
        from the request, runs it, wraps the output in report_frame.pt,
        renders a PDF, stores it as a Report content object and streams it
        back as an attachment.  Returns rendered error markup on failure,
        the raw output for string/CSV reports, or None after streaming.
        """

        # if there's an error, we return productivity.pt which requires these.
        self.selection_macros = SelectionMacrosView(self.context, self.request)
        self.additional_reports = []
        # gather adapter-provided reports so they appear in the selection UI
        adapters = getAdapters((self.context, ), IProductivityReport)
        for name, adapter in adapters:
            report_dict = adapter(self.context, self.request)
            report_dict['id'] = name
            self.additional_reports.append(report_dict)

        report_id = self.request.get('report_id', '')
        if not report_id:
            message = _("No report specified in request")
            self.logger.error(message)
            self.context.plone_utils.addPortalMessage(message, 'error')
            return self.template()

        # reporter metadata displayed on the rendered report
        self.date = DateTime()
        username = self.context.portal_membership.getAuthenticatedMember().getUserName()
        self.reporter = self.user_fullname(username)
        self.reporter_email = self.user_email(username)

        # signature image, taken from the matching LabContact if any
        self.reporter_signature = ""
        c = [x for x in self.bika_setup_catalog(portal_type='LabContact')
             if x.getObject().getUsername() == username]
        if c:
            sf = c[0].getObject().getSignature()
            if sf:
                self.reporter_signature = sf.absolute_url() + "/Signature"

        # laboratory letterhead details
        lab = self.context.bika_setup.laboratory
        self.laboratory = lab
        self.lab_title = lab.getName()
        self.lab_address = lab.getPrintAddress()
        self.lab_email = lab.getEmailAddress()
        self.lab_url = lab.getLabURL()

        client = logged_in_client(self.context)
        if client:
            clientuid = client.UID()
            self.client_title = client.Title()
            self.client_address = client.getPrintAddress()
        else:
            clientuid = None
            self.client_title = None
            self.client_address = None

        # Render form output

        # the report can add file names to this list; they will be deleted
        # once the PDF has been generated.  temporary plot image files, etc.
        self.request['to_remove'] = []

        if "report_module" in self.request:
            module = self.request["report_module"]
        else:
            module = "bika.lims.browser.reports.%s" % report_id
        try:
            # NOTE(review): exec of a module path taken straight from the
            # request is an arbitrary-import vector; consider validating
            # `module` against a whitelist (or using importlib with one).
            exec ("from %s import Report" % module)
            # required during error redirect: the report must have a copy of
            # additional_reports, because it is used as a surrogate view.
            Report.additional_reports = self.additional_reports
        except ImportError:
            message = "Report %s.Report not found (shouldn't happen)" % module
            self.logger.error(message)
            self.context.plone_utils.addPortalMessage(message, 'error')
            return self.template()

        # Report must return dict with:
        # - report_title - title string for pdf/history listing
        # - report_data - rendered report
        output = Report(self.context, self.request)()

        # if CSV output is chosen, report returns None
        if not output:
            return

        # string output (e.g. raw CSV text) is returned to the caller as-is
        if type(output) in (str, unicode, bytes):
            # remove temporary files
            for f in self.request['to_remove']:
                os.remove(f)
            return output

        # The report output gets pulled through report_frame.pt
        self.reportout = output['report_data']
        framed_output = self.frame_template()

        # this is the good part
        result = createPdf(framed_output)

        # remove temporary files
        for f in self.request['to_remove']:
            os.remove(f)

        if result:
            # Create new report object
            reportid = self.aq_parent.generateUniqueId('Report')
            report = _createObjectByType("Report", self.aq_parent, reportid)
            report.edit(Client=clientuid)
            report.processForm()

            # write pdf to report object
            report.edit(title=output['report_title'], ReportFile=result)
            report.reindexObject()

            fn = "%s - %s" % (self.date.strftime(self.date_format_short),
                              _u(output['report_title']))

            # stream the PDF back to the browser as a download
            setheader = self.request.RESPONSE.setHeader
            setheader('Content-Type', 'application/pdf')
            setheader("Content-Disposition",
                      "attachment;filename=\"%s\"" % _c(fn))
            self.request.RESPONSE.write(result)

        return
Пример #33
0
    def publishFromHTML(self, aruid, results_html):
        # NOTE(review): this is the tail of a larger publish method -- the
        # names `aruid` and `results_html` are bound earlier in the method,
        # above this excerpt.

        # The AR can be published only and only if allowed.
        # Resolve the AR by UID; require exactly one catalog match.
        uc = getToolByName(self.context, 'uid_catalog')
        ars = uc(UID=aruid)
        if not ars or len(ars) != 1:
            return []

        ar = ars[0].getObject()
        wf = getToolByName(ar, 'portal_workflow')
        allowed_states = ['verified', 'published']
        # Publish/Republish allowed?
        if wf.getInfoFor(ar, 'review_state') not in allowed_states:
            # Pre-publish allowed? At least one analysis must already be
            # in an allowed state; otherwise nothing can be published.
            if not ar.getAnalyses(review_state=allowed_states):
                return []

        # HTML written to debug file
        # NOTE(review): the Zope config lookup below was commented out and
        # the flag hard-coded to True, so debug artifacts (HTML/PDF/email
        # dumps) are ALWAYS written to the temp directory -- confirm this
        # override is intentional before shipping.
        # debug_mode = App.config.getConfiguration().debug_mode
        debug_mode = True  # hard-coded override of the config lookup
        if debug_mode:
            # NOTE(review): tempfile.mktemp is deprecated and racy (name is
            # generated before the file is opened), and the file handle from
            # open() is never closed -- consider NamedTemporaryFile.
            tmp_fn = tempfile.mktemp(suffix=".html")
            logger.debug("Writing HTML for %s to %s" % (ar.Title(), tmp_fn))
            open(tmp_fn, "wb").write(results_html)

        # Create the pdf report (will always be attached to the AR)
        # we must supply the file ourself so that createPdf leaves it alone.
        # This version replaces 'attachment' links; probably not required,
        # so it's repeated below, without these localise_images.
        # cleanup, results_html_for_pdf = self.localise_images(results_html)
        # pdf_fn = tempfile.mktemp(suffix=".pdf")
        # pdf_report = createPdf(htmlreport=results_html_for_pdf, outfile=pdf_fn)
        # for fn in cleanup:
        #     os.remove(fn)

        # Create the pdf report (will always be attached to the AR)
        # we must supply the file ourself so that createPdf leaves it alone.
        pdf_fn = tempfile.mktemp(suffix=".pdf")
        pdf_report = createPdf(htmlreport=results_html, outfile=pdf_fn)

        # PDF written to debug file
        # Keep the temp PDF on disk only in debug mode; otherwise remove it
        # now that its contents are held in `pdf_report`.
        if debug_mode:
            logger.debug("Writing PDF for %s to %s" % (ar.Title(), pdf_fn))
        else:
            os.remove(pdf_fn)

        recipients = []
        contact = ar.getContact()
        lab = ar.bika_setup.laboratory
        if pdf_report:
            # Record the primary client contact (if any) on the ARReport.
            if contact:
                recipients = [{
                    'UID':
                    contact.UID(),
                    'Username':
                    to_utf8(contact.getUsername()),
                    'Fullname':
                    to_utf8(contact.getFullname()),
                    'EmailAddress':
                    to_utf8(contact.getEmailAddress()),
                    'PublicationModes':
                    contact.getPublicationPreference()
                }]
            # Persist the rendered report (HTML + PDF + recipients) as an
            # ARReport object inside the AR.
            reportid = ar.generateUniqueId('ARReport')
            report = _createObjectByType("ARReport", ar, reportid)
            report.edit(AnalysisRequest=ar.UID(),
                        Pdf=pdf_report,
                        Html=results_html,
                        Recipients=recipients)
            report.unmarkCreationFlag()
            renameAfterCreation(report)

            # Set status to prepublished/published/republished
            status = wf.getInfoFor(ar, 'review_state')
            transitions = {'verified': 'publish', 'published': 'republish'}
            transition = transitions.get(status, 'prepublish')
            try:
                wf.doActionFor(ar, transition)
            except WorkflowException:
                # Best-effort: the transition may legitimately be disallowed
                # (e.g. pre-publish from an unexpected state).
                pass

            # compose and send email.
            # The managers of the departments for which the current AR has
            # at least one AS must receive always the pdf report by email.
            # https://github.com/bikalabs/Bika-LIMS/issues/1028
            mime_msg = MIMEMultipart('related')
            mime_msg['Subject'] = self.get_mail_subject(ar)[0]
            mime_msg['From'] = formataddr(
                (encode_header(lab.getName()), lab.getEmailAddress()))
            mime_msg.preamble = 'This is a multi-part MIME message.'
            msg_txt = MIMEText(results_html, _subtype='html')
            mime_msg.attach(msg_txt)

            # Collect department-manager addresses for the AR.
            to = []
            mngrs = ar.getResponsible()
            for mngrid in mngrs['ids']:
                name = mngrs['dict'][mngrid].get('name', '')
                email = mngrs['dict'][mngrid].get('email', '')
                if (email != ''):
                    to.append(formataddr((encode_header(name), email)))

            if len(to) > 0:
                # Send the email to the managers
                mime_msg['To'] = ','.join(to)
                attachPdf(mime_msg, pdf_report, pdf_fn)

                try:
                    host = getToolByName(ar, 'MailHost')
                    host.send(mime_msg.as_string(), immediate=True)
                except SMTPServerDisconnected as msg:
                    # Transient transport failure: log and continue.
                    logger.warn("SMTPServerDisconnected: %s." % msg)
                except SMTPRecipientsRefused as msg:
                    # Refused recipients are surfaced to the caller as a
                    # workflow failure.
                    raise WorkflowException(str(msg))

        # Send report to recipients
        # Each recipient must have opted in to email publication and have
        # an address; others are skipped.
        recips = self.get_recipients(ar)
        for recip in recips:
            if 'email' not in recip.get('pubpref', []) \
                    or not recip.get('email', ''):
                continue

            title = encode_header(recip.get('title', ''))
            email = recip.get('email')
            formatted = formataddr((title, email))

            # Create the new mime_msg object, cause the previous one
            # has the pdf already attached
            mime_msg = MIMEMultipart('related')
            mime_msg['Subject'] = self.get_mail_subject(ar)[0]
            mime_msg['From'] = formataddr(
                (encode_header(lab.getName()), lab.getEmailAddress()))
            mime_msg.preamble = 'This is a multi-part MIME message.'
            msg_txt = MIMEText(results_html, _subtype='html')
            mime_msg.attach(msg_txt)
            mime_msg['To'] = formatted

            # Attach the pdf to the email if requested
            if pdf_report and 'pdf' in recip.get('pubpref'):
                attachPdf(mime_msg, pdf_report, pdf_fn)

            # For now, I will simply ignore mail send under test.
            if hasattr(self.portal, 'robotframework'):
                continue

            msg_string = mime_msg.as_string()

            # content of outgoing email written to debug file
            if debug_mode:
                # NOTE(review): same mktemp/unclosed-handle issue as above.
                tmp_fn = tempfile.mktemp(suffix=".email")
                logger.debug("Writing MIME message for %s to %s" %
                             (ar.Title(), tmp_fn))
                open(tmp_fn, "wb").write(msg_string)

            try:
                host = getToolByName(ar, 'MailHost')
                host.send(msg_string, immediate=True)
            except SMTPServerDisconnected as msg:
                # Transient transport failure: log and continue with the
                # remaining recipients.
                logger.warn("SMTPServerDisconnected: %s." % msg)
            except SMTPRecipientsRefused as msg:
                raise WorkflowException(str(msg))

        # Stamp the publication date and return the published AR.
        ar.setDatePublished(DateTime())

        return [ar]