def generate_delivery_pdf(context, ars_or_samples):
    """Generates a single delivery form PDF for the given Analysis Requests
    (or Samples), attaches the PDF to every Analysis Request involved and
    returns the filesystem path of the generated temporary PDF file.

    :param context: context the DeliveryFormPdf view is bound to
    :param ars_or_samples: a single AR/Sample or a list of them
    :return: path of the temporary PDF file, or None if nothing was generated
    """
    if not ars_or_samples:
        logger.warn("No Analysis Requests or Samples provided")
        return

    # Normalize a single AR/Sample into a one-item list and recurse
    if ISample.providedBy(ars_or_samples) or \
            IAnalysisRequest.providedBy(ars_or_samples):
        return generate_delivery_pdf(context, [ars_or_samples])

    if not isinstance(ars_or_samples, list):
        logger.warn("Type not supported: {}".format(repr(ars_or_samples)))
        return

    # Render the delivery form template and create the PDF in a temp file
    html = DeliveryFormPdf(context, context.REQUEST,
                           analysis_requests=ars_or_samples).template()
    html = safe_unicode(html).encode("utf-8")
    filename = "delivery"
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        ar_ids = map(lambda ar: ar.id, ars_or_samples)
        logger.warn(
            "Unable to generate the PDF of delivery form for {}".format(
                ' '.join(ar_ids)))
        return None

    def _attach_to_ar(pdf, ar_brain_or_obj):
        # Creates an Attachment holding the delivery PDF inside the AR's
        # parent container and links it to the AR
        ar = api.get_object(ar_brain_or_obj)

        # Create the attachment object
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType("Attachment", ar.aq_parent, attid)
        # NOTE(review): the file handle passed here is never closed
        # explicitly -- relies on GC; confirm whether setAttachmentFile
        # consumes/closes it
        att.setAttachmentFile(open(pdf_fn))

        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)

        # Append the new attachment (by UID) to the AR's existing ones
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)

    for ar_or_sample in ars_or_samples:
        # Attach the pdf to the Analysis Request
        if ISample.providedBy(ar_or_sample):
            # Attach to every AR contained in the Sample
            for ar in ar_or_sample.getAnalysisRequests():
                _attach_to_ar(pdf, ar)
        elif IAnalysisRequest.providedBy(ar_or_sample):
            _attach_to_ar(pdf, ar_or_sample)

    return pdf_fn
def __call__(self, action, services):
    """The objects passed in are Analysis Services and the context is the
    Analysis Request

    :param action: the workflow action being processed
    :param services: Analysis Service objects selected in the form
    :return: result of self.success / self.redirect
    """
    sample = self.context
    if not IAnalysisRequest.providedBy(sample):
        return self.redirect(message=_("No changes made"), level="warning")

    # Get form values
    form = self.request.form
    prices = form.get("Price", [None])[0]
    # Bugfix: store the service UID under "uid" (the sibling adapter builds
    # the same settings structure with api.get_uid); previously the service
    # object itself was stored, breaking later settings lookups by UID
    hidden = map(lambda o: {"uid": api.get_uid(o),
                            "hidden": self.is_hidden(o)}, services)
    specs = map(lambda service: self.get_specs(service), services)

    # Set new analyses to the sample
    uids = map(api.get_uid, services)
    sample.setAnalysisServicesSettings(hidden)
    sample.setAnalyses(uids, prices=prices, specs=specs)

    # Just in case new analyses have been added while the Sample was in a
    # "non-open" state (e.g. "to_be_verified")
    self.do_action("rollback_to_receive", [sample])

    # Reindex the analyses that have been added
    for analysis in sample.objectValues("Analysis"):
        analysis.reindexObject()

    # Redirect the user to success page
    self.success([sample])
def send_panic_email(view):
    """Sends an alert email for an Analysis Request whose results exceed the
    configured panic levels, using subject/recipient/body taken from the
    request of the given view.

    :param view: a view whose context is expected to be an Analysis Request
    :return: False on any failure; returns None implicitly on success
             (NOTE(review): callers relying on truthiness should confirm
             this asymmetric return is intended)
    """
    ar = view.context
    if not IAnalysisRequest.providedBy(ar):
        return False
    if not ar.has_analyses_in_panic():
        addMessage(view, _("No results exceed the panic levels"), 'warning')
        return False

    # Send an alert email
    laboratory = view.context.bika_setup.laboratory
    subject = view.request.get('subject')
    to = view.request.get('to')
    body = view.request.get('email_body')
    # Convert the CRLF line breaks from the form textarea into HTML breaks
    body = "<br/>".join(body.split("\r\n"))
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = subject
    mime_msg['From'] = formataddr(
        (encode_header(laboratory.getName()), laboratory.getEmailAddress()))
    mime_msg['To'] = to
    msg_txt = MIMEText(safe_unicode(body).encode('utf-8'), _subtype='html')
    mime_msg.preamble = 'This is a multi-part MIME message.'
    mime_msg.attach(msg_txt)
    try:
        host = getToolByName(view.context, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception, msg:
        # Rebind `ar` to the id only for logging purposes
        ar = view.context.id
        logger.error("Panic level email %s: %s" % (ar, str(msg)))
        message = _('Unable to send an email to alert client '
                    'that some results exceeded the panic levels') \
            + (": %s" % str(msg))
        addMessage(view, message, 'warning')
        return False
def search_analysis_from(container, keywords):
    """Searches an analysis with the specified keyword within the container

    :param container: Analysis Request or Worksheet (brain or object)
    :param keywords: the analysis keyword to look for
    :return: the unique matching analysis object, or None otherwise
    :raises ValueError: if the container is neither an AR nor a Worksheet
    """
    # Base search criteria, restricted to open analyses
    criteria = {
        "getKeyword": keywords,
        "portal_type": "Analysis",
        "review_state": ["unassigned", "assigned"],
    }

    # Constrain the search to the given container
    target = api.get_object(container)
    target_uid = api.get_uid(target)
    if IAnalysisRequest.providedBy(target):
        criteria["getAncestorsUIDs"] = target_uid
    elif IWorksheet.providedBy(target):
        criteria["getWorksheetUID"] = target_uid
    else:
        raise ValueError("Could not get analyses from {}".format(
            api.get_path(target)))

    # Only a single, unambiguous match is acceptable
    matches = api.search(criteria, CATALOG_ANALYSIS_LISTING)
    if len(matches) != 1:
        return None
    return api.get_object(matches[0])
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request

    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    :raises ValueError: if ar is None, not an AR, already has a retest, or is
        not in 'invalid' state
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")
    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))
    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")
    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        # Bugfix: message wording was inverted ("from an invalid AR" although
        # this branch fires when the AR is NOT invalid) and .format() was
        # called on a string without a placeholder, silently dropping repr(ar)
        raise ValueError("Cannot do a retest from a non-invalid Analysis "
                         "Request {}".format(repr(ar)))

    # 0. Open the actions pool so reindexing is deferred and batched
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # 1. Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    retest.setSample(ar.getSample())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)
    renameAfterCreation(retest)

    # 2. Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if (api.get_workflow_status_of(an) in intermediate_states):
            # Exclude intermediate analyses
            continue
        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # 3. Assign the source to retest
    retest.setInvalidated(ar)

    # 4. Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')

    # 5. Reindex and other stuff
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # 6. Resume the actions pool
    actions_pool.resume()
    return retest
def sample(self):
    """Lazily resolves the sample (Analysis Request) this view works on.

    Resolution order: cached value, then the current context, then the
    object referenced by the "uid" request parameter. Caches None when no
    suitable sample can be found.
    """
    if self._sample != self._marker:
        # Already resolved (possibly to None)
        return self._sample

    # Maybe current context is a Sample?
    if IAnalysisRequest.providedBy(self.context):
        self._sample = self.context
        return self._sample

    # Try with the uid from the request
    candidate = self.get_object_by_uid(self.request.get("uid"))
    if candidate and IAnalysisRequest.providedBy(candidate):
        self._sample = candidate
    else:
        self._sample = None
    return self._sample
def _folder_item_assigned_worksheet(self, analysis_brain, item):
    """Adds an icon to the item dict if the analysis is assigned to a
    worksheet and if the icon is suitable for the current context

    :param analysis_brain: Brain that represents an analysis
    :param item: analysis' dictionary counterpart that represents a row
    """
    # This icon is only meaningful when the listing lives inside an AR
    if not IAnalysisRequest.providedBy(self.context):
        return

    # Skip analyses that are not assigned to any worksheet
    if analysis_brain.worksheetanalysis_review_state != 'assigned':
        return

    analysis = self.get_object(analysis_brain)
    worksheets = analysis.getBackReferences('WorksheetAnalysis')
    if not worksheets:
        # No worksheet assigned. Do nothing
        return

    # Render a worksheet icon linking to the (first) assigned worksheet
    assigned_ws = worksheets[0]
    title = t(_("Assigned to: ${worksheet_id}",
                mapping={'worksheet_id': safe_unicode(assigned_ws.id)}))
    icon = get_image('worksheet.png', title=title)
    anchor = get_link(assigned_ws.absolute_url(), icon)
    self._append_html_element(item, 'state_title', anchor)
def is_auto_partition_required(self, brain_or_object):
    """Returns whether the passed in object needs to be partitioned

    :param brain_or_object: catalog brain or content object to check
    :return: True if the object is an Analysis Request whose template has
        auto-partitioning enabled, False otherwise
    """
    obj = api.get_object(brain_or_object)
    if not IAnalysisRequest.providedBy(obj):
        return False
    template = obj.getTemplate()
    # Coerce to a strict boolean: previously this returned None when no
    # template was set (and whatever getAutoPartition() returned otherwise)
    return bool(template and template.getAutoPartition())
def in_panic(self):
    """Returns whether at least one analysis of the current Sample exceeds
    the panic levels. The result is memoized in self._in_panic.
    """
    if self._in_panic is not None:
        return self._in_panic

    # Not resolved yet; only Analysis Requests can be in panic
    panic = False
    if IAnalysisRequest.providedBy(self.context):
        panic = utils.has_analyses_in_panic(self.context)
    self._in_panic = panic
    return self._in_panic
def is_object_allowed(self, object_brain_uid):
    """Returns whether the type of object can be stored or not in this
    container. This function returns true if the object is allowed, even if
    the container already contains the object

    :param object_brain_uid: UID, brain or object to check
    """
    # TODO Filter by sample type, volume, etc.
    # Only objects providing IAnalysisRequest are allowed
    candidate = api.get_object(object_brain_uid)
    return IAnalysisRequest.providedBy(candidate)
def after_verify(obj):
    """Event fired after verify transition is triggered

    Delegates to the analysis-specific handler for (duplicate) analyses and
    promotes the transition when the object is an Analysis Request.
    """
    logger.info("*** Custom after_verify transition ***")
    if IAnalysis.providedBy(obj) or IDuplicateAnalysis.providedBy(obj):
        analysis_events.after_verify(obj)
    if IAnalysisRequest.providedBy(obj):
        _promote_transition(obj, "verify")
def after_submit(obj):
    """Event fired after submit transition is triggered

    Delegates to the analysis-specific handler for (duplicate) analyses and
    promotes the transition when the object is an Analysis Request.
    """
    logger.info("*** Custom after_submit transition ***")
    is_analysis_like = (IAnalysis.providedBy(obj) or
                        IDuplicateAnalysis.providedBy(obj))
    if is_analysis_like:
        analysis_events.after_submit(obj)
    if IAnalysisRequest.providedBy(obj):
        _promote_transition(obj, "submit")
def __init__(self, context, request, analysis_requests=None):
    """Binds the view to the given context/request and resolves the list of
    Analysis Requests to render, falling back to the context when none are
    explicitly passed in.
    """
    super(DeliveryFormPdf, self).__init__(context, request)
    self.analysis_requests = analysis_requests
    if self.analysis_requests:
        return
    # No ARs given: infer them from the current context
    if ISample.providedBy(context):
        self.analysis_requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        self.analysis_requests = [context]
def __init__(self, context, request):
    """Binds the view to the given context/request and resolves the list of
    Analysis Requests to render from the context.
    """
    super(RequisitionFormPdf, self).__init__(context, request)
    # Resolve the ARs this requisition form must render
    if ISample.providedBy(context):
        self.analysis_requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        self.analysis_requests = [context]
    else:
        self.analysis_requests = []
def __init__(self, context, request, analysis_requests=None,
             lab_department=None):
    """Binds the view to the given context/request, stores the target lab
    department and resolves the Analysis Requests to render, falling back
    to the context when none are explicitly passed in.
    """
    super(InternalDeliveryFormPdf, self).__init__(context, request)
    self.analysis_requests = analysis_requests
    self.lab_department = lab_department
    if self.analysis_requests:
        return
    # No ARs given: infer them from the current context
    if ISample.providedBy(context):
        self.analysis_requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        self.analysis_requests = [context]
def get_sample(instance):
    """Returns the sample associated to this instance, if any. Otherwise,
    returns None

    Walks up from Analysis Requests (via their sample) and Sample
    Partitions (via their parent) until a Sample is reached.
    """
    current = instance
    while current is not None:
        if ISample.providedBy(current):
            return current
        if IAnalysisRequest.providedBy(current):
            current = current.getSample()
        elif ISamplePartition.providedBy(current):
            current = current.aq_parent
        else:
            return None
    return None
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request

    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    :raises ValueError: if ar is None, not an AR, already has a retest, or is
        not in 'invalid' state
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")
    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))
    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")
    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        # Bugfix: message wording was inverted ("from an invalid AR" although
        # this branch fires when the AR is NOT invalid) and .format() was
        # called on a string without a placeholder, silently dropping repr(ar)
        raise ValueError(
            "Cannot do a retest from a non-invalid Analysis Request "
            "{}".format(repr(ar)))

    # 1. Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    retest.setSample(ar.getSample())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)
    renameAfterCreation(retest)

    # 2. Copy the analyses from the source (retracted/reflexed excluded)
    criteria = dict(full_objects=True, retracted=False, reflexed=False)
    for an in ar.getAnalyses(**criteria):
        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['Verificators', 'DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()

        # Set the workflow state of the analysis to 'sample_received'. Since we
        # keep the results of the previous analyses, these will be preserved,
        # only awaiting for their submission
        changeWorkflowState(nan, 'bika_analysis_workflow', 'sample_received')
        nan.reindexObject()

    # 3. Assign the source to retest
    retest.setInvalidated(ar)

    # 4. Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')

    # 5. Reindex and other stuff
    retest.reindexObject()
    retest.aq_parent.reindexObject()
    return retest
def fix_ar_sample_workflow(brain_or_object):
    """Re-set the state of an AR, Sample and SamplePartition to match the
    least-early state of all contained valid/current analyses. Ignores
    retracted/rejected/cancelled analyses.
    """

    def log_change_state(ar_id, obj_id, src, dst):
        # Builds a human-readable trace of the forced state change.
        # NOTE(review): msg was built but never emitted in the original;
        # no module-level logger is visible from this block, so it is kept
        # unemitted -- confirm the intended logging call and wire it up
        msg = "While fixing {ar_id}: " \
              "state changed for {obj_id}: " \
              "{src} -> {dst}".format(**locals())

    ar = get_object(brain_or_object)
    if not IAnalysisRequest.providedBy(ar):
        return

    wf = api.get_tool('portal_workflow')
    arwf = wf['bika_ar_workflow']
    anwf = wf['bika_analysis_workflow']
    swf = wf['bika_sample_workflow']
    ignored = ['retracted', 'rejected']

    # Candidate states per workflow, preserving definition order
    tmp = filter(lambda x: x[0] not in ignored, arwf.states.items())
    arstates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] not in ignored, swf.states.items())
    samplestates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] in arstates, anwf.states.items())
    anstates = OrderedDict(tmp)

    # find least-early analysis state
    # !!! Assumes states in definitions are roughly ordered earliest to latest
    ar_dest_state = arstates.items()[0][0]
    for anstate in anstates:
        if ar.getAnalyses(review_state=anstate):
            ar_dest_state = anstate

    # Force state of AR
    ar_state = get_review_status(ar)
    if ar_state != ar_dest_state:
        changeWorkflowState(ar, arwf.id, ar_dest_state)
        log_change_state(ar.id, ar.id, ar_state, ar_dest_state)

    # Force state of Sample
    sample = ar.getSample()
    sample_state = get_review_status(sample)
    if ar_dest_state in samplestates:
        changeWorkflowState(sample, swf.id, ar_dest_state)
        log_change_state(ar.id, sample.id, sample_state, ar_dest_state)

    # Force states of Partitions
    for part in sample.objectValues():
        part_state = get_review_status(part)
        if part_state != ar_dest_state:
            # Bugfix: the original transitioned `sample` again here instead
            # of the partition being iterated (the log call already referred
            # to part.id); also reuse part_state instead of recomputing it
            changeWorkflowState(part, swf.id, ar_dest_state)
            log_change_state(ar.id, part.id, part_state, ar_dest_state)
def render(self):
    # NOTE(review): nesting reconstructed from collapsed source -- all the
    # work is scoped to the IAnalysisRequest branch; any other context
    # returns None implicitly (viewlet renders nothing). Confirm against
    # the original file layout.
    if IAnalysisRequest.providedBy(self.context):
        self.in_panic = self.context.has_analyses_in_panic()
        if not self.in_panic:
            # Nothing to render: no analysis exceeds the panic levels
            return ""
        # Whether the panic alert email was already sent for this AR
        self.panic_email_sent = bapi.get_field_value(
            instance=self.context,
            field_name='PanicEmailAlertSent',
            default=False)
        self.ar_uid = api.get_uid(self.context)
        return self.template()
def _client_address(self, client):
    """Returns the formatted address for the given client, falling back to
    the billing or physical address of the AR's contact when the client has
    no postal address set. Returns "" when no AR is available either.
    """
    address = client.getPostalAddress()
    if not address:
        ar = self.getAnalysisRequestObj()
        if not IAnalysisRequest.providedBy(ar):
            return ""
        # Fall back to the address of the AR's contact
        contact = ar.getContact()
        if contact:
            billing = contact.getBillingAddress()
            physical = contact.getPhysicalAddress()
            if billing:
                address = billing
            elif physical:
                address = physical
    return self.format_address(address)
def after_process(obj):
    """Event fired after process (Process) transition is triggered
    """
    logger.info("*** Custom after_process transition ***")
    if ISample.providedBy(obj):
        # We do not permit partitioning directly from Sample!
        # sample_events._cascade_transition(obj, 'process')
        return
    if IAnalysisRequest.providedBy(obj):
        # Generate a derived AR (and Sample) for every single partition
        create_requests_from_partitions(obj)
def after_publish(obj):
    """Event fired after publish transition is triggered
    """
    logger.info("*** Custom after_publish transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Cascade the publish transition to the analyses
        ans = obj.getAnalyses(full_objects=True)
        for analysis in ans:
            doActionFor(analysis, 'publish')

        # Promote to parent AR
        parent_ar = obj.getPrimaryAnalysisRequest()
        if parent_ar:
            doActionFor(parent_ar, "publish")
def after_send_to_lab(obj):
    """ Event fired after send_to_lab transition is triggered.
    """
    logger.info("*** Custom after_send_to_lab transition ***")
    if ISample.providedBy(obj):
        # Cascade the transition to the Sample's contents
        sample_events._cascade_transition(obj, 'send_to_lab')
        return
    if IAnalysisRequest.providedBy(obj):
        # Promote the transition to the associated sample, if any
        sample = obj.getSample()
        if sample:
            doActionFor(sample, 'send_to_lab')
def workflow_action_download_requisition(self):
    """Redirects the browser to the download URL of the last requisition
    attachment of the current Analysis Request. Samples are not supported
    yet.
    """
    context = self.context
    if ISample.providedBy(context):
        # TODO: concatenate the PDFs of all contained ARs
        logger.info("This is a sample!")
    elif IAnalysisRequest.providedBy(context):
        # Redirect to the requisition PDF
        attachment = self.get_last_requisition_attachment(context)
        if not attachment:
            return
        self.destination_url = '{}/at_download/AttachmentFile'.format(
            attachment.absolute_url())
        self.request.response.redirect(self.destination_url)
def after_send_to_pot(obj):
    """Event fired after sending to point of testing
    """
    logger.info("*** Custom after_send_to_pot transition ***")
    if ISample.providedBy(obj):
        # Cascade the transition to the Sample's contents
        sample_events._cascade_transition(obj, 'send_to_pot')
        return
    if IAnalysisRequest.providedBy(obj):
        # Transition active Analyses to sample_due
        for analysis in obj.getAnalyses(full_objects=True,
                                        cancellation_state='active'):
            doActionFor(analysis, 'sample_due')
        # Promote to parent AR
        _promote_cascade(obj, "send_to_pot")
def workflow_action_create_partitions(self):
    """Redirects the user to the partition magic view for the current AR or
    for the items selected in the listing.
    """
    if IAnalysisRequest.providedBy(self.context):
        uids = [api.get_uid(self.context)]
    else:
        uids = self.get_selected_uids()

    if not uids:
        # Bugfix: the original fell through after this redirect and issued a
        # second redirect to the partition view with an empty uids list; it
        # also called .format() on a message with no placeholder
        self.redirect(message="No items selected", level="error")
        return

    # Redirect to the partitioning magic view
    url = "{}/partition_magic?uids={}".format(self.back_url, ",".join(uids))
    self.redirect(redirect_url=url)
def generate_requisition_pdf(ar_or_sample):
    """Generates the requisition form PDF for the given Analysis Request and
    attaches it to the AR (report option 'i': ignored in report). For a
    Sample, recurses over each of its Analysis Requests. The temporary PDF
    file is removed once attached.

    :param ar_or_sample: an Analysis Request or a Sample
    """
    if not ar_or_sample:
        logger.warn("No Analysis Request or Sample provided")
        return
    if ISample.providedBy(ar_or_sample):
        # Generate one requisition PDF per contained AR
        for ar in ar_or_sample.getAnalysisRequests():
            generate_requisition_pdf(ar)
        return
    elif not IAnalysisRequest.providedBy(ar_or_sample):
        logger.warn("Type not supported: {}".format(repr(ar_or_sample)))
        return

    # Render the requisition form template and create the PDF in a temp file
    html = RequisitionFormPdf(ar_or_sample, ar_or_sample.REQUEST).template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-requisition' % ar_or_sample.id
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        logger.warn(
            "Unable to generate the PDF of requisition form for {}".format(
                ar_or_sample.id))
        return

    # Attach the pdf to the Analysis Request
    attid = ar_or_sample.aq_parent.generateUniqueId('Attachment')
    att = _createObjectByType("Attachment", ar_or_sample.aq_parent, attid)
    # NOTE(review): the file handle passed here is never closed explicitly;
    # relies on GC -- confirm whether setAttachmentFile consumes/closes it
    att.setAttachmentFile(open(pdf_fn))
    att.setReportOption('i')  # Ignore in report

    # Try to assign the Requisition Attachment Type
    query = dict(portal_type='AttachmentType', title='Requisition')
    brains = api.search(query, 'bika_setup_catalog')
    if brains:
        att_type = api.get_object(brains[0])
        att.setAttachmentType(att_type)

    # Awkward workaround to rename the file
    attf = att.getAttachmentFile()
    attf.filename = '%s.pdf' % filename
    att.setAttachmentFile(attf)
    att.unmarkCreationFlag()
    renameAfterCreation(att)

    # Append the new attachment (by UID) to the AR's existing ones
    atts = ar_or_sample.getAttachment() + [att] if \
        ar_or_sample.getAttachment() else [att]
    atts = [a.UID() for a in atts]
    ar_or_sample.setAttachment(atts)

    # The PDF lives in the Attachment now; drop the temp file
    os.remove(pdf_fn)
def __call__(self):
    """Renders the panic alert email form, pre-filling recipients, subject
    and body when the target object is an Analysis Request (taken from the
    'uid' request parameter, falling back to the current context).
    """
    plone.protect.CheckAuthenticator(self.request)
    uid = self.request.get('uid', None)
    ar = api.get_object_by_uid(uid, None) or self.context
    if not ar or not IAnalysisRequest.providedBy(ar):
        return self.template()

    # Set the default recipients for the email
    self.recipients = self.get_recipients(ar)

    # Set the subject
    self.subject = self.context.translate(
        _("Some results from ${ar} exceeded panic range",
          mapping={"ar": ar.getId()}))

    # Set the body of the message
    self.body = self.get_body_message(ar)
    return self.template()
def after_receive(obj):
    """Event fired after receive (Process) transition is triggered
    """
    logger.info("*** Custom after_receive transition ***")
    if ISample.providedBy(obj):
        # Cascade the transition to the Sample's contents
        sample_events._cascade_transition(obj, 'receive')
        return
    if IAnalysisRequest.providedBy(obj):
        # Transition active Analyses to sample_due
        for analysis in obj.getAnalyses(full_objects=True,
                                        cancellation_state='active'):
            doActionFor(analysis, 'receive')
        # Promote to parent AR
        _promote_cascade(obj, "receive")
def __call__(self, action, objects):
    """The objects passed in are Analysis Services and the context is the
    Analysis Request

    :param action: the workflow action being processed
    :param objects: Analysis Service objects (superseded by the UIDs
        explicitly submitted in the request, see NOTE below)
    """
    sample = self.context
    if not IAnalysisRequest.providedBy(sample):
        return self.redirect(message=_("No changes made"), level="warning")

    # NOTE: https://github.com/senaite/senaite.core/issues/1276
    #
    # Explicitly lookup the UIDs from the request, because the default
    # behavior of the method `get_uids` in `WorkflowActionGenericAdapter`
    # falls back to the UID of the current context if no UIDs were
    # submitted, which is in that case an `AnalysisRequest`.
    uids = self.get_uids_from_request()
    services = map(api.get_object, uids)

    # Get form values
    form = self.request.form
    prices = form.get("Price", [None])[0]
    # Per-service visibility settings, keyed by service UID
    hidden = map(
        lambda o: {
            "uid": api.get_uid(o),
            "hidden": self.is_hidden(o)
        }, services)
    specs = map(lambda service: self.get_specs(service), services)

    # Set new analyses to the sample
    sample.setAnalysisServicesSettings(hidden)
    sample.setAnalyses(uids, prices=prices, specs=specs, hidden=hidden)

    # Just in case new analyses have been added while the Sample was in a
    # "non-open" state (e.g. "to_be_verified")
    self.do_action("rollback_to_receive", [sample])

    # Reindex the analyses that have been added
    for analysis in sample.objectValues("Analysis"):
        analysis.reindexObject()

    # Reindex the Sample
    sample.reindexObject()

    # Redirect the user to success page
    self.success([sample])