def __call__(self):
    """Create a new SamplePartition inside the current Sample and
    redirect to the sample's partitions view.

    The new partition is forced into the same review state as its
    parent sample so that both stay in sync.
    """
    wf = getToolByName(self.context, 'portal_workflow')
    part = _createObjectByType("SamplePartition", self.context, tmpID())
    part.processForm()
    # NOTE(review): a SamplingWorkflowEnabled flag used to be read from
    # bika_setup here but was never used; the dead read was removed.
    # We force the object to have the same state as the parent
    sample_state = wf.getInfoFor(self.context, 'review_state')
    changeWorkflowState(part, "bika_sample_workflow", sample_state)
    self.request.RESPONSE.redirect(self.context.absolute_url() +
                                   "/partitions")
    return
def ObjectInitializedEventHandler(instance, event):
    """Bring a freshly created analysis into line with its parent AR.

    Depending on the AR's review state, the new analysis either gets
    the same workflow state as the AR, or (when the AR is already in
    'to_be_verified') the AR itself is retracted back so the new
    analysis can be processed.
    """
    # This handler fires for DuplicateAnalysis because
    # DuplicateAnalysis also provides IAnalysis.
    # DuplicateAnalysis doesn't have analysis_workflow.
    if instance.portal_type == "DuplicateAnalysis":
        return
    if instance.portal_type == 'Analysis':
        alsoProvides(instance, IRoutineAnalysis)
    workflow = getToolByName(instance, 'portal_workflow')
    ar = instance.aq_parent
    ar_state = workflow.getInfoFor(ar, 'review_state')
    ar_ws_state = workflow.getInfoFor(ar, 'worksheetanalysis_review_state')
    # Set the state of the analysis depending on the state of the AR.
    if ar_state in ('sample_registered', 'to_be_sampled', 'sampled',
                    'to_be_preserved', 'sample_due', 'sample_received'):
        changeWorkflowState(instance, "bika_analysis_workflow", ar_state)
    elif ar_state == 'to_be_verified':
        # BUGFIX: this was `ar_state in ('to_be_verified')`, which is a
        # *substring* test against a bare string (so e.g. 'verified'
        # matched too); an equality test is what was intended.
        # Apply to AR only; we don't want this transition to cascade.
        if 'workflow_skiplist' not in ar.REQUEST:
            ar.REQUEST['workflow_skiplist'] = []
        ar.REQUEST['workflow_skiplist'].append("retract all analyses")
        workflow.doActionFor(ar, 'retract')
        ar.REQUEST['workflow_skiplist'].remove("retract all analyses")
        if ar_ws_state == 'assigned':
            workflow.doActionFor(ar, 'unassign')
            skip(ar, 'unassign', unskip=True)
    instance.updateDueDate()
    return
def workflow_script_reject(self):
    """Copy real analyses to RejectAnalysis, with link to real
    create a new worksheet, with the original analyses, and new
    duplicates and references to match the rejected
    worksheet.
    """
    if skip(self, "reject"):
        return
    workflow = self.portal_workflow

    def copy_src_fields_to_dst(src, dst):
        # These will be ignored when copying field values between analyses
        ignore_fields = [
            'UID', 'id', 'title', 'allowDiscussion', 'subject',
            'description', 'location', 'contributors', 'creators',
            'effectiveDate', 'expirationDate', 'language', 'rights',
            'creation_date', 'modification_date',
            'Layout',    # ws
            'Analyses',  # ws
        ]
        fields = src.Schema().fields()
        for field in fields:
            fieldname = field.getName()
            if fieldname in ignore_fields:
                continue
            getter = getattr(
                src, 'get' + fieldname,
                src.Schema().getField(fieldname).getAccessor(src))
            setter = getattr(
                dst, 'set' + fieldname,
                dst.Schema().getField(fieldname).getMutator(dst))
            if getter is None or setter is None:
                # ComputedField
                continue
            setter(getter())

    # Map each analysis UID to its slot position in this worksheet.
    analysis_positions = {}
    for item in self.getLayout():
        analysis_positions[item['analysis_uid']] = item['position']
    old_layout = []
    new_layout = []

    # New worksheet
    worksheets = self.aq_parent
    new_ws = _createObjectByType('Worksheet', worksheets, tmpID())
    new_ws.unmarkCreationFlag()
    new_ws_id = renameAfterCreation(new_ws)
    copy_src_fields_to_dst(self, new_ws)
    new_ws.edit(Number=new_ws_id, Remarks=self.getRemarks())

    # Objects are being created inside other contexts, but we want their
    # workflow handlers to be aware of which worksheet this is occurring in.
    # We save the worksheet in request['context_uid'].
    # We reset it again below.... be very sure that this is set to the
    # UID of the containing worksheet before invoking any transitions on
    # analyses.
    self.REQUEST['context_uid'] = new_ws.UID()

    # loop all analyses
    analyses = self.getAnalyses()
    new_ws_analyses = []
    old_ws_analyses = []
    for analysis in analyses:
        # Skip published or verified analyses
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state in ['published', 'verified', 'retracted']:
            # BUGFIX: 'position' was read here before ever being
            # assigned (NameError when the first analysis is skipped);
            # resolve it from the layout map instead.
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({
                'position': position,
                'type': 'a',
                'analysis_uid': analysis.UID(),
                'container_uid': analysis.aq_parent.UID()
            })
            continue
        # Normal analyses:
        # - Create matching RejectAnalysis inside old WS
        # - Link analysis to new WS in same position
        # - Copy all field values
        # - Clear analysis result, and set Retested flag
        if analysis.portal_type == 'Analysis':
            reject = _createObjectByType('RejectAnalysis', self, tmpID())
            reject.unmarkCreationFlag()
            renameAfterCreation(reject)
            copy_src_fields_to_dst(analysis, reject)
            reject.setAnalysis(analysis)
            reject.reindexObject()
            analysis.edit(
                Result=None,
                Retested=True,
            )
            analysis.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(reject.UID())
            old_layout.append({
                'position': position,
                'type': 'r',
                'analysis_uid': reject.UID(),
                'container_uid': self.UID()
            })
            new_ws_analyses.append(analysis.UID())
            new_layout.append({
                'position': position,
                'type': 'a',
                'analysis_uid': analysis.UID(),
                'container_uid': analysis.aq_parent.UID()
            })
        # Reference analyses
        # - Create a new reference analysis in the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'ReferenceAnalysis':
            service_uid = analysis.getService().UID()
            reference = analysis.aq_parent
            reference_type = analysis.getReferenceType()
            new_analysis_uid = reference.addReferenceAnalysis(
                service_uid, reference_type)
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({
                'position': position,
                'type': reference_type,
                'analysis_uid': analysis.UID(),
                'container_uid': reference.UID()
            })
            new_ws_analyses.append(new_analysis_uid)
            new_layout.append({
                'position': position,
                'type': reference_type,
                'analysis_uid': new_analysis_uid,
                'container_uid': reference.UID()
            })
            workflow.doActionFor(analysis, 'reject')
            new_reference = reference.uid_catalog(
                UID=new_analysis_uid)[0].getObject()
            workflow.doActionFor(new_reference, 'assign')
            analysis.reindexObject()
        # Duplicate analyses
        # - Create a new duplicate inside the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'DuplicateAnalysis':
            src_analysis = analysis.getAnalysis()
            ar = src_analysis.aq_parent
            duplicate_id = new_ws.generateUniqueId('DuplicateAnalysis')
            new_duplicate = _createObjectByType('DuplicateAnalysis',
                                                new_ws, duplicate_id)
            new_duplicate.unmarkCreationFlag()
            copy_src_fields_to_dst(analysis, new_duplicate)
            workflow.doActionFor(new_duplicate, 'assign')
            new_duplicate.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({
                'position': position,
                'type': 'd',
                'analysis_uid': analysis.UID(),
                'container_uid': self.UID()
            })
            new_ws_analyses.append(new_duplicate.UID())
            new_layout.append({
                'position': position,
                'type': 'd',
                'analysis_uid': new_duplicate.UID(),
                'container_uid': new_ws.UID()
            })
            workflow.doActionFor(analysis, 'reject')
            analysis.reindexObject()

    new_ws.setAnalyses(new_ws_analyses)
    new_ws.setLayout(new_layout)
    new_ws.replaces_rejected_worksheet = self.UID()
    # Analyses already confirmed on the old WS are pushed back to
    # 'sample_received' on the replacement WS so they can be re-done.
    for analysis in new_ws.getAnalyses():
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state == 'to_be_verified':
            changeWorkflowState(analysis, "bika_analysis_workflow",
                                "sample_received")
    self.REQUEST['context_uid'] = self.UID()
    self.setLayout(old_layout)
    self.setAnalyses(old_ws_analyses)
    self.replaced_by = new_ws.UID()
def workflow_action_save_analyses_button(self):
    """Handler for the 'Save Analyses' button on AR Manage Analyses.

    Persists the selected analysis services on the AR (hidden flags,
    result specifications, prices and sample-partition links), and if
    the AR has already progressed past 'sample_received', retracts it
    so the newly added analyses can be processed.
    """
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    bsc = self.context.bika_setup_catalog
    action, came_from = WorkflowAction._get_form_workflow_action(self)
    # AR Manage Analyses: save Analyses
    ar = self.context
    sample = ar.getSample()
    objects = WorkflowAction._get_selected_items(self)
    if not objects:
        message = _("No analyses have been selected")
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.destination_url = self.context.absolute_url() + "/analyses"
        self.request.response.redirect(self.destination_url)
        return
    Analyses = objects.keys()
    prices = form.get("Price", [None])[0]

    # Hidden analyses?
    # https://jira.bikalabs.com/browse/LIMS-1324
    outs = []
    hiddenans = form.get('Hidden', {})
    for uid in Analyses:
        # Checkbox values arrive as 'on' when ticked.
        hidden = hiddenans.get(uid, '') == 'on'
        outs.append({'uid': uid, 'hidden': hidden})
    ar.setAnalysisServicesSettings(outs)

    # Build per-service result specifications (min/max/error) either
    # from the submitted form values or as empty defaults.
    specs = {}
    if form.get("min", None):
        for service_uid in Analyses:
            service = bsc(UID=service_uid)[0].getObject()
            keyword = service.getKeyword()
            specs[service_uid] = {
                "min": form["min"][0][service_uid],
                "max": form["max"][0][service_uid],
                "error": form["error"][0][service_uid],
                "keyword": keyword,
                "uid": service_uid,
            }
    else:
        for service_uid in Analyses:
            service = bsc(UID=service_uid)[0].getObject()
            keyword = service.getKeyword()
            specs[service_uid] = {"min": "", "max": "", "error": "",
                                  "keyword": keyword, "uid": service_uid}

    new = ar.setAnalyses(Analyses, prices=prices, specs=specs.values())

    # link analyses and partitions
    # If Bika Setup > Analyses > 'Display individual sample
    # partitions' is checked, no Partitions available.
    # https://github.com/bikalabs/Bika-LIMS/issues/1030
    if 'Partition' in form:
        for service_uid, service in objects.items():
            part_id = form['Partition'][0][service_uid]
            part = sample[part_id]
            analysis = ar[service.getKeyword()]
            analysis.setSamplePartition(part)
            analysis.reindexObject()

    if new:
        for analysis in new:
            # if the AR has progressed past sample_received, we need to
            # bring it back.
            ar_state = workflow.getInfoFor(ar, 'review_state')
            if ar_state in ('attachment_due', 'to_be_verified'):
                # Apply to AR only; we don't want this transition to cascade.
                # BUGFIX: guard against a missing 'workflow_skiplist'
                # request key, which previously raised KeyError here.
                if 'workflow_skiplist' not in ar.REQUEST:
                    ar.REQUEST['workflow_skiplist'] = []
                ar.REQUEST['workflow_skiplist'].append("retract all analyses")
                workflow.doActionFor(ar, 'retract')
                ar.REQUEST['workflow_skiplist'].remove("retract all analyses")
                ar_state = workflow.getInfoFor(ar, 'review_state')
            # Then we need to forward new analyses state
            analysis.updateDueDate()
            changeWorkflowState(analysis, 'bika_analysis_workflow', ar_state)

    message = PMF("Changes saved.")
    self.context.plone_utils.addPortalMessage(message, 'info')
    self.destination_url = self.context.absolute_url()
    self.request.response.redirect(self.destination_url)
def cloneAR(self, ar):
    """Create a copy of AnalysisRequest ``ar`` in the same container.

    All metadata fields and analyses (including results) are copied
    over. Each cloned analysis is initialized and forced into the
    'to_be_verified' state. When the schema supports it, the original
    and the clone are cross-linked via Child/Parent references.

    :param ar: the AnalysisRequest to clone
    :returns: the newly created AnalysisRequest
    """
    newar = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    newar.title = ar.title
    newar.description = ar.description
    newar.setContact(ar.getContact())
    newar.setCCContact(ar.getCCContact())
    newar.setCCEmails(ar.getCCEmails())
    newar.setBatch(ar.getBatch())
    newar.setTemplate(ar.getTemplate())
    newar.setProfile(ar.getProfile())
    newar.setSamplingDate(ar.getSamplingDate())
    newar.setSampleType(ar.getSampleType())
    newar.setSamplePoint(ar.getSamplePoint())
    newar.setStorageLocation(ar.getStorageLocation())
    newar.setSamplingDeviation(ar.getSamplingDeviation())
    newar.setPriority(ar.getPriority())
    newar.setSampleCondition(ar.getSampleCondition())
    newar.setSample(ar.getSample())
    newar.setClientOrderNumber(ar.getClientOrderNumber())
    newar.setClientReference(ar.getClientReference())
    newar.setClientSampleID(ar.getClientSampleID())
    newar.setDefaultContainerType(ar.getDefaultContainerType())
    newar.setAdHoc(ar.getAdHoc())
    newar.setComposite(ar.getComposite())
    newar.setReportDryMatter(ar.getReportDryMatter())
    newar.setInvoiceExclude(ar.getInvoiceExclude())
    newar.setAttachment(ar.getAttachment())
    newar.setInvoice(ar.getInvoice())
    newar.setDateReceived(ar.getDateReceived())
    newar.setMemberDiscount(ar.getMemberDiscount())
    # Set the results for each AR analysis
    ans = ar.getAnalyses(full_objects=True)
    for an in ans:
        nan = _createObjectByType("Analysis", newar, an.getKeyword())
        nan.setService(an.getService())
        nan.setCalculation(an.getCalculation())
        nan.setInterimFields(an.getInterimFields())
        nan.setResult(an.getResult())
        nan.setResultDM(an.getResultDM())
        # BUGFIX: this used to read `nan.setRetested = False,` which
        # clobbered the mutator with a one-element tuple instead of
        # calling it; invoke the mutator properly.
        nan.setRetested(False)
        nan.setMaxTimeAllowed(an.getMaxTimeAllowed())
        nan.setDueDate(an.getDueDate())
        nan.setDuration(an.getDuration())
        nan.setReportDryMatter(an.getReportDryMatter())
        nan.setAnalyst(an.getAnalyst())
        nan.setInstrument(an.getInstrument())
        nan.setSamplePartition(an.getSamplePartition())
        nan.unmarkCreationFlag()
        notify(ObjectInitializedEvent(nan))
        changeWorkflowState(nan,
                            'bika_analysis_workflow',
                            'to_be_verified')
        nan.reindexObject()
    newar.reindexObject()
    newar.aq_parent.reindexObject()
    renameAfterCreation(newar)
    newar.setRequestID(newar.getId())
    if hasattr(ar, 'setChildAnalysisRequest'):
        ar.setChildAnalysisRequest(newar)
        newar.setParentAnalysisRequest(ar)
    return newar
def workflow_action_retract_ar(self):
    """Invalidate a published AR and open a replacement copy.

    Clones the AR, retracts the original to 'invalid', forces the
    clone into 'to_be_verified', then emails the client contacts and
    lab managers that the published results are under investigation.
    """
    workflow = getToolByName(self.context, 'portal_workflow')
    # AR should be retracted
    # Can't transition inactive ARs
    if not isActive(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return
    # 1. Copies the AR linking the original one and viceversa
    ar = self.context
    newar = self.cloneAR(ar)
    # 2. The old AR gets a status of 'invalid'
    workflow.doActionFor(ar, 'retract_ar')
    # 3. The new AR copy opens in status 'to be verified'
    changeWorkflowState(newar, 'bika_ar_workflow', 'to_be_verified')
    # 4. The system immediately alerts the client contacts who ordered
    # the results, per email and SMS, that a possible mistake has been
    # picked up and is under investigation.
    # A much possible information is provided in the email, linking
    # to the AR online.
    laboratory = self.context.bika_setup.laboratory
    lab_address = "<br/>".join(laboratory.getPrintAddress())
    mime_msg = MIMEMultipart('related')
    # NOTE(review): 'Erroneus' is a typo but also an i18n msgid; left
    # untouched so existing translations keep matching.
    mime_msg['Subject'] = t(_("Erroneus result publication from ${request_id}",
                              mapping={"request_id": ar.getRequestID()}))
    mime_msg['From'] = formataddr(
        (encode_header(laboratory.getName()), laboratory.getEmailAddress()))
    # Build the recipient list: primary contact, CC contacts, and all
    # LabManagers group members, de-duplicated.
    to = []
    contact = ar.getContact()
    if contact:
        to.append(formataddr((encode_header(contact.Title()),
                              contact.getEmailAddress())))
    for cc in ar.getCCContact():
        formatted = formataddr((encode_header(cc.Title()),
                                cc.getEmailAddress()))
        if formatted not in to:
            to.append(formatted)
    managers = self.context.portal_groups.getGroupMembers('LabManagers')
    for bcc in managers:
        user = self.portal.acl_users.getUser(bcc)
        if user:
            uemail = user.getProperty('email')
            ufull = user.getProperty('fullname')
            formatted = formataddr((encode_header(ufull), uemail))
            if formatted not in to:
                to.append(formatted)
    mime_msg['To'] = ','.join(to)
    # HTML anchors linking to the old and new AR views.
    aranchor = "<a href='%s'>%s</a>" % (ar.absolute_url(),
                                        ar.getRequestID())
    naranchor = "<a href='%s'>%s</a>" % (newar.absolute_url(),
                                         newar.getRequestID())
    # Only include remarks when explicitly requested via the form and
    # remarks exist; the text after '===' is the latest remark entry.
    addremarks = ('addremarks' in self.request and ar.getRemarks()) \
        and ("<br/><br/>" + _("Additional remarks:") + "<br/>"
             + ar.getRemarks().split("===")[1].strip()
             + "<br/><br/>") \
        or ''
    sub_d = dict(request_link=aranchor,
                 new_request_link=naranchor,
                 remarks=addremarks,
                 lab_address=lab_address)
    body = Template("Some errors have been detected in the results report "
                    "published from the Analysis Request $request_link. The Analysis "
                    "Request $new_request_link has been created automatically and the "
                    "previous has been invalidated.<br/>The possible mistake "
                    "has been picked up and is under investigation.<br/><br/>"
                    "$remarks $lab_address").safe_substitute(sub_d)
    msg_txt = MIMEText(safe_unicode(body).encode('utf-8'), _subtype='html')
    mime_msg.preamble = 'This is a multi-part MIME message.'
    mime_msg.attach(msg_txt)
    try:
        host = getToolByName(self.context, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception as msg:
        # Mail failure is non-fatal: warn the user but keep the
        # invalidation in place.
        message = _('Unable to send an email to alert lab '
                    'client contacts that the Analysis Request has been '
                    'retracted: ${error}',
                    mapping={'error': safe_unicode(msg)})
        self.context.plone_utils.addPortalMessage(message, 'warning')
    message = _('${items} invalidated.',
                mapping={'items': ar.getRequestID()})
    self.context.plone_utils.addPortalMessage(message, 'warning')
    self.request.response.redirect(newar.absolute_url())
def workflow_action_save_analyses_button(self):
    """Handler for the 'Save Analyses' button on AR Manage Analyses.

    Persists the selected analysis services on the AR (hidden flags,
    result specifications, prices and sample-partition links), and if
    the AR has already progressed past 'sample_received', retracts it
    so the newly added analyses can be processed.
    """
    form = self.request.form
    workflow = getToolByName(self.context, 'portal_workflow')
    bsc = self.context.bika_setup_catalog
    action, came_from = WorkflowAction._get_form_workflow_action(self)
    # AR Manage Analyses: save Analyses
    ar = self.context
    sample = ar.getSample()
    objects = WorkflowAction._get_selected_items(self)
    if not objects:
        message = _("No analyses have been selected")
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.destination_url = self.context.absolute_url() + "/analyses"
        self.request.response.redirect(self.destination_url)
        return
    Analyses = objects.keys()
    prices = form.get("Price", [None])[0]

    # Hidden analyses?
    # https://jira.bikalabs.com/browse/LIMS-1324
    outs = []
    hiddenans = form.get('Hidden', {})
    for uid in Analyses:
        # Checkbox values arrive as 'on' when ticked.
        hidden = hiddenans.get(uid, '') == 'on'
        outs.append({'uid': uid, 'hidden': hidden})
    ar.setAnalysisServicesSettings(outs)

    # Build per-service result specifications (min/max/error) either
    # from the submitted form values or as empty defaults.
    specs = {}
    if form.get("min", None):
        for service_uid in Analyses:
            service = bsc(UID=service_uid)[0].getObject()
            keyword = service.getKeyword()
            specs[service_uid] = {
                "min": form["min"][0][service_uid],
                "max": form["max"][0][service_uid],
                "error": form["error"][0][service_uid],
                "keyword": keyword,
                "uid": service_uid,
            }
    else:
        for service_uid in Analyses:
            service = bsc(UID=service_uid)[0].getObject()
            keyword = service.getKeyword()
            specs[service_uid] = {
                "min": "",
                "max": "",
                "error": "",
                "keyword": keyword,
                "uid": service_uid
            }

    new = ar.setAnalyses(Analyses, prices=prices, specs=specs.values())

    # link analyses and partitions
    # If Bika Setup > Analyses > 'Display individual sample
    # partitions' is checked, no Partitions available.
    # https://github.com/bikalabs/Bika-LIMS/issues/1030
    if 'Partition' in form:
        for service_uid, service in objects.items():
            part_id = form['Partition'][0][service_uid]
            part = sample[part_id]
            analysis = ar[service.getKeyword()]
            analysis.setSamplePartition(part)
            analysis.reindexObject()

    if new:
        for analysis in new:
            # if the AR has progressed past sample_received, we need to
            # bring it back.
            ar_state = workflow.getInfoFor(ar, 'review_state')
            if ar_state in ('attachment_due', 'to_be_verified'):
                # Apply to AR only; we don't want this transition to cascade.
                # BUGFIX: guard against a missing 'workflow_skiplist'
                # request key, which previously raised KeyError here.
                if 'workflow_skiplist' not in ar.REQUEST:
                    ar.REQUEST['workflow_skiplist'] = []
                ar.REQUEST['workflow_skiplist'].append(
                    "retract all analyses")
                workflow.doActionFor(ar, 'retract')
                ar.REQUEST['workflow_skiplist'].remove(
                    "retract all analyses")
                ar_state = workflow.getInfoFor(ar, 'review_state')
            # Then we need to forward new analyses state
            analysis.updateDueDate()
            changeWorkflowState(analysis, 'bika_analysis_workflow', ar_state)

    message = PMF("Changes saved.")
    self.context.plone_utils.addPortalMessage(message, 'info')
    self.destination_url = self.context.absolute_url()
    self.request.response.redirect(self.destination_url)
def workflow_action_retract_ar(self):
    """Invalidate a published AR and open a replacement copy.

    Clones the AR, retracts the original to 'invalid', forces the
    clone into 'to_be_verified', then emails the client contacts and
    lab managers that the published results are under investigation.
    """
    workflow = getToolByName(self.context, 'portal_workflow')
    # AR should be retracted
    # Can't transition inactive ARs
    if not isActive(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return
    # 1. Copies the AR linking the original one and viceversa
    ar = self.context
    newar = self.cloneAR(ar)
    # 2. The old AR gets a status of 'invalid'
    workflow.doActionFor(ar, 'retract_ar')
    # 3. The new AR copy opens in status 'to be verified'
    changeWorkflowState(newar, 'bika_ar_workflow', 'to_be_verified')
    # 4. The system immediately alerts the client contacts who ordered
    # the results, per email and SMS, that a possible mistake has been
    # picked up and is under investigation.
    # A much possible information is provided in the email, linking
    # to the AR online.
    laboratory = self.context.bika_setup.laboratory
    lab_address = "<br/>".join(laboratory.getPrintAddress())
    mime_msg = MIMEMultipart('related')
    # NOTE(review): 'Erroneus' is a typo but also an i18n msgid; left
    # untouched so existing translations keep matching.
    mime_msg['Subject'] = t(
        _("Erroneus result publication from ${request_id}",
          mapping={"request_id": ar.getRequestID()}))
    mime_msg['From'] = formataddr((encode_header(laboratory.getName()),
                                   laboratory.getEmailAddress()))
    # Build the recipient list: primary contact, CC contacts, and all
    # LabManagers group members, de-duplicated.
    to = []
    contact = ar.getContact()
    if contact:
        to.append(
            formataddr((encode_header(contact.Title()),
                        contact.getEmailAddress())))
    for cc in ar.getCCContact():
        formatted = formataddr(
            (encode_header(cc.Title()), cc.getEmailAddress()))
        if formatted not in to:
            to.append(formatted)
    managers = self.context.portal_groups.getGroupMembers('LabManagers')
    for bcc in managers:
        user = self.portal.acl_users.getUser(bcc)
        if user:
            uemail = user.getProperty('email')
            ufull = user.getProperty('fullname')
            formatted = formataddr((encode_header(ufull), uemail))
            if formatted not in to:
                to.append(formatted)
    mime_msg['To'] = ','.join(to)
    # HTML anchors linking to the old and new AR views.
    aranchor = "<a href='%s'>%s</a>" % (ar.absolute_url(),
                                        ar.getRequestID())
    naranchor = "<a href='%s'>%s</a>" % (newar.absolute_url(),
                                         newar.getRequestID())
    # Only include remarks when explicitly requested via the form and
    # remarks exist; the text after '===' is the latest remark entry.
    addremarks = ('addremarks' in self.request and ar.getRemarks()) \
        and ("<br/><br/>" + _("Additional remarks:") + "<br/>"
             + ar.getRemarks().split("===")[1].strip()
             + "<br/><br/>") \
        or ''
    sub_d = dict(request_link=aranchor,
                 new_request_link=naranchor,
                 remarks=addremarks,
                 lab_address=lab_address)
    body = Template(
        "Some errors have been detected in the results report "
        "published from the Analysis Request $request_link. The Analysis "
        "Request $new_request_link has been created automatically and the "
        "previous has been invalidated.<br/>The possible mistake "
        "has been picked up and is under investigation.<br/><br/>"
        "$remarks $lab_address").safe_substitute(sub_d)
    msg_txt = MIMEText(safe_unicode(body).encode('utf-8'), _subtype='html')
    mime_msg.preamble = 'This is a multi-part MIME message.'
    mime_msg.attach(msg_txt)
    try:
        host = getToolByName(self.context, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception as msg:
        # Mail failure is non-fatal: warn the user but keep the
        # invalidation in place.
        message = _(
            'Unable to send an email to alert lab '
            'client contacts that the Analysis Request has been '
            'retracted: ${error}',
            mapping={'error': safe_unicode(msg)})
        self.context.plone_utils.addPortalMessage(message, 'warning')
    message = _('${items} invalidated.',
                mapping={'items': ar.getRequestID()})
    self.context.plone_utils.addPortalMessage(message, 'warning')
    self.request.response.redirect(newar.absolute_url())
def workflow_script_reject(self):
    """Copy real analyses to RejectAnalysis, with link to real
    create a new worksheet, with the original analyses, and new
    duplicates and references to match the rejected
    worksheet.
    """
    if skip(self, "reject"):
        return
    workflow = self.portal_workflow

    def copy_src_fields_to_dst(src, dst):
        # These will be ignored when copying field values between analyses
        ignore_fields = ['UID',
                         'id',
                         'title',
                         'allowDiscussion',
                         'subject',
                         'description',
                         'location',
                         'contributors',
                         'creators',
                         'effectiveDate',
                         'expirationDate',
                         'language',
                         'rights',
                         'creation_date',
                         'modification_date',
                         'Layout',    # ws
                         'Analyses',  # ws
                         ]
        fields = src.Schema().fields()
        for field in fields:
            fieldname = field.getName()
            if fieldname in ignore_fields:
                continue
            getter = getattr(src, 'get' + fieldname,
                             src.Schema().getField(fieldname).getAccessor(src))
            setter = getattr(dst, 'set' + fieldname,
                             dst.Schema().getField(fieldname).getMutator(dst))
            if getter is None or setter is None:
                # ComputedField
                continue
            setter(getter())

    # Map each analysis UID to its slot position in this worksheet.
    analysis_positions = {}
    for item in self.getLayout():
        analysis_positions[item['analysis_uid']] = item['position']
    old_layout = []
    new_layout = []

    # New worksheet
    worksheets = self.aq_parent
    new_ws = _createObjectByType('Worksheet', worksheets, tmpID())
    new_ws.unmarkCreationFlag()
    new_ws_id = renameAfterCreation(new_ws)
    copy_src_fields_to_dst(self, new_ws)
    new_ws.edit(
        Number=new_ws_id,
        Remarks=self.getRemarks()
    )

    # Objects are being created inside other contexts, but we want their
    # workflow handlers to be aware of which worksheet this is occurring in.
    # We save the worksheet in request['context_uid'].
    # We reset it again below.... be very sure that this is set to the
    # UID of the containing worksheet before invoking any transitions on
    # analyses.
    self.REQUEST['context_uid'] = new_ws.UID()

    # loop all analyses
    analyses = self.getAnalyses()
    new_ws_analyses = []
    old_ws_analyses = []
    for analysis in analyses:
        # Skip published or verified analyses
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state in ['published', 'verified', 'retracted']:
            # BUGFIX: 'position' was read here before ever being
            # assigned (NameError when the first analysis is skipped);
            # resolve it from the layout map instead.
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': 'a',
                               'analysis_uid': analysis.UID(),
                               'container_uid': analysis.aq_parent.UID()})
            continue
        # Normal analyses:
        # - Create matching RejectAnalysis inside old WS
        # - Link analysis to new WS in same position
        # - Copy all field values
        # - Clear analysis result, and set Retested flag
        if analysis.portal_type == 'Analysis':
            reject = _createObjectByType('RejectAnalysis', self, tmpID())
            reject.unmarkCreationFlag()
            renameAfterCreation(reject)
            copy_src_fields_to_dst(analysis, reject)
            reject.setAnalysis(analysis)
            reject.reindexObject()
            analysis.edit(
                Result=None,
                Retested=True,
            )
            analysis.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(reject.UID())
            old_layout.append({'position': position,
                               'type': 'r',
                               'analysis_uid': reject.UID(),
                               'container_uid': self.UID()})
            new_ws_analyses.append(analysis.UID())
            new_layout.append({'position': position,
                               'type': 'a',
                               'analysis_uid': analysis.UID(),
                               'container_uid': analysis.aq_parent.UID()})
        # Reference analyses
        # - Create a new reference analysis in the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'ReferenceAnalysis':
            service_uid = analysis.getService().UID()
            reference = analysis.aq_parent
            reference_type = analysis.getReferenceType()
            new_analysis_uid = reference.addReferenceAnalysis(service_uid,
                                                              reference_type)
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': reference_type,
                               'analysis_uid': analysis.UID(),
                               'container_uid': reference.UID()})
            new_ws_analyses.append(new_analysis_uid)
            new_layout.append({'position': position,
                               'type': reference_type,
                               'analysis_uid': new_analysis_uid,
                               'container_uid': reference.UID()})
            workflow.doActionFor(analysis, 'reject')
            new_reference = reference.uid_catalog(
                UID=new_analysis_uid)[0].getObject()
            workflow.doActionFor(new_reference, 'assign')
            analysis.reindexObject()
        # Duplicate analyses
        # - Create a new duplicate inside the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'DuplicateAnalysis':
            src_analysis = analysis.getAnalysis()
            ar = src_analysis.aq_parent
            duplicate_id = new_ws.generateUniqueId('DuplicateAnalysis')
            new_duplicate = _createObjectByType('DuplicateAnalysis',
                                                new_ws, duplicate_id)
            new_duplicate.unmarkCreationFlag()
            copy_src_fields_to_dst(analysis, new_duplicate)
            workflow.doActionFor(new_duplicate, 'assign')
            new_duplicate.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': 'd',
                               'analysis_uid': analysis.UID(),
                               'container_uid': self.UID()})
            new_ws_analyses.append(new_duplicate.UID())
            new_layout.append({'position': position,
                               'type': 'd',
                               'analysis_uid': new_duplicate.UID(),
                               'container_uid': new_ws.UID()})
            workflow.doActionFor(analysis, 'reject')
            analysis.reindexObject()

    new_ws.setAnalyses(new_ws_analyses)
    new_ws.setLayout(new_layout)
    new_ws.replaces_rejected_worksheet = self.UID()
    # Analyses already confirmed on the old WS are pushed back to
    # 'sample_received' on the replacement WS so they can be re-done.
    for analysis in new_ws.getAnalyses():
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state == 'to_be_verified':
            changeWorkflowState(analysis, "bika_analysis_workflow",
                                "sample_received")
    self.REQUEST['context_uid'] = self.UID()
    self.setLayout(old_layout)
    self.setAnalyses(old_ws_analyses)
    self.replaced_by = new_ws.UID()