def workflow_script_assign(self):
    """After-transition handler for 'assign' on an analysis.

    Reindexes the analysis, retracts the target worksheet back to 'open'
    when needed, and escalates 'assign' to the parent AR once no analysis
    remains unassigned.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "assign"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["worksheetanalysis_review_state", ])
    rc = getToolByName(self, REFERENCE_CATALOG)
    # The worksheet we are being assigned to is passed in the request.
    wsUID = self.REQUEST["context_uid"]
    ws = rc.lookupObject(wsUID)
    # retract the worksheet to 'open'
    ws_state = workflow.getInfoFor(ws, "review_state")
    if ws_state != "open":
        # Flag the request so the retract cascade skips the analyses.
        if "workflow_skiplist" not in self.REQUEST:
            self.REQUEST["workflow_skiplist"] = ["retract all analyses", ]
        else:
            self.REQUEST["workflow_skiplist"].append("retract all analyses")
        allowed_transitions = [t["id"] for t in workflow.getTransitionsFor(ws)]
        if "retract" in allowed_transitions:
            workflow.doActionFor(ws, "retract")
    # If all analyses in this AR have been assigned,
    # escalate the action to the parent AR
    if not skip(self, "assign", peek=True):
        if not self.getAnalyses(worksheetanalysis_review_state="unassigned"):
            try:
                allowed_transitions = [t["id"] for t in
                                       workflow.getTransitionsFor(self)]
                if "assign" in allowed_transitions:
                    workflow.doActionFor(self, "assign")
            except WorkflowException:
                # Only ignore workflow refusals; the previous bare
                # 'except' also hid unrelated programming errors.
                pass
def workflow_script_open(self, state_info):
    """After-transition handler for 'open': reset receipt data so the
    object is back in its freshly-opened state.
    """
    skip(self, 'open')
    # Clear the received date and refresh the affected catalog index.
    self.setDateReceived(None)
    self.reindexObject(idxs=["getDateReceived", ])
def workflow_script_verify(self):
    """Verify handler for an analysis.

    Reindexes the analysis, then escalates 'verify' to the parent AR and,
    if assigned, to the worksheet once all of their analyses are verified.
    Returns False (no cascade) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "verify"):
        return
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    self.reindexObject(idxs=["review_state", ])
    # If all analyses in this AR are verified
    # escalate the action to the parent AR
    ar = self.aq_parent
    if not skip(ar, "verify", peek=True):
        all_verified = True
        for a in ar.getAnalyses():
            # Any sibling still in an earlier state blocks the escalation.
            if a.review_state in \
                    ("to_be_sampled", "to_be_preserved", "sample_due",
                     "sample_received", "attachment_due", "to_be_verified"):
                all_verified = False
                break
        if all_verified:
            # Flag the request so the AR's own cascade skips analyses.
            if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
                self.REQUEST["workflow_skiplist"].append("verify all analyses")
            workflow.doActionFor(ar, "verify")
    # If this is on a worksheet and all it's other analyses are verified,
    # then verify the worksheet.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        ws_state = workflow.getInfoFor(ws, "review_state")
        if ws_state == "to_be_verified" and not skip(
                ws, "verify", peek=True):
            all_verified = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                        ("to_be_sampled", "to_be_preserved", "sample_due",
                         "sample_received", "attachment_due",
                         "to_be_verified", "assigned"):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    all_verified = False
                    break
            if all_verified:
                if not "verify all analyses" in self.REQUEST[
                        'workflow_skiplist']:
                    self.REQUEST["workflow_skiplist"].append(
                        "verify all analyses")
                workflow.doActionFor(ws, "verify")
def after_cancel(obj):
    """After-transition handler for 'cancel' on an analysis.

    If the analysis is currently assigned to a worksheet it is removed
    from that worksheet; the analysis and its request are then reindexed.
    """
    if skip(obj, "cancel"):
        return
    workflow = getToolByName(obj, "portal_workflow")
    # If it is assigned to a worksheet, unassign it.
    state = workflow.getInfoFor(obj, 'worksheetanalysis_review_state')
    if state == 'assigned':
        ws = obj.getWorksheet()
        # NOTE(review): 'cancel' is unskipped before removal — presumably
        # so transitions triggered by removeAnalysis are not suppressed.
        skip(obj, "cancel", unskip=True)
        ws.removeAnalysis(obj)
    obj.reindexObject()
    _reindex_request(obj)
def workflow_script_cancel(self):
    """Cancel handler for an analysis: if it is assigned to a worksheet,
    remove it from that worksheet.
    """
    if skip(self, "cancel"):
        return
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["worksheetanalysis_review_state", ])
    # If it is assigned to a worksheet, unassign it.
    if workflow.getInfoFor(self, 'worksheetanalysis_review_state') == 'assigned':
        ws = self.getBackReferences("WorksheetAnalysis")[0]
        # NOTE(review): 'cancel' is unskipped before removal — presumably
        # so transitions triggered by removeAnalysis are not suppressed.
        skip(self, "cancel", unskip=True)
        ws.removeAnalysis(self)
def workflow_script_cancel(self):
    """Cancel handler for a sample partition: when every sibling
    partition is cancelled, cancel the parent sample as well.
    """
    if skip(self, "cancel"):
        return
    sample = self.aq_parent
    workflow = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["cancellation_state", ])
    sample_c_state = workflow.getInfoFor(sample, 'cancellation_state')
    # if all sibling partitions are cancelled, cancel sample
    if skip(sample, "cancel", peek=True):
        return
    siblings = sample.objectValues("SamplePartition")
    any_active = any(
        workflow.getInfoFor(sp, 'cancellation_state') == 'active'
        for sp in siblings)
    if sample_c_state == 'active' and not any_active:
        workflow.doActionFor(sample, 'cancel')
def workflow_script_cancel(self):
    """Cancel handler for a sample: cascade 'cancel' to every active
    partition and every active analysis request.
    """
    if skip(self, "cancel"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["cancellation_state"])
    # Cancel all partitions
    for part in self.objectValues("SamplePartition"):
        if workflow.getInfoFor(part, "cancellation_state") == "active":
            workflow.doActionFor(part, "cancel")
    # cancel all ARs for this sample.
    for ar in self.getAnalysisRequests():
        if skip(ar, "cancel", peek=True):
            continue
        if workflow.getInfoFor(ar, "cancellation_state") == "active":
            workflow.doActionFor(ar, "cancel")
def workflow_script_sample_due(self):
    """Propagate 'sample_due' to every AnalysisRequest of this object."""
    if skip(self, "sample_due"):
        return
    # All associated AnalysisRequests are also transitioned
    for request in self.getAnalysisRequests():
        doActionFor(request, "sample_due")
        request.reindexObject()
def workflow_script_publish(self):
    """Publish handler for an analysis.

    Stamps the publication date and computes the analysis duration and
    earliness relative to the service's maximum allowed turnaround time.
    Returns False (no processing) when the analysis is cancelled.
    """
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "publish"):
        return
    endtime = DateTime()
    self.setDateAnalysisPublished(endtime)
    # Start from the AR's received date, falling back to creation time.
    starttime = self.aq_parent.getDateReceived()
    starttime = starttime or self.created()
    service = self.getService()
    maxtime = service.getMaxTimeAllowed()
    # set the instance duration value to default values
    # in case of no calendars or max hours
    if maxtime:
        # DateTime subtraction yields days; convert to minutes.
        duration = (endtime - starttime) * 24 * 60
        # Bug fix: the days component was previously read from the
        # "hours" key, so any allowed days were never counted.
        maxtime_delta = int(maxtime.get("days", 0)) * 86400
        maxtime_delta += int(maxtime.get("hours", 0)) * 3600
        maxtime_delta += int(maxtime.get("minutes", 0)) * 60
        # NOTE(review): duration is in minutes while maxtime_delta is in
        # seconds — the units look inconsistent; confirm intended scale
        # before changing, as stored values depend on it.
        earliness = duration - maxtime_delta
    else:
        earliness = 0
        duration = 0
    self.setDuration(duration)
    self.setEarliness(earliness)
    self.reindexObject()
def workflow_script_verify(self):
    """Verify handler (multi-verification variant, verificator tracking).

    Attempts to verify every contained analysis awaiting verification.
    When an analysis supports multiple verifications, the current user is
    recorded as a verificator and the transition only fires once the
    required number of verifications is reached.
    """
    if skip(self, "verify"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
        # verify all analyses in this object.
        analyses = self.getAnalyses()
        for analysis in analyses:
            state = workflow.getInfoFor(analysis, 'review_state', '')
            if state != 'to_be_verified':
                continue
            if (hasattr(analysis, 'getNumberOfVerifications') and
                    hasattr(analysis, 'getNumberOfRequiredVerifications')):
                # For the 'verify' transition to (effectively) take place,
                # we need to check if the required number of verifications
                # for the analysis is, at least, the number of
                # verifications performed previously +1
                success = True
                revers = analysis.getNumberOfRequiredVerifications()
                nmvers = analysis.getNumberOfVerifications()
                # Record the current user as a verificator first.
                username = getToolByName(
                    self, 'portal_membership').getAuthenticatedMember().getUserName()
                analysis.addVerificator(username)
                if revers - nmvers <= 1:
                    success, message = doActionFor(analysis, 'verify')
                    if not success:
                        # If failed, delete last verificator.
                        analysis.deleteLastVerificator()
            else:
                doActionFor(analysis, 'verify')
def workflow_script_verify(self):
    """Verify handler (multi-verification variant, counter tracking).

    Attempts to verify every contained analysis awaiting verification.
    When an analysis supports multiple verifications, its verification
    counter is incremented and the transition only fires once the
    required number of verifications is reached.
    """
    if skip(self, "verify"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
        # verify all analyses in this object.
        analyses = self.getAnalyses()
        for analysis in analyses:
            state = workflow.getInfoFor(analysis, 'review_state', '')
            if state != 'to_be_verified':
                continue
            if (hasattr(analysis, 'getNumberOfVerifications') and
                    hasattr(analysis, 'getNumberOfRequiredVerifications')):
                # For the 'verify' transition to (effectively) take place,
                # we need to check if the required number of verifications
                # for the analysis is, at least, the number of
                # verifications performed previously +1
                success = True
                revers = analysis.getNumberOfRequiredVerifications()
                nmvers = analysis.getNumberOfVerifications()
                # Count this verification attempt up-front.
                analysis.setNumberOfVerifications(nmvers + 1)
                if revers - nmvers <= 1:
                    success, message = doActionFor(analysis, 'verify')
                    if not success:
                        # If failed, restore to the previous number
                        analysis.setNumberOfVerifications(nmvers)
            else:
                doActionFor(analysis, 'verify')
def workflow_script_attach(self):
    """Attach handler for a worksheet: reindex only, no cascade."""
    if skip(self, "attach"):
        return
    self.reindexObject(idxs=["review_state", ])
    # Don't cascade. Shouldn't be attaching WSs for now (if ever).
    return
def workflow_script_verify(self):
    """Verify handler for an analysis.

    Reindexes the analysis, then escalates 'verify' to the parent AR and,
    if assigned, to the worksheet once all of their analyses are verified.
    Returns False (no cascade) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "verify"):
        return
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    self.reindexObject(idxs=["review_state", ])
    # If all analyses in this AR are verified
    # escalate the action to the parent AR
    ar = self.aq_parent
    if not skip(ar, "verify", peek=True):
        all_verified = True
        for a in ar.getAnalyses():
            # Any sibling still in an earlier state blocks the escalation.
            if a.review_state in \
                    ("to_be_sampled", "to_be_preserved", "sample_due",
                     "sample_received", "attachment_due", "to_be_verified"):
                all_verified = False
                break
        if all_verified:
            # Flag the request so the AR's own cascade skips analyses.
            if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
                self.REQUEST["workflow_skiplist"].append("verify all analyses")
            workflow.doActionFor(ar, "verify")
    # If this is on a worksheet and all it's other analyses are verified,
    # then verify the worksheet.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        ws_state = workflow.getInfoFor(ws, "review_state")
        if ws_state == "to_be_verified" and not skip(ws, "verify", peek=True):
            all_verified = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                        ("to_be_sampled", "to_be_preserved", "sample_due",
                         "sample_received", "attachment_due",
                         "to_be_verified", "assigned"):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    all_verified = False
                    break
            if all_verified:
                if not "verify all analyses" in self.REQUEST['workflow_skiplist']:
                    self.REQUEST["workflow_skiplist"].append("verify all analyses")
                workflow.doActionFor(ws, "verify")
def workflow_script_cancel(self):
    """Cancel handler for a sample: cascade 'cancel' to every active
    partition and every active analysis request.
    """
    if skip(self, "cancel"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["cancellation_state", ])
    # Cancel all partitions
    for part in self.objectValues('SamplePartition'):
        if workflow.getInfoFor(part, 'cancellation_state') == 'active':
            workflow.doActionFor(part, 'cancel')
    # cancel all ARs for this sample.
    for ar in self.getAnalysisRequests():
        if skip(ar, "cancel", peek=True):
            continue
        if workflow.getInfoFor(ar, 'cancellation_state') == 'active':
            workflow.doActionFor(ar, 'cancel')
def workflow_script_reinstate(self):
    """Reinstate handler for a sample: cascade 'reinstate' to every
    cancelled partition and every cancelled analysis request.
    """
    if skip(self, "reinstate"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["cancellation_state", ])
    # Re-instate all partitions of this sample
    for part in self.objectValues('SamplePartition'):
        if workflow.getInfoFor(part, 'cancellation_state') == 'cancelled':
            workflow.doActionFor(part, 'reinstate')
    # reinstate all ARs for this sample.
    for ar in self.getAnalysisRequests():
        if skip(ar, "reinstate", peek=True):
            continue
        if workflow.getInfoFor(ar, 'cancellation_state') == 'cancelled':
            workflow.doActionFor(ar, 'reinstate')
def workflow_script_receive(self):
    """Receive handler for a sample partition.

    Stamps the received date, transitions the partition's analyses, and
    promotes the parent sample to received once no sibling partition is
    still 'sample_due'.
    """
    if skip(self, "receive"):
        return
    sample = self.aq_parent
    workflow = getToolByName(self, 'portal_workflow')
    # Sample state is captured before the cascades below run.
    sample_state = workflow.getInfoFor(sample, 'review_state')
    self.setDateReceived(DateTime())
    self.reindexObject(idxs=["getDateReceived", ])
    # Transition our analyses
    analyses = self.getBackReferences('AnalysisSamplePartition')
    for analysis in analyses:
        doActionFor(analysis, "receive")
    # if all sibling partitions are received, promote sample
    if not skip(sample, "receive", peek=True):
        due = [sp for sp in sample.objectValues("SamplePartition")
               if workflow.getInfoFor(sp, 'review_state') == 'sample_due']
        if sample_state == 'sample_due' and not due:
            doActionFor(sample, 'receive')
def workflow_script_attach(self):
    """Attach handler for an analysis.

    Reindexes the analysis, then escalates 'attach' to the parent AR and,
    if assigned, to the worksheet once none of their analyses still lack
    attachments.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "attach"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["review_state", ])
    # If all analyses in this AR have been attached
    # escalate the action to the parent AR
    ar = self.aq_parent
    ar_state = workflow.getInfoFor(ar, "review_state")
    if ar_state == "attachment_due" and not skip(ar, "attach", peek=True):
        can_attach = True
        for a in ar.getAnalyses():
            # Any sibling still in an earlier state blocks the escalation.
            if a.review_state in \
                    ("to_be_sampled", "to_be_preserved", "sample_due",
                     "sample_received", "attachment_due",):
                can_attach = False
                break
        if can_attach:
            workflow.doActionFor(ar, "attach")
    # If assigned to a worksheet and all analyses on the worksheet have
    # been attached, then attach the worksheet.
    ws = self.getBackReferences('WorksheetAnalysis')
    if ws:
        ws = ws[0]
        ws_state = workflow.getInfoFor(ws, "review_state")
        if ws_state == "attachment_due" and not skip(
                ws, "attach", peek=True):
            can_attach = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                        ("to_be_sampled", "to_be_preserved", "sample_due",
                         "sample_received", "attachment_due", "assigned",):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    can_attach = False
                    break
            if can_attach:
                workflow.doActionFor(ws, "attach")
def workflow_script_receive(self):
    """Receive handler for an analysis: refresh its due date and reindex.

    Returns False (no processing) when the analysis is cancelled.
    """
    wf = getToolByName(self, "portal_workflow")
    if wf.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "receive"):
        return
    self.updateDueDate()
    self.reindexObject()
def workflow_script_submit(self):
    """ Method triggered after a 'submit' transition for the current analysis
    is performed. Responsible of triggering cascade actions such as
    transitioning dependent analyses, transitioning worksheets, etc
    depending on the current analysis and other analyses that belong to the
    same Analysis Request or Worksheet.
    This function is called automatically by
    bika.lims.workfow.AfterTransitionEventHandler
    """
    # The analyses that depends on this analysis to calculate their results
    # must be transitioned too, otherwise the user will be forced to submit
    # them individually. Note that the automatic transition of dependents
    # must only take place if all their dependencies have been submitted
    # already.
    for dependent in self.getDependents():
        # If this submit transition has already been done for this
        # dependent analysis within the current request, continue.
        if skip(dependent, 'submit', peek=True):
            continue

        # TODO Workflow. All below and inside this loop should be moved to
        # a guard_submit_transition inside analysis
        # If this dependent has already been submitted, omit
        if dependent.getSubmittedBy():
            continue

        # The dependent cannot be transitioned if doesn't have result
        if not dependent.getResult():
            continue

        # If the calculation associated to the dependent analysis requires
        # the manual introduction of interim fields, do not transition the
        # dependent automatically, force the user to do it manually.
        calculation = dependent.getCalculation()
        if calculation and calculation.getInterimFields():
            continue

        # All dependencies from this dependent analysis are ok?
        deps = dependent.getDependencies()
        dsub = [dep for dep in deps if wasTransitionPerformed(dep, 'submit')]
        if len(deps) == len(dsub):
            # The statuses of all dependencies of this dependent are ok
            # (at least, all of them have been submitted already)
            doActionFor(dependent, 'submit')

    # Do all the reflex rules process
    self._reflex_rule_process('submit')

    # Delegate the transition of Worksheet to base class AbstractAnalysis
    super(AbstractRoutineAnalysis, self).workflow_script_submit()
def workflow_script_verify(self):
    """Verify handler: verify each contained analysis that is awaiting
    verification, unless 'verify all analyses' is flagged in the request.
    """
    if skip(self, "verify"):
        return
    wf = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if "verify all analyses" in self.REQUEST['workflow_skiplist']:
        return
    # verify all analyses in this object.
    for analysis in self.getAnalyses():
        if wf.getInfoFor(analysis, 'review_state', '') == 'to_be_verified':
            doActionFor(analysis, "verify")
def workflow_script_attach(self):
    """Attach handler for an analysis.

    Reindexes the analysis, then escalates 'attach' to the parent AR and,
    if assigned, to the worksheet once none of their analyses still lack
    attachments.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "attach"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["review_state", ])
    # If all analyses in this AR have been attached
    # escalate the action to the parent AR
    ar = self.aq_parent
    ar_state = workflow.getInfoFor(ar, "review_state")
    if ar_state == "attachment_due" and not skip(ar, "attach", peek=True):
        can_attach = True
        for a in ar.getAnalyses():
            # Any sibling still in an earlier state blocks the escalation.
            if a.review_state in \
                    ("to_be_sampled", "to_be_preserved", "sample_due",
                     "sample_received", "attachment_due",):
                can_attach = False
                break
        if can_attach:
            workflow.doActionFor(ar, "attach")
    # If assigned to a worksheet and all analyses on the worksheet have
    # been attached, then attach the worksheet.
    ws = self.getBackReferences('WorksheetAnalysis')
    if ws:
        ws = ws[0]
        ws_state = workflow.getInfoFor(ws, "review_state")
        if ws_state == "attachment_due" and not skip(ws, "attach", peek=True):
            can_attach = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                        ("to_be_sampled", "to_be_preserved", "sample_due",
                         "sample_received", "attachment_due", "assigned",):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    can_attach = False
                    break
            if can_attach:
                workflow.doActionFor(ws, "attach")
def workflow_script_retract(self):
    """Retract handler: retract contained analyses that are in
    'attachment_due' or 'to_be_verified' (verified ones are left alone),
    unless 'retract all analyses' is flagged in the request.
    """
    if skip(self, "retract"):
        return
    wf = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if "retract all analyses" in self.REQUEST['workflow_skiplist']:
        return
    retractable = ('attachment_due', 'to_be_verified')
    for analysis in self.getAnalyses():
        if wf.getInfoFor(analysis, 'review_state', '') in retractable:
            doActionFor(analysis, 'retract')
def workflow_script_to_be_preserved(self):
    """Sample 'to_be_preserved' handler: cascade the transition to
    partitions already in that state and to every AR of this sample.
    """
    if skip(self, "to_be_preserved"):
        return
    workflow = getToolByName(self, "portal_workflow")
    # Transition our children
    for part in self.objectValues("SamplePartition"):
        if workflow.getInfoFor(part, "review_state") == "to_be_preserved":
            doActionFor(part, "to_be_preserved")
    # All associated AnalysisRequests are also transitioned
    for ar in self.getAnalysisRequests():
        doActionFor(ar, "to_be_preserved")
        ar.reindexObject()
def workflow_script_verify(self):
    """Verify handler: verify each contained analysis that is awaiting
    verification, unless 'verify all analyses' is flagged in the request.
    """
    if skip(self, "verify"):
        return
    wf = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if "verify all analyses" in self.REQUEST['workflow_skiplist']:
        return
    # verify all analyses in this object.
    for analysis in self.getAnalyses():
        current = wf.getInfoFor(analysis, 'review_state', '')
        if current == 'to_be_verified':
            doActionFor(analysis, "verify")
def workflow_script_verify(self):
    """Verify handler: verify each contained analysis that is awaiting
    verification, unless 'verify all analyses' is flagged in the request.
    """
    if skip(self, "verify"):
        return
    wf = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["review_state"])
    if "verify all analyses" in self.REQUEST["workflow_skiplist"]:
        return
    # verify all analyses in this object.
    for analysis in self.getAnalyses():
        current = wf.getInfoFor(analysis, "review_state", "")
        if current == "to_be_verified":
            doActionFor(analysis, "verify")
def workflow_script_sample(self):
    """Sample handler for a sample partition: transition the partition's
    analyses, then promote the parent sample.
    """
    if skip(self, "sample"):
        return
    sample = self.aq_parent
    # Removed an unused 'portal_workflow' lookup (dead local variable).
    # Transition our analyses
    analyses = self.getBackReferences('AnalysisSamplePartition')
    for analysis in analyses:
        doActionFor(analysis, "sample")
    # if all our siblings are now up to date, promote sample and ARs.
    # NOTE(review): despite the comment, no sibling-state check happens
    # here — the sample is promoted whenever any partition exists.
    parts = sample.objectValues("SamplePartition")
    if parts:
        doActionFor(sample, "sample")
def workflow_script_retract(self):
    """Retract handler: retract contained analyses that are in
    'attachment_due' or 'to_be_verified' (verified ones are left alone),
    unless 'retract all analyses' is flagged in the request.
    """
    if skip(self, "retract"):
        return
    wf = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["review_state"])
    if "retract all analyses" in self.REQUEST["workflow_skiplist"]:
        return
    retractable = ("attachment_due", "to_be_verified")
    for analysis in self.getAnalyses():
        if wf.getInfoFor(analysis, "review_state", "") in retractable:
            doActionFor(analysis, "retract")
def workflow_script_retract(self):
    """Retract handler: retract contained analyses that are in
    'attachment_due' or 'to_be_verified' (verified ones are left alone),
    unless 'retract all analyses' is flagged in the request.
    """
    if skip(self, "retract"):
        return
    wf = getToolByName(self, 'portal_workflow')
    self.reindexObject(idxs=["review_state", ])
    if "retract all analyses" in self.REQUEST['workflow_skiplist']:
        return
    for analysis in self.getAnalyses():
        current = wf.getInfoFor(analysis, 'review_state', '')
        if current in ('attachment_due', 'to_be_verified'):
            doActionFor(analysis, 'retract')
def workflow_script_to_be_preserved(self):
    """Sample 'to_be_preserved' handler: cascade the transition to
    partitions already in that state and to every AR of this sample.
    """
    if skip(self, "to_be_preserved"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    # Transition our children
    for part in self.objectValues('SamplePartition'):
        if workflow.getInfoFor(part, 'review_state') == 'to_be_preserved':
            doActionFor(part, "to_be_preserved")
    # All associated AnalysisRequests are also transitioned
    for ar in self.getAnalysisRequests():
        doActionFor(ar, "to_be_preserved")
        ar.reindexObject()
def workflow_script_sample(self):
    """Sample handler for a sample: transition remaining 'to_be_sampled'
    partitions and every associated AR.
    """
    if skip(self, "sample"):
        return
    workflow = getToolByName(self, "portal_workflow")
    # This action can happen in the Sample UI. So we transition all
    # partitions that are still 'to_be_sampled'
    for part in self.objectValues("SamplePartition"):
        if workflow.getInfoFor(part, "review_state") == "to_be_sampled":
            doActionFor(part, "sample")
    # All associated AnalysisRequests are also transitioned
    for ar in self.getAnalysisRequests():
        doActionFor(ar, "sample")
        ar.reindexObject()
def after_attach(obj):
    """After-transition handler for 'attach' on an analysis.

    Escalates 'attach' to the parent AR and, if assigned, to the
    worksheet once none of their analyses still lack attachments, then
    reindexes the analysis and its request.
    """
    if skip(obj, "attach"):
        return
    workflow = getToolByName(obj, "portal_workflow")
    # If all analyses in this AR have been attached escalate the action
    # to the parent AR
    ar = obj.aq_parent
    state = workflow.getInfoFor(ar, "review_state")
    if state == "attachment_due" and not skip(ar, "attach", peek=True):
        can_attach = True
        for a in ar.getAnalyses():
            if a.review_state in ("to_be_sampled", "to_be_preserved",
                                  "sample_due", "sample_received",
                                  "attachment_due"):
                can_attach = False
                break
        if can_attach:
            workflow.doActionFor(ar, "attach")
    # If assigned to a worksheet and all analyses on the worksheet have
    # been attached, then attach the worksheet.
    ws = obj.getBackReferences('WorksheetAnalysis')
    if ws:
        # Bug fix: getBackReferences returns a sequence; the list itself
        # was previously passed to getInfoFor. Use the first (only)
        # worksheet, as the sibling workflow_script_attach handlers do.
        ws = ws[0]
        ws_state = workflow.getInfoFor(ws, "review_state")
        if ws_state == "attachment_due" \
                and not skip(ws, "attach", peek=True):
            can_attach = True
            for a in ws.getAnalyses():
                state = workflow.getInfoFor(a, "review_state")
                if state in ("to_be_sampled", "to_be_preserved",
                             "sample_due", "sample_received",
                             "attachment_due", "assigned"):
                    # referenceanalyses and duplicateanalyses can still
                    # have review_state = "assigned".
                    can_attach = False
                    break
            if can_attach:
                workflow.doActionFor(ws, "attach")
    obj.reindexObject()
    _reindex_request(obj)
def workflow_script_cancel(self):
    """ When the round is cancelled, all its associated Samples and ARs are
    cancelled by the system.
    """
    if skip(self, "cancel"):
        return
    self.reindexObject(idxs=["cancellation_state", ])
    # Hoisted out of the loop: the workflow tool does not change per AR.
    workflow = getToolByName(self, 'portal_workflow')
    # deactivate all analysis requests in this sampling round.
    for ar in self.getAnalysisRequests():
        # Wake the catalog brain once instead of three times.
        ar_obj = ar.getObject()
        if workflow.getInfoFor(ar_obj, 'cancellation_state') != 'cancelled':
            doActionFor(ar_obj, 'cancel')
            doActionFor(ar_obj.getSample(), 'cancel')
def workflow_script_sample(self):
    """Sample handler for a sample: transition remaining 'to_be_sampled'
    partitions and every associated AR.
    """
    if skip(self, "sample"):
        return
    workflow = getToolByName(self, 'portal_workflow')
    # This action can happen in the Sample UI. So we transition all
    # partitions that are still 'to_be_sampled'
    for part in self.objectValues('SamplePartition'):
        if workflow.getInfoFor(part, 'review_state') == 'to_be_sampled':
            doActionFor(part, "sample")
    # All associated AnalysisRequests are also transitioned
    for ar in self.getAnalysisRequests():
        doActionFor(ar, "sample")
        ar.reindexObject()
def AfterTransitionEventHandler(instance, event):
    """Dispatch an after-transition event to its `after_<id>` handler in
    the `events` package, falling back to the generic `_after` handler.
    """
    # there is no transition for the state change (creation doesn't have a
    # 'transition')
    if not event.transition:
        return
    handler_name = "after_{}".format(event.transition.id)
    handler = getattr(events, handler_name, None)
    if handler is None:
        # Use default's After Transition Event Handler
        _after(instance, event)
        return
    # Set the request variable preventing cascade's from re-transitioning.
    if skip(instance, event.transition.id):
        return
    # Call the after_* function from events package
    handler(instance)
def workflow_script_to_be_preserved(self):
    """Partition 'to_be_preserved' handler: when no partition of the
    parent sample remains in a lower state, promote the sample and its
    ARs to 'to_be_preserved'.
    """
    if skip(self, "to_be_preserved"):
        return
    sample = self.aq_parent
    workflow = getToolByName(self, 'portal_workflow')
    # if all our siblings are now up to date, promote sample and ARs.
    parts = sample.objectValues("SamplePartition")
    if not parts:
        return
    lower_states = ('to_be_sampled', 'to_be_preserved')
    lagging = any(workflow.getInfoFor(part, 'review_state') in lower_states
                  for part in parts)
    if not lagging:
        doActionFor(sample, "to_be_preserved")
        for ar in sample.getAnalysisRequests():
            doActionFor(ar, "to_be_preserved")
def workflow_script_to_be_preserved(self):
    """Partition 'to_be_preserved' handler: transition the partition's
    analyses and, when no partition of the parent sample remains in a
    lower state, promote the sample and its ARs.
    """
    if skip(self, "to_be_preserved"):
        return
    sample = self.aq_parent
    workflow = getToolByName(self, 'portal_workflow')
    # Transition our analyses
    for analysis in self.getBackReferences('AnalysisSamplePartition'):
        doActionFor(analysis, "to_be_preserved")
    # if all our siblings are now up to date, promote sample and ARs.
    parts = sample.objectValues("SamplePartition")
    if not parts:
        return
    lower_states = ('to_be_sampled', 'to_be_preserved')
    lagging = any(workflow.getInfoFor(part, 'review_state') in lower_states
                  for part in parts)
    if not lagging:
        doActionFor(sample, "to_be_preserved")
        for ar in sample.getAnalysisRequests():
            doActionFor(ar, "to_be_preserved")
def workflow_script_sample_due(self):
    """Partition 'sample_due' handler: transition the partition's
    analyses and, when no sibling partition is still 'to_be_preserved',
    promote the sample and its ARs.
    """
    if skip(self, "sample_due"):
        return
    sample = self.aq_parent
    workflow = getToolByName(self, 'portal_workflow')
    # Transition our analyses
    for analysis in self.getBackReferences('AnalysisSamplePartition'):
        doActionFor(analysis, "sample_due")
    # if all our siblings are now up to date, promote sample and ARs.
    parts = sample.objectValues("SamplePartition")
    if not parts:
        return
    waiting = any(
        workflow.getInfoFor(part, 'review_state') == 'to_be_preserved'
        for part in parts)
    if not waiting:
        doActionFor(sample, "sample_due")
        for ar in sample.getAnalysisRequests():
            doActionFor(ar, "sample_due")
def workflow_script_unassign(self):
    """Unassign handler for an analysis.

    Demotes the analysis from its worksheet, removes any duplicates made
    of it on the worksheet, and then promotes (submit/attach/verify) or
    retracts the worksheet depending on the states of the analyses that
    remain on it.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "unassign"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["worksheetanalysis_review_state", ])
    rc = getToolByName(self, REFERENCE_CATALOG)
    # The worksheet we are being removed from is passed in the request.
    wsUID = self.REQUEST["context_uid"]
    ws = rc.lookupObject(wsUID)
    # Escalate the action to the parent AR if it is assigned
    # Note: AR adds itself to the skiplist so we have to take it off again
    # to allow multiple promotions/demotions (maybe by more than one
    # instance).
    if workflow.getInfoFor(self, "worksheetanalysis_review_state") == "assigned":
        workflow.doActionFor(self, "unassign")
        skip(self, "unassign", unskip=True)
    # If it has been duplicated on the worksheet, delete the duplicates.
    dups = self.getBackReferences("DuplicateAnalysisAnalysis")
    for dup in dups:
        ws.removeAnalysis(dup)
    # May need to promote the Worksheet's review_state
    # if all other analyses are at a higher state than this one was.
    # (or maybe retract it if there are no analyses left)
    # Note: duplicates, controls and blanks have 'assigned' as a
    # review_state.
    can_submit = True
    can_attach = True
    can_verify = True
    ws_empty = True
    for a in ws.getAnalyses():
        ws_empty = False
        a_state = workflow.getInfoFor(a, "review_state")
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received",):
            can_submit = False
        else:
            # A worksheet without an analyst can never be submitted.
            if not ws.getAnalyst():
                can_submit = False
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received", "attachment_due",):
            can_attach = False
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received", "attachment_due",
                 "to_be_verified",):
            can_verify = False
    if not ws_empty:
        # Note: WS adds itself to the skiplist so we have to take it off
        # again to allow multiple promotions (maybe by more than one
        # instance).
        if can_submit and workflow.getInfoFor(ws, "review_state") == "open":
            workflow.doActionFor(ws, "submit")
            skip(ws, 'unassign', unskip=True)
        if can_attach and workflow.getInfoFor(
                ws, "review_state") == "attachment_due":
            workflow.doActionFor(ws, "attach")
            skip(ws, 'unassign', unskip=True)
        if can_verify and workflow.getInfoFor(
                ws, "review_state") == "to_be_verified":
            # Skip per-analysis verification in the worksheet's cascade.
            self.REQUEST['workflow_skiplist'].append("verify all analyses")
            workflow.doActionFor(ws, "verify")
            skip(ws, 'unassign', unskip=True)
    else:
        # Worksheet has no analyses left; pull it back to 'open'.
        if workflow.getInfoFor(ws, "review_state") != "open":
            workflow.doActionFor(ws, "retract")
            skip(ws, "retract", unskip=True)
def workflow_script_unassign(self):
    """Unassign handler for an analysis.

    Demotes the analysis from its worksheet, removes any duplicates made
    of it on the worksheet, and then promotes (submit/attach/verify) or
    retracts the worksheet depending on the states of the analyses that
    remain on it.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "unassign"):
        return
    workflow = getToolByName(self, "portal_workflow")
    self.reindexObject(idxs=["worksheetanalysis_review_state", ])
    rc = getToolByName(self, REFERENCE_CATALOG)
    # The worksheet we are being removed from is passed in the request.
    wsUID = self.REQUEST["context_uid"]
    ws = rc.lookupObject(wsUID)
    # Escalate the action to the parent AR if it is assigned
    # Note: AR adds itself to the skiplist so we have to take it off again
    # to allow multiple promotions/demotions (maybe by more than one
    # instance).
    if workflow.getInfoFor(self, "worksheetanalysis_review_state") == "assigned":
        workflow.doActionFor(self, "unassign")
        skip(self, "unassign", unskip=True)
    # If it has been duplicated on the worksheet, delete the duplicates.
    dups = self.getBackReferences("DuplicateAnalysisAnalysis")
    for dup in dups:
        ws.removeAnalysis(dup)
    # May need to promote the Worksheet's review_state
    # if all other analyses are at a higher state than this one was.
    # (or maybe retract it if there are no analyses left)
    # Note: duplicates, controls and blanks have 'assigned' as a
    # review_state.
    can_submit = True
    can_attach = True
    can_verify = True
    ws_empty = True
    for a in ws.getAnalyses():
        ws_empty = False
        a_state = workflow.getInfoFor(a, "review_state")
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received",):
            can_submit = False
        else:
            # A worksheet without an analyst can never be submitted.
            if not ws.getAnalyst():
                can_submit = False
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received", "attachment_due",):
            can_attach = False
        if a_state in \
                ("to_be_sampled", "to_be_preserved", "assigned",
                 "sample_due", "sample_received", "attachment_due",
                 "to_be_verified",):
            can_verify = False
    if not ws_empty:
        # Note: WS adds itself to the skiplist so we have to take it off
        # again to allow multiple promotions (maybe by more than one
        # instance).
        if can_submit and workflow.getInfoFor(ws, "review_state") == "open":
            workflow.doActionFor(ws, "submit")
            skip(ws, 'unassign', unskip=True)
        if can_attach and workflow.getInfoFor(ws, "review_state") == "attachment_due":
            workflow.doActionFor(ws, "attach")
            skip(ws, 'unassign', unskip=True)
        if can_verify and workflow.getInfoFor(ws, "review_state") == "to_be_verified":
            # Skip per-analysis verification in the worksheet's cascade.
            self.REQUEST['workflow_skiplist'].append("verify all analyses")
            workflow.doActionFor(ws, "verify")
            skip(ws, 'unassign', unskip=True)
    else:
        # Worksheet has no analyses left; pull it back to 'open'.
        if workflow.getInfoFor(ws, "review_state") != "open":
            workflow.doActionFor(ws, "retract")
            skip(ws, "retract", unskip=True)
def workflow_script_submit(self):
    """Cascade handler fired after a 'submit' transition on an analysis.

    Submits dependent analyses whose requirements are satisfied, then
    escalates 'submit' to the parent AR and to the containing worksheet
    when all of their analyses have been submitted, and finally tries
    to 'attach' this analysis if attachments are not outstanding.

    Returns False (doing nothing) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "submit"):
        return
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    ar = self.aq_parent
    self.reindexObject(idxs=["review_state", ])
    # Dependencies are submitted already, ignore them.
    #-------------------------------------------------
    # Submit our dependents
    # Need to check for result and status of dependencies first
    dependents = self.getDependents()
    for dependent in dependents:
        if not skip(dependent, "submit", peek=True):
            can_submit = True
            # A dependent without a result cannot be submitted; one whose
            # calculation requires interim fields needs manual submission.
            if not dependent.getResult():
                can_submit = False
            else:
                interim_fields = False
                service = dependent.getService()
                calculation = service.getCalculation()
                if calculation:
                    interim_fields = calculation.getInterimFields()
                if interim_fields:
                    can_submit = False
            if can_submit:
                # All of the dependent's own dependencies must be past the
                # 'sample_received' stage too.
                dependencies = dependent.getDependencies()
                for dependency in dependencies:
                    if workflow.getInfoFor(dependency, "review_state") in \
                       ("to_be_sampled", "to_be_preserved",
                        "sample_due", "sample_received",):
                        can_submit = False
            if can_submit:
                workflow.doActionFor(dependent, "submit")
    # If all analyses in this AR have been submitted
    # escalate the action to the parent AR
    if not skip(ar, "submit", peek=True):
        all_submitted = True
        for a in ar.getAnalyses():
            if a.review_state in \
               ("to_be_sampled", "to_be_preserved",
                "sample_due", "sample_received",):
                all_submitted = False
                break
        if all_submitted:
            workflow.doActionFor(ar, "submit")
    # If assigned to a worksheet and all analyses on the worksheet have
    # been submitted, then submit the worksheet.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        # if the worksheet analyst is not assigned, the worksheet can't be
        # transitioned.
        if ws.getAnalyst() and not skip(ws, "submit", peek=True):
            all_submitted = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                   ("to_be_sampled", "to_be_preserved",
                    "sample_due", "sample_received", "assigned",):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    all_submitted = False
                    break
            if all_submitted:
                workflow.doActionFor(ws, "submit")
    # If no problem with attachments, do 'attach' action for this instance.
    can_attach = True
    if not self.getAttachment():
        # Attachment option "r" presumably means "required" -- TODO confirm
        # against the AnalysisService schema.
        service = self.getService()
        if service.getAttachmentOption() == "r":
            can_attach = False
    if can_attach:
        dependencies = self.getDependencies()
        for dependency in dependencies:
            if workflow.getInfoFor(dependency, "review_state") in \
               ("to_be_sampled", "to_be_preserved",
                "sample_due", "sample_received", "attachment_due",):
                can_attach = False
    if can_attach:
        try:
            workflow.doActionFor(self, "attach")
        except WorkflowException:
            # 'attach' may simply not be available from the current state.
            pass
def workflow_script_retract(self):
    """Cascade handler fired after a 'retract' transition on an analysis.

    Renames the retracted analysis out of the way (keyword -> keyword-N),
    creates a fresh copy of it in 'sample_received' state, re-attaches the
    copy to the same worksheet slot (if any), retracts dependencies and
    dependents where appropriate, and escalates 'retract' to the parent
    AR and the worksheet.

    Returns False (doing nothing) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "retract"):
        return
    ar = self.aq_parent
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    # We'll assign the new analysis to this same worksheet, if any.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
    # Rename the analysis to make way for it's successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = self.aq_parent
    analyses = [x for x in parent.objectValues("Analysis")
                if x.getId().startswith(self.id)]
    kw = self.getKeyword()
    parent.manage_renameObject(kw, "{0}-{1}".format(kw, len(analyses)))
    # Create new analysis and copy values from retracted
    analysis = _createObjectByType("Analysis", parent, kw)
    analysis.edit(
        Service=self.getService(),
        Calculation=self.getCalculation(),
        InterimFields=self.getInterimFields(),
        Result=self.getResult(),
        ResultDM=self.getResultDM(),
        Retested=True,  # mark the replacement as a retest
        MaxTimeAllowed=self.getMaxTimeAllowed(),
        DueDate=self.getDueDate(),
        Duration=self.getDuration(),
        ReportDryMatter=self.getReportDryMatter(),
        Analyst=self.getAnalyst(),
        Instrument=self.getInstrument(),
        SamplePartition=self.getSamplePartition())
    analysis.unmarkCreationFlag()
    # zope.event.notify(ObjectInitializedEvent(analysis))
    # Force the copy straight into 'sample_received' without firing
    # transition handlers.
    changeWorkflowState(analysis, "bika_analysis_workflow",
                        "sample_received")
    if ws:
        ws.addAnalysis(analysis)
    analysis.reindexObject()
    # retract our dependencies (unless a caller put
    # "retract all dependencies" on the skiplist to stop the cascade)
    if not "retract all dependencies" in self.REQUEST["workflow_skiplist"]:
        for dependency in self.getDependencies():
            if not skip(dependency, "retract", peek=True):
                if workflow.getInfoFor(dependency, "review_state") in \
                   ("attachment_due", "to_be_verified",):
                    # (NB: don't retract if it's verified)
                    workflow.doActionFor(dependency, "retract")
    # Retract our dependents
    for dep in self.getDependents():
        if not skip(dep, "retract", peek=True):
            if workflow.getInfoFor(dep, "review_state") not in \
               ("sample_received", "retracted"):
                self.REQUEST["workflow_skiplist"].append(
                    "retract all dependencies")
                # just return to "received" state, no cascade
                workflow.doActionFor(dep, 'retract')
                self.REQUEST["workflow_skiplist"].remove(
                    "retract all dependencies")
    # Escalate action to the parent AR
    if not skip(ar, "retract", peek=True):
        if workflow.getInfoFor(ar, "review_state") == "sample_received":
            # AR is already in the target state: just mark it handled.
            skip(ar, "retract")
        else:
            if not "retract all analyses" in self.REQUEST["workflow_skiplist"]:
                self.REQUEST["workflow_skiplist"].append("retract all analyses")
            workflow.doActionFor(ar, "retract")
    # Escalate action to the Worksheet (if it's on one).
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        if not skip(ws, "retract", peek=True):
            if workflow.getInfoFor(ws, "review_state") == "open":
                skip(ws, "retract")
            else:
                if not "retract all analyses" in self.REQUEST['workflow_skiplist']:
                    self.REQUEST["workflow_skiplist"].append("retract all analyses")
                workflow.doActionFor(ws, "retract")
        # Add to worksheet Analyses
        analyses = list(ws.getAnalyses())
        analyses += [analysis, ]
        ws.setAnalyses(analyses)
        # Add to worksheet layout: put the replacement in the same slot
        # as the analysis it replaces.
        layout = ws.getLayout()
        pos = [x["position"] for x in layout
               if x["analysis_uid"] == self.UID()][0]
        slot = {"position": pos,
                "analysis_uid": analysis.UID(),
                "container_uid": analysis.aq_parent.UID(),
                "type": "a"}
        layout.append(slot)
        ws.setLayout(layout)
def workflow_script_receive(self, state_info):
    """Event handler for the 'receive' transition.

    Only records the transition via skip() (presumably on the request's
    workflow_skiplist, so cascading handlers don't process 'receive'
    again for this object); no other side effects.
    """
    skip(self, 'receive')
def workflow_script_retract(self):
    """Cascade handler fired after a 'retract' transition on an analysis.

    Renames the retracted analysis out of the way (keyword -> keyword-N),
    creates a fresh copy of it in 'sample_received' state, re-attaches the
    copy to the same worksheet slot (if any), retracts dependencies and
    dependents where appropriate, and escalates 'retract' to the parent
    AR and the worksheet.

    Returns False (doing nothing) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "retract"):
        return
    ar = self.aq_parent
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    # We'll assign the new analysis to this same worksheet, if any.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
    # Rename the analysis to make way for its successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = self.aq_parent
    analyses = [
        x for x in parent.objectValues("Analysis")
        if x.getId().startswith(self.id)
    ]
    kw = self.getKeyword()
    # LIMS-1290 - Analyst must be able to retract, which creates a new
    # Analysis.  Monkey-patching _verifyObjectPaste disables the paste
    # permission check during the rename.  BUGFIX: the patch must be
    # removed in a finally block -- previously an exception raised by
    # manage_renameObject left the permission check permanently disabled
    # on this parent object.
    parent._verifyObjectPaste = str
    try:
        parent.manage_renameObject(kw, "{0}-{1}".format(kw, len(analyses)))
    finally:
        delattr(parent, '_verifyObjectPaste')
    # Create new analysis and copy values from retracted
    analysis = _createObjectByType("Analysis", parent, kw)
    analysis.edit(
        Service=self.getService(),
        Calculation=self.getCalculation(),
        InterimFields=self.getInterimFields(),
        Result=self.getResult(),
        ResultDM=self.getResultDM(),
        Retested=True,  # mark the replacement as a retest
        MaxTimeAllowed=self.getMaxTimeAllowed(),
        DueDate=self.getDueDate(),
        Duration=self.getDuration(),
        ReportDryMatter=self.getReportDryMatter(),
        Analyst=self.getAnalyst(),
        Instrument=self.getInstrument(),
        SamplePartition=self.getSamplePartition())
    analysis.unmarkCreationFlag()
    # We must bring the specification across manually.
    # NOTE(review): assumes 'specification' is always present on the
    # retracted analysis -- confirm against the analysis schema.
    analysis.specification = self.specification
    # zope.event.notify(ObjectInitializedEvent(analysis))
    # Force the copy straight into 'sample_received' without firing
    # transition handlers.
    changeWorkflowState(analysis, "bika_analysis_workflow",
                        "sample_received")
    if ws:
        ws.addAnalysis(analysis)
    analysis.reindexObject()
    # retract our dependencies (unless a caller put
    # "retract all dependencies" on the skiplist to stop the cascade)
    if "retract all dependencies" not in self.REQUEST["workflow_skiplist"]:
        for dependency in self.getDependencies():
            if not skip(dependency, "retract", peek=True):
                if workflow.getInfoFor(dependency, "review_state") in (
                        "attachment_due", "to_be_verified",):
                    # (NB: don't retract if it's verified)
                    workflow.doActionFor(dependency, "retract")
    # Retract our dependents
    for dep in self.getDependents():
        if not skip(dep, "retract", peek=True):
            if workflow.getInfoFor(
                    dep, "review_state") not in ("sample_received",
                                                 "retracted"):
                self.REQUEST["workflow_skiplist"].append(
                    "retract all dependencies")
                # just return to "received" state, no cascade
                workflow.doActionFor(dep, 'retract')
                self.REQUEST["workflow_skiplist"].remove(
                    "retract all dependencies")
    # Escalate action to the parent AR
    if not skip(ar, "retract", peek=True):
        if workflow.getInfoFor(ar, "review_state") == "sample_received":
            # AR is already in the target state: just mark it handled.
            skip(ar, "retract")
        else:
            if "retract all analyses" not in self.REQUEST[
                    "workflow_skiplist"]:
                self.REQUEST["workflow_skiplist"].append(
                    "retract all analyses")
            workflow.doActionFor(ar, "retract")
    # Escalate action to the Worksheet (if it's on one).
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        if not skip(ws, "retract", peek=True):
            if workflow.getInfoFor(ws, "review_state") == "open":
                skip(ws, "retract")
            else:
                if "retract all analyses" not in self.REQUEST[
                        'workflow_skiplist']:
                    self.REQUEST["workflow_skiplist"].append(
                        "retract all analyses")
                try:
                    workflow.doActionFor(ws, "retract")
                except WorkflowException:
                    # 'retract' may not be available from the current
                    # worksheet state.
                    pass
        # Add to worksheet Analyses
        analyses = list(ws.getAnalyses())
        analyses += [analysis, ]
        ws.setAnalyses(analyses)
        # Add to worksheet layout: put the replacement in the same slot
        # as the analysis it replaces.
        layout = ws.getLayout()
        pos = [
            x["position"] for x in layout
            if x["analysis_uid"] == self.UID()
        ][0]
        slot = {
            "position": pos,
            "analysis_uid": analysis.UID(),
            "container_uid": analysis.aq_parent.UID(),
            "type": "a"
        }
        layout.append(slot)
        ws.setLayout(layout)
def workflow_script_reject(self):
    """Copy real analyses to RejectAnalysis, with link to real
    create a new worksheet, with the original analyses, and new
    duplicates and references to match the rejected
    worksheet.
    """
    if skip(self, "reject"):
        return
    workflow = self.portal_workflow

    def copy_src_fields_to_dst(src, dst):
        # These will be ignored when copying field values between analyses
        ignore_fields = ['UID',
                         'id',
                         'title',
                         'allowDiscussion',
                         'subject',
                         'description',
                         'location',
                         'contributors',
                         'creators',
                         'effectiveDate',
                         'expirationDate',
                         'language',
                         'rights',
                         'creation_date',
                         'modification_date',
                         'Layout',    # ws
                         'Analyses',  # ws
                         ]
        fields = src.Schema().fields()
        for field in fields:
            fieldname = field.getName()
            if fieldname in ignore_fields:
                continue
            getter = getattr(src, 'get' + fieldname,
                             src.Schema().getField(fieldname).getAccessor(src))
            setter = getattr(dst, 'set' + fieldname,
                             dst.Schema().getField(fieldname).getMutator(dst))
            if getter is None or setter is None:
                # ComputedField
                continue
            setter(getter())

    # Map analysis UID -> current slot position on this worksheet.
    analysis_positions = {}
    for item in self.getLayout():
        analysis_positions[item['analysis_uid']] = item['position']
    old_layout = []
    new_layout = []
    # New worksheet
    worksheets = self.aq_parent
    new_ws = _createObjectByType('Worksheet', worksheets, tmpID())
    new_ws.unmarkCreationFlag()
    new_ws_id = renameAfterCreation(new_ws)
    copy_src_fields_to_dst(self, new_ws)
    new_ws.edit(
        Number=new_ws_id,
        Remarks=self.getRemarks()
    )
    # Objects are being created inside other contexts, but we want their
    # workflow handlers to be aware of which worksheet this is occurring in.
    # We save the worksheet in request['context_uid'].
    # We reset it again below.... be very sure that this is set to the
    # UID of the containing worksheet before invoking any transitions on
    # analyses.
    self.REQUEST['context_uid'] = new_ws.UID()
    # loop all analyses
    analyses = self.getAnalyses()
    new_ws_analyses = []
    old_ws_analyses = []
    for analysis in analyses:
        # Skip published or verified analyses
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state in ['published', 'verified', 'retracted']:
            # BUGFIX: 'position' was previously read here before ever
            # being assigned (NameError on the first iteration, stale
            # slot from a previous iteration afterwards); look it up
            # explicitly for this analysis.
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': 'a',
                               'analysis_uid': analysis.UID(),
                               'container_uid': analysis.aq_parent.UID()})
            continue
        # Normal analyses:
        # - Create matching RejectAnalysis inside old WS
        # - Link analysis to new WS in same position
        # - Copy all field values
        # - Clear analysis result, and set Retested flag
        if analysis.portal_type == 'Analysis':
            reject = _createObjectByType('RejectAnalysis', self, tmpID())
            reject.unmarkCreationFlag()
            reject_id = renameAfterCreation(reject)
            copy_src_fields_to_dst(analysis, reject)
            reject.setAnalysis(analysis)
            reject.reindexObject()
            analysis.edit(
                Result=None,
                Retested=True,
            )
            analysis.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(reject.UID())
            old_layout.append({'position': position,
                               'type': 'r',
                               'analysis_uid': reject.UID(),
                               'container_uid': self.UID()})
            new_ws_analyses.append(analysis.UID())
            new_layout.append({'position': position,
                               'type': 'a',
                               'analysis_uid': analysis.UID(),
                               'container_uid': analysis.aq_parent.UID()})
        # Reference analyses
        # - Create a new reference analysis in the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'ReferenceAnalysis':
            service_uid = analysis.getService().UID()
            reference = analysis.aq_parent
            reference_type = analysis.getReferenceType()
            new_analysis_uid = reference.addReferenceAnalysis(service_uid,
                                                              reference_type)
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': reference_type,
                               'analysis_uid': analysis.UID(),
                               'container_uid': reference.UID()})
            new_ws_analyses.append(new_analysis_uid)
            new_layout.append({'position': position,
                               'type': reference_type,
                               'analysis_uid': new_analysis_uid,
                               'container_uid': reference.UID()})
            workflow.doActionFor(analysis, 'reject')
            new_reference = reference.uid_catalog(
                UID=new_analysis_uid)[0].getObject()
            workflow.doActionFor(new_reference, 'assign')
            analysis.reindexObject()
        # Duplicate analyses
        # - Create a new duplicate inside the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == 'DuplicateAnalysis':
            src_analysis = analysis.getAnalysis()
            ar = src_analysis.aq_parent
            service = src_analysis.getService()
            duplicate_id = new_ws.generateUniqueId('DuplicateAnalysis')
            new_duplicate = _createObjectByType('DuplicateAnalysis',
                                                new_ws, duplicate_id)
            new_duplicate.unmarkCreationFlag()
            copy_src_fields_to_dst(analysis, new_duplicate)
            workflow.doActionFor(new_duplicate, 'assign')
            new_duplicate.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append({'position': position,
                               'type': 'd',
                               'analysis_uid': analysis.UID(),
                               'container_uid': self.UID()})
            new_ws_analyses.append(new_duplicate.UID())
            new_layout.append({'position': position,
                               'type': 'd',
                               'analysis_uid': new_duplicate.UID(),
                               'container_uid': new_ws.UID()})
            workflow.doActionFor(analysis, 'reject')
            analysis.reindexObject()
    new_ws.setAnalyses(new_ws_analyses)
    new_ws.setLayout(new_layout)
    new_ws.replaces_rejected_worksheet = self.UID()
    # Analyses already awaiting verification are pulled back so they can
    # be re-worked on the new worksheet.
    for analysis in new_ws.getAnalyses():
        review_state = workflow.getInfoFor(analysis, 'review_state', '')
        if review_state == 'to_be_verified':
            changeWorkflowState(analysis, "bika_analysis_workflow",
                                "sample_received")
    self.REQUEST['context_uid'] = self.UID()
    self.setLayout(old_layout)
    self.setAnalyses(old_ws_analyses)
    self.replaced_by = new_ws.UID()
def workflow_script_reject(self):
    """Copy real analyses to RejectAnalysis, with link to real
    create a new worksheet, with the original analyses, and new
    duplicates and references to match the rejected
    worksheet.
    """
    if skip(self, "reject"):
        return
    workflow = self.portal_workflow

    def copy_src_fields_to_dst(src, dst):
        # These will be ignored when copying field values between analyses
        ignore_fields = [
            "UID",
            "id",
            "title",
            "allowDiscussion",
            "subject",
            "description",
            "location",
            "contributors",
            "creators",
            "effectiveDate",
            "expirationDate",
            "language",
            "rights",
            "creation_date",
            "modification_date",
            "Layout",    # ws
            "Analyses",  # ws
        ]
        fields = src.Schema().fields()
        for field in fields:
            fieldname = field.getName()
            if fieldname in ignore_fields:
                continue
            getter = getattr(src, "get" + fieldname,
                             src.Schema().getField(fieldname).getAccessor(src))
            setter = getattr(dst, "set" + fieldname,
                             dst.Schema().getField(fieldname).getMutator(dst))
            if getter is None or setter is None:
                # ComputedField
                continue
            setter(getter())

    # Map analysis UID -> current slot position on this worksheet.
    analysis_positions = {}
    for item in self.getLayout():
        analysis_positions[item["analysis_uid"]] = item["position"]
    old_layout = []
    new_layout = []
    # New worksheet
    worksheets = self.aq_parent
    new_ws = _createObjectByType("Worksheet", worksheets, tmpID())
    new_ws.unmarkCreationFlag()
    new_ws_id = renameAfterCreation(new_ws)
    copy_src_fields_to_dst(self, new_ws)
    new_ws.edit(Number=new_ws_id, Remarks=self.getRemarks())
    # Objects are being created inside other contexts, but we want their
    # workflow handlers to be aware of which worksheet this is occurring in.
    # We save the worksheet in request['context_uid'].
    # We reset it again below.... be very sure that this is set to the
    # UID of the containing worksheet before invoking any transitions on
    # analyses.
    self.REQUEST["context_uid"] = new_ws.UID()
    # loop all analyses
    analyses = self.getAnalyses()
    new_ws_analyses = []
    old_ws_analyses = []
    for analysis in analyses:
        # Skip published or verified analyses
        review_state = workflow.getInfoFor(analysis, "review_state", "")
        if review_state in ["published", "verified", "retracted"]:
            # BUGFIX: 'position' was previously read here before ever
            # being assigned (NameError on the first iteration, stale
            # slot from a previous iteration afterwards); look it up
            # explicitly for this analysis.
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append(
                {
                    "position": position,
                    "type": "a",
                    "analysis_uid": analysis.UID(),
                    "container_uid": analysis.aq_parent.UID(),
                }
            )
            continue
        # Normal analyses:
        # - Create matching RejectAnalysis inside old WS
        # - Link analysis to new WS in same position
        # - Copy all field values
        # - Clear analysis result, and set Retested flag
        if analysis.portal_type == "Analysis":
            reject = _createObjectByType("RejectAnalysis", self, tmpID())
            reject.unmarkCreationFlag()
            reject_id = renameAfterCreation(reject)
            copy_src_fields_to_dst(analysis, reject)
            reject.setAnalysis(analysis)
            reject.reindexObject()
            analysis.edit(Result=None, Retested=True)
            analysis.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(reject.UID())
            old_layout.append(
                {"position": position,
                 "type": "r",
                 "analysis_uid": reject.UID(),
                 "container_uid": self.UID()}
            )
            new_ws_analyses.append(analysis.UID())
            new_layout.append(
                {
                    "position": position,
                    "type": "a",
                    "analysis_uid": analysis.UID(),
                    "container_uid": analysis.aq_parent.UID(),
                }
            )
        # Reference analyses
        # - Create a new reference analysis in the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == "ReferenceAnalysis":
            service_uid = analysis.getService().UID()
            reference = analysis.aq_parent
            reference_type = analysis.getReferenceType()
            new_analysis_uid = reference.addReferenceAnalysis(service_uid,
                                                              reference_type)
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append(
                {
                    "position": position,
                    "type": reference_type,
                    "analysis_uid": analysis.UID(),
                    "container_uid": reference.UID(),
                }
            )
            new_ws_analyses.append(new_analysis_uid)
            new_layout.append(
                {
                    "position": position,
                    "type": reference_type,
                    "analysis_uid": new_analysis_uid,
                    "container_uid": reference.UID(),
                }
            )
            workflow.doActionFor(analysis, "reject")
            new_reference = reference.uid_catalog(
                UID=new_analysis_uid)[0].getObject()
            workflow.doActionFor(new_reference, "assign")
            analysis.reindexObject()
        # Duplicate analyses
        # - Create a new duplicate inside the new worksheet
        # - Transition the original analysis to 'rejected' state
        if analysis.portal_type == "DuplicateAnalysis":
            src_analysis = analysis.getAnalysis()
            ar = src_analysis.aq_parent
            service = src_analysis.getService()
            duplicate_id = new_ws.generateUniqueId("DuplicateAnalysis")
            new_duplicate = _createObjectByType("DuplicateAnalysis",
                                                new_ws, duplicate_id)
            new_duplicate.unmarkCreationFlag()
            copy_src_fields_to_dst(analysis, new_duplicate)
            workflow.doActionFor(new_duplicate, "assign")
            new_duplicate.reindexObject()
            position = analysis_positions[analysis.UID()]
            old_ws_analyses.append(analysis.UID())
            old_layout.append(
                {"position": position,
                 "type": "d",
                 "analysis_uid": analysis.UID(),
                 "container_uid": self.UID()}
            )
            new_ws_analyses.append(new_duplicate.UID())
            new_layout.append(
                {
                    "position": position,
                    "type": "d",
                    "analysis_uid": new_duplicate.UID(),
                    "container_uid": new_ws.UID(),
                }
            )
            workflow.doActionFor(analysis, "reject")
            analysis.reindexObject()
    new_ws.setAnalyses(new_ws_analyses)
    new_ws.setLayout(new_layout)
    new_ws.replaces_rejected_worksheet = self.UID()
    # Analyses already awaiting verification are pulled back so they can
    # be re-worked on the new worksheet.
    for analysis in new_ws.getAnalyses():
        review_state = workflow.getInfoFor(analysis, "review_state", "")
        if review_state == "to_be_verified":
            changeWorkflowState(analysis, "bika_analysis_workflow",
                                "sample_received")
    self.REQUEST["context_uid"] = self.UID()
    self.setLayout(old_layout)
    self.setAnalyses(old_ws_analyses)
    self.replaced_by = new_ws.UID()
def workflow_script_open(self, state_info):
    """Event handler for the 'open' transition.

    Records the transition via skip() and resets the received date so the
    object effectively returns to its open state.
    """
    skip(self, 'open')
    # Reset everything and return to open state: the received date is
    # cleared and its index refreshed.
    self.setDateReceived(None)
    self.reindexObject(idxs=["getDateReceived"])
def workflow_script_attach(self):
    """Event handler for the 'attach' transition.

    Reindexes review_state only.  No cascading is done here: worksheets
    are not expected to be attached for now (if ever).
    """
    if skip(self, "attach"):
        return
    self.reindexObject(idxs=["review_state"])
def workflow_script_submit(self):
    """Cascade handler fired after a 'submit' transition on an analysis.

    Submits dependent analyses whose requirements are satisfied, then
    escalates 'submit' to the parent AR and to the containing worksheet
    when all of their analyses have been submitted, and finally tries
    to 'attach' this analysis if attachments are not outstanding.

    Returns False (doing nothing) when the analysis is cancelled.
    """
    # DuplicateAnalysis doesn't have analysis_workflow.
    if self.portal_type == "DuplicateAnalysis":
        return
    if skip(self, "submit"):
        return
    workflow = getToolByName(self, "portal_workflow")
    if workflow.getInfoFor(self, 'cancellation_state', 'active') == "cancelled":
        return False
    ar = self.aq_parent
    self.reindexObject(idxs=["review_state", ])
    # Dependencies are submitted already, ignore them.
    #-------------------------------------------------
    # Submit our dependents
    # Need to check for result and status of dependencies first
    dependents = self.getDependents()
    for dependent in dependents:
        if not skip(dependent, "submit", peek=True):
            can_submit = True
            # A dependent without a result cannot be submitted; one whose
            # calculation requires interim fields needs manual submission.
            if not dependent.getResult():
                can_submit = False
            else:
                interim_fields = False
                service = dependent.getService()
                calculation = service.getCalculation()
                if calculation:
                    interim_fields = calculation.getInterimFields()
                if interim_fields:
                    can_submit = False
            if can_submit:
                # All of the dependent's own dependencies must be past the
                # 'sample_received' stage too.
                dependencies = dependent.getDependencies()
                for dependency in dependencies:
                    if workflow.getInfoFor(dependency, "review_state") in \
                       ("to_be_sampled", "to_be_preserved",
                        "sample_due", "sample_received",):
                        can_submit = False
            if can_submit:
                workflow.doActionFor(dependent, "submit")
    # If all analyses in this AR have been submitted
    # escalate the action to the parent AR
    if not skip(ar, "submit", peek=True):
        all_submitted = True
        for a in ar.getAnalyses():
            if a.review_state in \
               ("to_be_sampled", "to_be_preserved",
                "sample_due", "sample_received",):
                all_submitted = False
                break
        if all_submitted:
            workflow.doActionFor(ar, "submit")
    # If assigned to a worksheet and all analyses on the worksheet have
    # been submitted, then submit the worksheet.
    ws = self.getBackReferences("WorksheetAnalysis")
    if ws:
        ws = ws[0]
        # if the worksheet analyst is not assigned, the worksheet can't be
        # transitioned.
        if ws.getAnalyst() and not skip(ws, "submit", peek=True):
            all_submitted = True
            for a in ws.getAnalyses():
                if workflow.getInfoFor(a, "review_state") in \
                   ("to_be_sampled", "to_be_preserved",
                    "sample_due", "sample_received", "assigned",):
                    # Note: referenceanalyses and duplicateanalyses can
                    # still have review_state = "assigned".
                    all_submitted = False
                    break
            if all_submitted:
                workflow.doActionFor(ws, "submit")
    # If no problem with attachments, do 'attach' action for this instance.
    can_attach = True
    if not self.getAttachment():
        # Attachment option "r" presumably means "required" -- TODO confirm
        # against the AnalysisService schema.
        service = self.getService()
        if service.getAttachmentOption() == "r":
            can_attach = False
    if can_attach:
        dependencies = self.getDependencies()
        for dependency in dependencies:
            if workflow.getInfoFor(dependency, "review_state") in \
               ("to_be_sampled", "to_be_preserved",
                "sample_due", "sample_received", "attachment_due",):
                can_attach = False
    if can_attach:
        try:
            workflow.doActionFor(self, "attach")
        except WorkflowException:
            # 'attach' may simply not be available from the current state.
            pass
def workflow_script_submit(self):
    """ Method triggered after a 'submit' transition for the current
    analysis is performed. Responsible of triggering cascade actions such
    as transitioning dependent analyses, transitioning worksheets, etc
    depending on the current analysis and other analyses that belong to the
    same Analysis Request or Worksheet.
    This function is called automatically by
    bika.lims.workfow.AfterTransitionEventHandler
    """

    def _all_submitted(objs):
        # True when every object in objs already went through 'submit'.
        submitted = [obj for obj in objs
                     if wasTransitionPerformed(obj, 'submit')]
        return len(submitted) == len(objs)

    # Analyses that depend on this one to compute their own results must
    # be transitioned too; otherwise the user would have to submit each of
    # them manually.  A dependent is only auto-submitted once all of its
    # own dependencies have been submitted.
    for dependent in self.getDependents():
        # Already handled within this request? Then leave it alone.
        if skip(dependent, 'submit', peek=True):
            continue
        # TODO Workflow. All below and inside this loop should be moved to
        # a guard_submit_transition inside analysis
        # Already submitted? Nothing to do.
        if dependent.getSubmittedBy():
            continue
        # No result yet -> cannot be transitioned.
        if not dependent.getResult():
            continue
        # Calculations with interim fields require manual input, so the
        # user must submit the dependent explicitly.
        calc = dependent.getCalculation()
        if calc and calc.getInterimFields():
            continue
        # Only submit when every dependency has been submitted already.
        if _all_submitted(dependent.getDependencies()):
            doActionFor(dependent, 'submit')

    # Do all the reflex rules process
    self._reflex_rule_process('submit')

    # When every analysis of the Analysis Request this analysis belongs to
    # has been submitted, promote the 'submit' action to the AR itself.
    request = self.getRequest()
    request_analyses = [brain.getObject()
                        for brain in request.getAnalyses()]
    if _all_submitted(request_analyses):
        doActionFor(request, 'submit')

    # Delegate the transition of Worksheet to base class AbstractAnalysis
    super(AbstractRoutineAnalysis, self).workflow_script_submit()