def after_submit(analysis):
    """Event handler fired after a 'submit' transition on the given analysis.

    Cascades the transition to the analyses this one depends on, runs the
    reflex-rules process and promotes the transition to the Worksheet and
    Analysis Request the analysis is bound to. Guards on those objects assure
    they only transition once all their analyses have been submitted.
    Called automatically by bika.lims.workfow.AfterTransitionEventHandler
    """
    # Promote to analyses this analysis depends on
    promote_to_dependencies(analysis, "submit")

    # TODO: REFLEX TO REMOVE
    # Run the reflex rules process for this transition
    if IRequestAnalysis.providedBy(analysis):
        analysis._reflex_rule_process('submit')

    # Promote the transition to the worksheet, if any
    worksheet = analysis.getWorksheet()
    if worksheet:
        doActionFor(worksheet, 'submit')

    # Promote the transition to the Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), 'submit')
        reindex_request(analysis)
def after_verify(analysis):
    """Event handler fired after a 'verify' transition on the given analysis.

    Cascades the transition to the analyses this one depends on, runs the
    reflex-rules process and promotes the transition to the Worksheet (if the
    analysis is assigned to any) and to the Analysis Request.
    Called automatically by bika.lims.workfow.AfterTransitionEventHandler
    """
    # Promote to analyses this analysis depends on
    promote_to_dependencies(analysis, "verify")

    # TODO: REFLEX TO REMOVE
    # Run the reflex rules process for this transition
    if IRequestAnalysis.providedBy(analysis):
        analysis._reflex_rule_process('verify')

    # Promote the transition to the worksheet, if any
    worksheet = analysis.getWorksheet()
    if worksheet:
        doActionFor(worksheet, 'verify')

    # Promote the transition to the Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), 'verify')
        reindex_request(analysis)
def _reflex_rule_process(self, wf_action):
    """This function does all the reflex rule process.

    Looks up the Reflex Rules bound to this analysis' method and executes
    the matching rule actions for the given workflow transition.

    :param wf_action: is a string containing the workflow action triggered
    """
    if not IRequestAnalysis.providedBy(self):
        # Only routine analyses (assigned to a Request) are supported
        logger.warn("Only IRequestAnalysis are supported in reflex testing")
        return

    # Check out if the analysis has any reflex rule bound to it.
    # First we have get the analysis' method because the Reflex Rule
    # objects are related to a method.
    a_method = self.getMethod()
    if not a_method:
        return

    # After getting the analysis' method we have to get all Reflex Rules
    # related to that method (via the back-reference relationship)
    all_rrs = a_method.getBackReferences('ReflexTestingScenarioMethod')
    if not all_rrs:
        return

    # Once we have all the Reflex Rules with the same method as the
    # analysis has, it is time to get the rules that are bound to the
    # same analysis service that is using the analysis.
    for rule in all_rrs:
        # Skip rules that are inactive
        if not api.is_active(rule):
            continue

        # Getting the rules to be done from the reflex rule taking
        # in consideration the analysis service, the result and
        # the state change
        action_row = rule.getActionReflexRules(self, wf_action)

        # Once we have the rules, the system has to execute its
        # instructions if the result has the expected result.
        doReflexRuleAction(self, action_row)
def getRequest(self):
    """Return the Analysis Request this attachment is linked to.

    There is a short time between creation and linking when it is not
    linked to any request. When no direct back-reference exists, falls back
    to the request of the linked analysis.

    :return: the Analysis Request, or None if not linked
    """
    # Attachment field in AnalysisRequest is still a ReferenceField, not
    # an UIDReferenceField yet, so resolve via the reference catalog.
    tool = getToolByName(self, REFERENCE_CATALOG)
    refs = list(tool.getBackReferences(self, 'AnalysisRequestAttachment'))
    if len(refs) > 1:
        # BUGFIX: original message concatenated words without spaces
        # ("Request.This should never happen!. The first Analysis
        # Requestwill be returned")
        logger.warn("Attachment assigned to more than one Analysis Request. "
                    "This should never happen!. The first Analysis Request "
                    "will be returned.")
    if refs:
        # Resolve the source (the Analysis Request) of the first reference
        return tool.lookupObject(refs[0].sourceUID)

    # This Attachment is not linked directly to an Analysis Request, but
    # probably linked to an Analysis, so try to get the Analysis Request
    # from there.
    analysis = self.getAnalysis()
    if IRequestAnalysis.providedBy(analysis):
        return analysis.getRequest()
    return None
def after_retest(analysis):
    """Function triggered after a 'retest' transition takes place for the
    analysis passed in. Creates a copy (retest) of the analysis and
    auto-verifies and retests the analyses this one is related with.
    """
    # When an analysis is retested, it automatically transitions to verified,
    # so we need to mark the analysis as such
    alsoProvides(analysis, IVerified)

    def verify_and_retest(relative):
        # Verify the relative (guards permitting) and create its retest
        if not ISubmitted.providedBy(relative):
            # Result not yet submitted, no need to create a retest
            return

        # Apply the transition manually, but only if analysis can be verified
        doActionFor(relative, "verify")

        # Create the retest
        create_retest(relative)

    # Retest and auto-verify relatives, from bottom to top
    relatives = list(reversed(analysis.getDependents(recursive=True)))
    relatives.extend(analysis.getDependencies(recursive=True))
    # BUGFIX: use an explicit loop instead of map() for side effects; under
    # Python 3 map() is lazy and the transitions would never be applied
    for relative in relatives:
        verify_and_retest(relative)

    # Create the retest
    create_retest(analysis)

    # Try to rollback the Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), "rollback_to_receive")
        reindex_request(analysis)
def is_result_range_compliant(analysis):
    """Returns whether the result range of the given analysis matches the
    result range defined in the Sample for its service counterpart
    """
    if not IRequestAnalysis.providedBy(analysis):
        return True

    if IDuplicateAnalysis.providedBy(analysis):
        # Does not make sense to apply compliance to a duplicate, cause its
        # valid range depends on the result of the original analysis
        return True

    analysis_range = analysis.getResultsRange()
    service_uid = analysis_range.get("uid", None)
    if not api.is_uid(service_uid):
        return True

    # Compare with Sample
    sample = analysis.getRequest()

    # If no Specification is set, assume is compliant
    if not sample.getRawSpecification():
        return True

    # Compare with the Specification that was initially set to the Sample
    sample_range = sample.getResultsRange(search_by=service_uid)
    if not sample_range:
        # This service is not defined in Sample's ResultsRange, we
        # assume this *does not* break the compliance
        return True

    return analysis_range == sample_range
def getRequest(self):
    """Return the Analysis Request this attachment is linked to.

    There is a short time between creation and linking when it is not
    linked to any request. When no direct back-reference exists, falls back
    to the request of the linked analysis.

    :return: the Analysis Request, or None if not linked
    """
    # Attachment field in AnalysisRequest is still a ReferenceField, not
    # an UIDReferenceField yet, so resolve via the reference catalog.
    tool = getToolByName(self, REFERENCE_CATALOG)
    refs = list(tool.getBackReferences(self, 'AnalysisRequestAttachment'))
    if len(refs) > 1:
        # BUGFIX: original message concatenated words without separating
        # spaces, producing e.g. "Analysis Requestwill be returned"
        logger.warn(
            "Attachment assigned to more than one Analysis Request. "
            "This should never happen!. The first Analysis Request "
            "will be returned.")
    if refs:
        # Resolve the source (the Analysis Request) of the first reference
        return tool.lookupObject(refs[0].sourceUID)

    # This Attachment is not linked directly to an Analysis Request, but
    # probably linked to an Analysis, so try to get the Analysis Request
    # from there.
    analysis = self.getAnalysis()
    if IRequestAnalysis.providedBy(analysis):
        return analysis.getRequest()
    return None
def getData(self):
    """Lazily build and return a row dictionary per analysis, each holding
    the Analysis Request and Worksheet details of the analysis (empty
    strings when not available).

    :return: list of dicts with keys ar*, ws* and an*
    """
    if not self._data:
        self._data = []
        for an in self.analyses:
            item = {'ar': '', 'ar_url': '', 'ar_id': '', 'ar_html': '',
                    'ws': '', 'ws_url': '', 'ws_id': '', 'ws_html': '',
                    'an': an, 'an_id': an.id, 'an_title': an.Title()}

            if IRequestAnalysis.providedBy(an):
                ar = an.getRequest()
                item['ar'] = ar
                item['ar_url'] = ar.absolute_url()
                item['ar_id'] = ar.getId()
                item['ar_html'] = \
                    "<a href='%s'>%s</a>" % (item['ar_url'], item['ar_id'])

            ws = an.getBackReferences("WorksheetAnalysis")
            if ws:
                # BUGFIX: getBackReferences returns a list; the original
                # assigned `wss = ws[0]` but then called absolute_url()/id
                # on the *list* `ws`, which would raise AttributeError
                ws = ws[0]
                item['ws'] = ws
                item['ws_url'] = ws.absolute_url()
                item['ws_id'] = ws.id
                item['ws_html'] = "<a href='%s'>%s</a>" \
                    % (item['ws_url'], item['ws_id'])
            self._data.append(item)
    return self._data
def after_submit(analysis):
    """Actions to be done after a submit transition for an analysis takes place
    """
    analysis = api.get_object(analysis)
    if not IRequestAnalysis.providedBy(analysis):
        return
    # Stamp the submission date on the Analysis Request the analysis
    # belongs to
    request = analysis.getRequest()
    set_field_value(request, "AssayDate", analysis.getDateSubmitted())
def create_retest(analysis):
    """Creates and returns a retest (copy) of the given analysis.

    The retest is created in the same parent, assigned to the same
    worksheet (if any) and linked to the original through RetestOf.

    :param analysis: the routine analysis to create the retest from
    :raises ValueError: if the analysis is not an IRequestAnalysis
    """
    if not IRequestAnalysis.providedBy(analysis):
        raise ValueError("Type not supported: {}".format(repr(type(analysis))))

    # Support multiple retests by prefixing keyword with *-0, *-1, etc.
    parent = api.get_parent(analysis)
    keyword = analysis.getKeyword()

    # Get only those analyses with same keyword as original.
    # BUGFIX: list comprehension instead of filter() so len() below keeps
    # working under Python 3 (filter returns a lazy iterator there)
    analyses = parent.getAnalyses(full_objects=True)
    analyses = [an for an in analyses if an.getKeyword() == keyword]
    new_id = '{}-{}'.format(keyword, len(analyses))

    # Create a copy of the original analysis
    an_uid = api.get_uid(analysis)
    retest = create_analysis(parent, analysis, id=new_id, RetestOf=an_uid)
    retest.setResult("")
    retest.setResultCaptureDate(None)

    # Add the retest to the same worksheet, if any
    worksheet = analysis.getWorksheet()
    if worksheet:
        worksheet.addAnalysis(retest)

    retest.reindexObject()
    return retest
def getData(self):
    """Lazily build and return a row dictionary per analysis, holding the
    Analysis Request and Worksheet details of each analysis (empty strings
    when not available)
    """
    if not self._data:
        rows = []
        for an in self.analyses:
            item = {'ar': '', 'ar_url': '', 'ar_id': '', 'ar_html': '',
                    'ws': '', 'ws_url': '', 'ws_id': '', 'ws_html': '',
                    'an': an, 'an_id': an.id, 'an_title': an.Title()}

            if IRequestAnalysis.providedBy(an):
                ar = an.getRequest()
                item.update({'ar': ar,
                             'ar_url': ar.absolute_url(),
                             'ar_id': ar.getId()})
                item['ar_html'] = "<a href='%s'>%s</a>" % (item['ar_url'],
                                                           item['ar_id'])

            ws = an.getWorksheet()
            if ws:
                item.update({'ws': ws,
                             'ws_url': ws.absolute_url(),
                             'ws_id': ws.id})
                item['ws_html'] = "<a href='%s'>%s</a>" % (item['ws_url'],
                                                           item['ws_id'])
            rows.append(item)
        self._data = rows
    return self._data
def _reindex_request(obj, idxs=None):
    """Reindex the Analysis Request the given analysis belongs to.

    :param obj: the analysis whose request has to be reindexed
    :param idxs: indexes to reindex; all of them when None
    """
    if not IRequestAnalysis.providedBy(obj):
        return
    request = obj.getRequest()
    # Pass idxs through only when explicitly given
    kwargs = {} if idxs is None else {"idxs": idxs}
    request.reindexObject(**kwargs)
def after_reject(analysis):
    """Function triggered after the "reject" transition for the analysis
    passed in is performed
    """
    # Unassign the analysis from its worksheet, if any
    remove_analysis_from_worksheet(analysis)

    # Reject our dependents (analyses that depend on this analysis)
    cascade_to_dependents(analysis, "reject")

    if not IRequestAnalysis.providedBy(analysis):
        return

    # Try to rollback the Analysis Request (all analyses rejected)
    doActionFor(analysis.getRequest(), "rollback_to_receive")
    reindex_request(analysis)
def reindex_request(analysis, idxs=None):
    """Reindex the Analysis Request the analysis belongs to, as well as the
    ancestors recursively.

    :param analysis: analysis bound to the request to reindex
    :param idxs: extra indexes to reindex besides the default ones
    """
    if not IRequestAnalysis.providedBy(analysis) or \
            IDuplicateAnalysis.providedBy(analysis):
        # Analysis not directly bound to an Analysis Request. Do nothing
        return

    # Default indexes, merged (deduplicated) with those explicitly requested.
    # FIX: explicit conditional instead of the error-prone `cond and a or b`
    # idiom the original used
    n_idxs = ['assigned_state', 'getDueDate']
    if idxs:
        n_idxs = list(set(idxs + n_idxs))

    # Reindex the request and all its ancestors
    request = analysis.getRequest()
    ancestors = [request] + request.getAncestors(all_ancestors=True)
    for ancestor in ancestors:
        push_reindex_to_actions_pool(ancestor, n_idxs)
def after_submit(obj):
    """Event handler fired after a 'submit' transition for the analysis
    passed in. Promotes the submit transition to the Worksheet the analysis
    belongs to (a guard on the worksheet assures it only transitions once
    all its analyses have been transitioned) and to the Analysis Request.
    Called automatically by bika.lims.workfow.AfterTransitionEventHandler
    """
    # Promote the transition to the worksheet, if any
    worksheet = obj.getWorksheet()
    if worksheet:
        doActionFor(worksheet, 'submit')

    # Promote the transition to the Analysis Request
    if IRequestAnalysis.providedBy(obj):
        doActionFor(obj.getRequest(), 'submit')
def getSiblings(self, with_retests=False):
    """Return the duplicate analyses that share the same Request and are
    included in the same Worksheet as the current analysis. The current
    duplicate is excluded from the list.

    :param with_retests: If false, siblings with retests are dismissed
    :type with_retests: bool
    :return: list of siblings for this analysis
    :rtype: list of IAnalysis
    """
    worksheet = self.getWorksheet()
    request_uid = self.getRequestUID()
    if not request_uid or not worksheet:
        return []

    my_uid = self.UID()
    dismiss_states = [STATE_RETRACTED, STATE_REJECTED]

    siblings = []
    for analysis in worksheet.getAnalyses():
        if analysis.UID() == my_uid:
            # Exclude me from the list
            continue
        if not IRequestAnalysis.providedBy(analysis):
            # Exclude analyses without an analysis request associated
            continue
        if analysis.getRequestUID() != request_uid:
            # Exclude analyses belonging to another analysis request
            continue
        if not with_retests:
            # Dismiss retracted/rejected analyses and those with a retest
            if in_state(analysis, dismiss_states) or analysis.getRetest():
                continue
        siblings.append(analysis)
    return siblings
def after_retract(analysis): """Function triggered after a 'retract' transition for the analysis passed in is performed. The analysis transitions to "retracted" state and a new copy of the analysis is created. The copy initial state is "unassigned", unless the the retracted analysis was assigned to a worksheet. In such case, the copy is transitioned to 'assigned' state too """ # Retract our dependents (analyses that depend on this analysis) cascade_to_dependents(analysis, "retract") # Retract our dependencies (analyses this analysis depends on) promote_to_dependencies(analysis, "retract") # Rename the analysis to make way for it's successor. # Support multiple retractions by renaming to *-0, *-1, etc parent = analysis.aq_parent keyword = analysis.getKeyword() # Get only those that are analyses and with same keyword as the original analyses = parent.getAnalyses(full_objects=True) analyses = filter(lambda an: an.getKeyword() == keyword, analyses) # TODO This needs to get managed by Id server in a nearly future! new_id = '{}-{}'.format(keyword, len(analyses)) # Create a copy of the retracted analysis an_uid = api.get_uid(analysis) new_analysis = create_analysis(parent, analysis, id=new_id, RetestOf=an_uid) new_analysis.setResult("") new_analysis.setResultCaptureDate(None) new_analysis.reindexObject() logger.info("Retest for {} ({}) created: {}".format( keyword, api.get_id(analysis), api.get_id(new_analysis))) # Assign the new analysis to this same worksheet, if any. worksheet = analysis.getWorksheet() if worksheet: worksheet.addAnalysis(new_analysis) # Try to rollback the Analysis Request if IRequestAnalysis.providedBy(analysis): doActionFor(analysis.getRequest(), "rollback_to_receive") reindex_request(analysis)
def guard_submit(analysis):
    """Return whether the transition "submit" can be performed or not
    """
    # Cannot submit without a result
    if not analysis.getResult():
        return False

    # Cannot submit with interims without value
    interims = analysis.getInterimFields()
    if any(not interim.get("value", "") for interim in interims):
        return False

    # Cannot submit if attachment not set, but is required
    if not analysis.getAttachment():
        if analysis.getAttachmentOption() == 'r':
            return False

    # Check if can submit based on the Analysis Request state
    if IRequestAnalysis.providedBy(analysis):
        point_of_capture = analysis.getPointOfCapture()

        # Cannot submit if the Sample has not been received
        if point_of_capture == "lab" and not analysis.isSampleReceived():
            return False

        # Cannot submit if the Sample has not been sampled
        if point_of_capture == "field" and not analysis.isSampleSampled():
            return False

    # Check if the current user can submit if is not assigned
    if not analysis.bika_setup.getAllowToSubmitNotAssigned():
        if not user_has_super_roles():
            # Cannot submit if unassigned or assigned to somebody else
            analyst = analysis.getAnalyst()
            if not analyst or analyst != security.get_user_id():
                return False

    # Cannot submit unless all dependencies are submitted or can be submitted
    return all(is_submitted_or_submittable(dependency)
               for dependency in analysis.getDependencies())
def after_reject(analysis):
    """Function triggered after the "reject" transition for the analysis
    passed in is performed
    """
    # Unassign the analysis from its worksheet, if any
    remove_analysis_from_worksheet(analysis)

    # Reject our dependents (analyses that depend on this analysis)
    cascade_to_dependents(analysis, "reject")

    if not IRequestAnalysis.providedBy(analysis):
        return

    request = analysis.getRequest()

    # Try verify (for when remaining analyses are in 'verified')
    doActionFor(request, "verify")

    # Try submit (remaining analyses are in 'to_be_verified')
    doActionFor(request, "submit")

    # Try rollback (no remaining analyses or some not submitted)
    doActionFor(request, "rollback_to_receive")
    reindex_request(analysis)
def __call__(self):
    """Return the dynamic specification record matching the analysis'
    keyword and the patient's age and sex, or an empty dict when no match
    is found.
    """
    if not IRequestAnalysis.providedBy(self.analysis):
        # Cannot grab the patient from analyses not assigned to a Sample
        return {}

    # Get the sample's specification
    sample = self.analysis.getRequest()
    specification = sample.getSpecification()
    if not specification:
        # No specification, nothing to do
        return {}

    # Dynamic specification
    dyn_spec = specification.getDynamicAnalysisSpec()

    # Get the patient from the sample.
    # FIX: reuse the sample fetched above; the original resolved the
    # request twice
    patient = sample.getField("Patient").get(sample)
    if not patient:
        # No patient assigned for this sample, do nothing
        return {}

    # Patient's age (in years)
    age = patient.getAge()

    # Patient's gender (male/female/dk)
    sex = patient.getGender()

    # Get the dynamic specification for this analysis by keyword
    # We expect the xls to have the columns "keyword", "age" and "sex"
    keyword = self.analysis.getKeyword()
    ranges = dyn_spec.get_by_keyword().get(keyword)
    if not ranges:
        # No ranges defined for this analysis
        return {}

    # Find a match by age and sex.
    # FIX: loop variable renamed so it does not shadow the builtin `range`
    for record in ranges:
        if record.get("age") == age and record.get("sex") == sex:
            return record

    # No dynamic specification found for this analysis and patient
    return {}
def _get_service_uid(self, item):
    """Resolve the Analysis Service UID for the given item, that can be a
    UID, an Analysis Service or a routine Analysis. Returns None otherwise
    """
    if api.is_uid(item):
        return item

    if not api.is_object(item):
        logger.warn("Not an UID: {}".format(item))
        return None

    obj = api.get_object(item)
    if IAnalysisService.providedBy(obj):
        # The item is the service itself
        return api.get_uid(obj)

    if IAnalysis.providedBy(obj) and IRequestAnalysis.providedBy(obj):
        # Routine analysis: resolve the service it comes from
        return obj.getServiceUID()

    # An object, but neither an Analysis nor AnalysisService?
    # This should never happen.
    msg = "ARAnalysesField doesn't accept objects from {} type. " \
          "The object will be dismissed."
    logger.warn(msg.format(api.get_portal_type(obj)))
    return None
def after_retract(analysis):
    """Function triggered after a 'retract' transition for the analysis
    passed in is performed. The analysis transitions to "retracted" state
    and a new copy (retest) of the analysis is created. The copy's initial
    state is "unassigned", unless the retracted analysis was assigned to a
    worksheet, in which case the copy transitions to 'assigned' as well
    """
    # Retract our dependents (analyses that depend on this analysis)
    cascade_to_dependents(analysis, "retract")

    # Retract our dependencies (analyses this analysis depends on)
    promote_to_dependencies(analysis, "retract")

    # Create the retest
    create_retest(analysis)

    if not IRequestAnalysis.providedBy(analysis):
        return

    # Try to rollback the Analysis Request
    doActionFor(analysis.getRequest(), "rollback_to_receive")
    reindex_request(analysis)
def getSiblings(self, retracted=False):
    """Return the list of duplicate analyses that share the same Request
    and are included in the same Worksheet as the current analysis. The
    current duplicate is excluded from the list.

    :param retracted: If false, retracted/rejected siblings are dismissed
    :type retracted: bool
    :return: list of siblings for this analysis
    :rtype: list of IAnalysis
    """
    worksheet = self.getWorksheet()
    requestuid = self.getRequestUID()
    if not requestuid or not worksheet:
        return []

    siblings = []
    retracted_states = [STATE_RETRACTED, STATE_REJECTED]
    analyses = worksheet.getAnalyses()
    for analysis in analyses:
        if analysis.UID() == self.UID():
            # Exclude me from the list
            continue

        # FIX: idiomatic truthiness check instead of the `is False`
        # identity comparison the original used
        if not IRequestAnalysis.providedBy(analysis):
            # Exclude analyses that do not have an analysis request
            # associated
            continue

        if analysis.getRequestUID() != requestuid:
            # Exclude those analyses that does not belong to the same
            # analysis request I belong to
            continue

        if not retracted and in_state(analysis, retracted_states):
            # Exclude retracted analyses
            continue

        siblings.append(analysis)

    return siblings
def get_specification_for(spec, default=_marker):
    """Returns a plain dictionary with specification values (min, max, etc.)

    It looks through an excel file provided as-is to find the record that
    better fits with the gender and age from the analysis request and for
    the analysis passed in

    :param spec: mapping holding an 'analysis_uid' key (Analysis object,
        analysis uid or analysis brain)
    :param default: value to return when the analysis or the specification
        cannot be resolved; when omitted, api.fail is called instead
    """
    # FIX: removed redundant parentheses around the 'analysis_uid' key
    analysis = api.get_object_by_uid(spec.get('analysis_uid'))
    if not analysis or not IRequestAnalysis.providedBy(analysis):
        if default is not _marker:
            return default
        api.fail("Type {} not supported: ".format(repr(analysis)))

    request = analysis.getRequest()
    gender = api.get_field_value(request, "Gender")
    if not gender or gender.lower() not in GENDERS.keys():
        # If no gender is specified or not a valid value, assume any
        gender = 'a'

    dob = api.get_field_value(request, "DateOfBirth")
    sampled = request.getDateSampled()
    if not dob or not sampled:
        # No DateSampled / DateOfBirth set; may legitimately happen right
        # after a 1.3 upgrade, so return an empty spec instead of failing
        return {}

    specification = request.getSpecification()
    if not specification:
        # This should never happen, Since this function has been triggered,
        # we assume an specification has been set to the AR
        if default is not _marker:
            return default
        api.fail("Specification not set for request {}".format(request.id))

    years, months, days = api.get_age(dob, sampled)
    return get_analysisspec(analysis_keyword=analysis.getKeyword(),
                            gender=gender, years=years, months=months,
                            days=days)
def after_retract(analysis): """Function triggered after a 'retract' transition for the analysis passed in is performed. The analysis transitions to "retracted" state and a new copy of the analysis is created. The copy initial state is "unassigned", unless the the retracted analysis was assigned to a worksheet. In such case, the copy is transitioned to 'assigned' state too """ # Rename the analysis to make way for it's successor. # Support multiple retractions by renaming to *-0, *-1, etc parent = analysis.aq_parent keyword = analysis.getKeyword() analyses = filter(lambda an: an.getKeyword() == keyword, parent.objectValues("Analysis")) # Rename the retracted analysis # https://docs.plone.org/develop/plone/content/rename.html # _verifyObjectPaste permission check must be cancelled parent._verifyObjectPaste = str retracted_id = '{}-{}'.format(keyword, len(analyses)) # Make sure all persistent objects have _p_jar attribute transaction.savepoint(optimistic=True) parent.manage_renameObject(analysis.getId(), retracted_id) delattr(parent, '_verifyObjectPaste') # Create a copy of the retracted analysis analysis_uid = api.get_uid(analysis) new_analysis = create_analysis(parent, analysis, RetestOf=analysis_uid) # Assign the new analysis to this same worksheet, if any. worksheet = analysis.getWorksheet() if worksheet: worksheet.addAnalysis(new_analysis) # Retract our dependents (analyses that depend on this analysis) cascade_to_dependents(analysis, "retract") # Try to rollback the Analysis Request if IRequestAnalysis.providedBy(analysis): doActionFor(analysis.getRequest(), "rollback_to_receive") reindex_request(analysis)
def getSiblings(self):
    """Returns the list of duplicate analyses that share the same Request
    and are included in the same Worksheet as the current. The current
    duplicate is excluded from the list
    """
    worksheet = self.getWorksheet()
    request_uid = self.getRequestUID()
    if not request_uid or not worksheet:
        return []

    def is_sibling(analysis):
        # Dismiss myself, analyses without an associated analysis request
        # (e.g. IReferenceAnalysis) and analyses from other requests
        if analysis.UID() == self.UID():
            return False
        if not IRequestAnalysis.providedBy(analysis):
            return False
        return analysis.getRequestUID() == request_uid

    return [an for an in worksheet.getAnalyses() if is_sibling(an)]
def __call__(self):
    """Get the default value.
    """
    if not IRequestAnalysis.providedBy(self.context):
        return {}

    # Get the AnalysisRequest to look at
    analysis = self.context
    sample = analysis.getRequest()
    if not sample:
        return {}

    # Search by keyword first
    field = sample.getField("ResultsRange")
    result_range = field.get(sample, search_by=analysis.getKeyword())
    if result_range:
        return result_range

    # Fall back to the service uid (this shouldn't be necessary)
    return field.get(sample, search_by=analysis.getServiceUID()) or {}
def getRequest(self):
    """Return the primary AR this attachment is linked
    """
    ars = self.getLinkedRequests()

    if len(ars) > 1:
        # Attachment is assigned to more than one Analysis Request.
        # This might happen when the AR was invalidated
        ar_ids = ", ".join(map(api.get_id, ars))
        logger.info("Attachment assigned to more than one AR: [{}]. "
                    "The first AR will be returned".format(ar_ids))

    # return the first AR
    if ars:
        return ars[0]

    # Check if the attachment is linked to an analysis and try to get the
    # AR from the linked analysis
    analysis = self.getAnalysis()
    if IRequestAnalysis.providedBy(analysis):
        return analysis.getRequest()
    return None
def doActionToAnalysis(source_analysis, action):
    """ This functions executes the action against the analysis.

    Dispatches on action['action'] ('new_analysis', 'setvisibility',
    'repeat', 'duplicate', 'setresult') and stamps the resulting analysis
    with the reflex-rule bookkeeping fields before returning it.

    :base: a full analysis object. The new analyses will be cloned from it.
    :action: a dictionary representing an action row.
        [{'action': 'duplicate', ...}, {,}, ...]
    :returns: the new analysis, or None when the action cannot be applied
    """
    if not IRequestAnalysis.providedBy(source_analysis):
        # Only routine analyses (assigned to a Request) are supported
        logger.warn("Only IRequestAnalysis are supported in reflex testing")
        return None

    state = api.get_review_status(source_analysis)
    action_id = action.get('action', '')
    if action_id == "new_analysis":
        # Create a new analysis (different from the original)
        service_uid = action.get("new_analysis", "")
        if not api.is_uid(service_uid):
            logger.error("Not a valid UID: {}".format(service_uid))
            return None
        service = api.get_object_by_uid(service_uid, None)
        if not service or not IAnalysisService.providedBy(service):
            logger.error("No valid service for UID {}".format(service_uid))
            return None

        analysis = create_analysis(source_analysis.aq_parent, service)
        analysis.setSamplePartition(source_analysis.getSamplePartition())
        # Force the new analysis into the received state so it is ready
        # for results capture
        changeWorkflowState(analysis, "bika_analysis_workflow",
                            "sample_received")

    elif action_id == 'setvisibility':
        # Toggle report visibility of the original or of a local analysis
        target_id = action.get('setvisibilityof', '')
        if target_id == "original":
            analysis = source_analysis
        else:
            analysis = _fetch_analysis_for_local_id(source_analysis,
                                                    target_id)

    elif action_id == 'repeat' and state != 'retracted':
        # Repeat an analysis consist on cancel it and then create a new
        # analysis with the same analysis service used for the canceled
        # one (always working with the same sample). It'll do a retract
        # action
        doActionFor(source_analysis, 'retract')
        analysis_request = source_analysis.getRequest()
        # The retract transition creates the successor; grab the newest
        # analysis of the request
        analysis = analysis_request.getAnalyses(sort_on="created")[-1]
        analysis = api.get_object(analysis)
        analysis.setResult('')

    elif action_id == 'duplicate' or state == 'retracted':
        analysis = duplicateAnalysis(source_analysis)
        analysis.setResult('')

    elif action_id == 'setresult':
        # Set a result, either on the original analysis or on a new copy
        target = action.get('setresulton', '')
        result_value = action.get('setresultdiscrete', '') or \
            action['setresultvalue']

        if target == 'original':
            analysis = source_analysis.getOriginalReflexedAnalysis()
            analysis.setResult(result_value)

        elif target == 'new':
            # Create a new analysis
            analysis = duplicateAnalysis(source_analysis)
            analysis.setResult(result_value)
            doActionFor(analysis, 'submit')

        else:
            logger.error("Unknown 'setresulton' directive: {}".format(target))
            return None

    else:
        logger.error("Unknown Reflex Rule action: {}".format(action_id))
        return None

    # Stamp the reflex-rule bookkeeping on the resulting analysis
    analysis.setReflexRuleAction(action_id)
    analysis.setIsReflexAnalysis(True)
    analysis.setReflexAnalysisOf(source_analysis)
    analysis.setReflexRuleActionsTriggered(
        source_analysis.getReflexRuleActionsTriggered())

    # Apply the report visibility requested by the action, if any
    if action.get('showinreport', '') == "invisible":
        analysis.setHidden(True)
    elif action.get('showinreport', '') == "visible":
        analysis.setHidden(False)

    # Setting the original reflected analysis (keep pointing at the very
    # first analysis of the reflex chain)
    if source_analysis.getOriginalReflexedAnalysis():
        analysis.setOriginalReflexedAnalysis(
            source_analysis.getOriginalReflexedAnalysis())
    else:
        analysis.setOriginalReflexedAnalysis(source_analysis)
    analysis.setReflexRuleLocalID(action.get('an_result_id', ''))

    # Setting the remarks to base analysis
    # remarks = get_remarks(action, analysis)
    # analysis.setRemarks(remarks)
    return analysis
def workflow_action_submit(self):
    """Handler for the 'submit' action triggered from an analyses listing.

    Saves the form values (results, remarks, methods, instruments,
    analysts, uncertainties and detection limits) for each selected
    analysis, triggers the 'submit' transition where possible, retracts
    analyses tied to instruments that failed a reference test, and finally
    promotes the transition to the affected Analysis Requests and
    Worksheets.
    """
    uids = self.get_selected_uids()
    if not uids:
        message = _('No items selected.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return

    if not is_active(self.context):
        message = _('Item is inactive.')
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.response.redirect(self.context.absolute_url())
        return

    # Each form entry is a one-element list holding a uid -> value mapping
    form = self.request.form
    remarks = form.get('Remarks', [{}])[0]
    results = form.get('Result', [{}])[0]
    retested = form.get('retested', {})
    methods = form.get('Method', [{}])[0]
    instruments = form.get('Instrument', [{}])[0]
    analysts = self.request.form.get('Analyst', [{}])[0]
    uncertainties = self.request.form.get('Uncertainty', [{}])[0]
    dlimits = self.request.form.get('DetectionLimit', [{}])[0]

    # XXX combine data from multiple bika listing tables.
    # TODO: Is this necessary?
    item_data = {}
    if 'item_data' in form:
        if type(form['item_data']) == list:
            for i_d in form['item_data']:
                for i, d in json.loads(i_d).items():
                    item_data[i] = d
        else:
            item_data = json.loads(form['item_data'])

    # Store affected Analysis Requests
    affected_ars = set()

    # Store affected Worksheets
    affected_ws = set()

    # Store invalid instruments-ref.analyses
    invalid_instrument_refs = dict()

    # We manually query by all analyses uids at once here instead of using
    # _get_selected_items from the base class, cause that function fetches
    # the objects by uid, but sequentially one by one
    query = dict(UID=uids)
    for brain in api.search(query, CATALOG_ANALYSIS_LISTING):
        uid = api.get_uid(brain)
        analysis = api.get_object(brain)

        # If not active, do nothing
        if not is_active(brain):
            continue

        # Need to save remarks?
        if uid in remarks:
            analysis.setRemarks(remarks[uid])

        # Retested?
        if uid in retested:
            analysis.setRetested(retested[uid])

        # Need to save the instrument?
        if uid in instruments:
            instrument = instruments[uid] or None
            analysis.setInstrument(instrument)
            if instrument and IReferenceAnalysis.providedBy(analysis):
                if is_out_of_range(analysis):
                    # This reference analysis is out of range, so we have
                    # to retract all analyses assigned to this same
                    # instrument that are awaiting for verification
                    if uid not in invalid_instrument_refs:
                        invalid_instrument_refs[uid] = set()
                    invalid_instrument_refs[uid].add(analysis)
                else:
                    # The reference result is valid, so make the instrument
                    # available again for further analyses
                    instrument.setDisposeUntilNextCalibrationTest(False)

        # Need to save the method?
        if uid in methods:
            method = methods[uid] or None
            analysis.setMethod(method)

        # Need to save the analyst?
        if uid in analysts:
            analysis.setAnalyst(analysts[uid])

        # Need to save the uncertainty?
        if uid in uncertainties:
            analysis.setUncertainty(uncertainties[uid])

        # Need to save the detection limit?
        if uid in dlimits and dlimits[uid]:
            analysis.setDetectionLimitOperand(dlimits[uid])

        # Need to save results?
        submitted = False
        if uid in results and results[uid]:
            interims = item_data.get(uid, [])
            analysis.setInterimFields(interims)
            analysis.setResult(results[uid])

            # Can the analysis be submitted?
            # An analysis can only be submitted if all its dependencies
            # are valid and have been submitted already
            can_submit = True
            invalid_states = [
                'to_be_sampled', 'to_be_preserved', 'sample_due',
                'sample_received'
            ]
            for dependency in analysis.getDependencies():
                if in_state(dependency, invalid_states):
                    can_submit = False
                    break
            if can_submit:
                # doActionFor transitions the analysis to verif pending,
                # so must only be done when results are submitted.
                doActionFor(analysis, 'submit')
                submitted = True
                if IRequestAnalysis.providedBy(analysis):
                    # Store the AR uids to be reindexed later.
                    affected_ars.add(brain.getParentUID)

                if brain.worksheetanalysis_review_state == 'assigned':
                    worksheet_uid = analysis.getWorksheetUID()
                    if worksheet_uid:
                        affected_ws.add(worksheet_uid)

        if not submitted:
            # Analysis has not been submitted, so we need to reindex the
            # object manually, to update catalog's metadata.
            analysis.reindexObject()

    # If a reference analysis with an out-of-range result and instrument
    # assigned has been submitted, retract then routine analyses that are
    # awaiting for verification and with same instrument associated
    retracted = list()
    for invalid_instrument_uid in invalid_instrument_refs.keys():
        query = dict(
            getInstrumentUID=invalid_instrument_uid,
            portal_type=['Analysis', 'DuplicateAnalysis'],
            review_state='to_be_verified',
            cancellation_state='active',
        )
        brains = api.search(query, CATALOG_ANALYSIS_LISTING)
        for brain in brains:
            analysis = api.get_object(brain)
            failed_msg = '{0}: {1}'.format(
                ulocalized_time(DateTime(), long_format=1),
                _("Instrument failed reference test"))
            an_remarks = analysis.getRemarks()
            analysis.setRemarks('. '.join([an_remarks, failed_msg]))
            doActionFor(analysis, 'retract')
            retracted.append(analysis)

    # If some analyses have been retracted because instrument failed a
    # reference test, then generate a pdf report
    if retracted:
        # Create the Retracted Analyses List
        report = AnalysesRetractedListReport(self.context, self.request,
                                             self.portal_url,
                                             'Retracted analyses',
                                             retracted)

        # Attach the pdf to all ReferenceAnalysis that failed (accessible
        # from Instrument's Internal Calibration Tests list
        pdf = report.toPdf()
        for ref in invalid_instrument_refs.values():
            # NOTE(review): invalid_instrument_refs values are *sets* of
            # reference analyses; this calls the setter on the set itself,
            # which looks wrong — presumably it should iterate the set.
            # Confirm and fix in a behavior-changing pass.
            ref.setRetractedAnalysesPdfReport(pdf)

        # Send the email
        try:
            report.sendEmail()
        except:
            # NOTE(review): bare except silently swallows every error
            # (even KeyboardInterrupt). Best-effort mailing is intended,
            # but the failure should at least be logged.
            pass

    # Finally, when we are done processing all applicable analyses, we must
    # attempt to initiate the submit transition on the ARs and Worksheets
    # the processed analyses belong to.
    # We stick only to affected_ars, and affected_ws

    # Reindex the Analysis Requests for which at least one Analysis has
    # been submitted. We do this here because one AR can contain multiple
    # Analyses, so better to just reindex the AR once instead of each time.
    # AR Catalog contains some metadata that that rely on the Analyses an
    # Analysis Request contains.
    if affected_ars:
        query = dict(UID=list(affected_ars), portal_type="AnalysisRequest")
        for ar_brain in api.search(query,
                                   CATALOG_ANALYSIS_REQUEST_LISTING):
            # Skip requests that already transitioned
            if ar_brain.review_state == 'to_be_verified':
                continue
            ar = api.get_object(ar_brain)
            if isTransitionAllowed(ar, "submit"):
                doActionFor(ar, "submit")
            else:
                ar.reindexObject()

    if affected_ws:
        query = dict(UID=list(affected_ws), portal_type="Worksheet")
        for ws_brain in api.search(query, CATALOG_WORKSHEET_LISTING):
            # Skip worksheets that already transitioned
            if ws_brain.review_state == 'to_be_verified':
                continue
            ws = api.get_object(ws_brain)
            if isTransitionAllowed(ws, "submit"):
                doActionFor(ws, "submit")

    message = PMF("Changes saved.")
    self.context.plone_utils.addPortalMessage(message, 'info')
    self.destination_url = self.request.get_header(
        "referer", self.context.absolute_url())
    self.request.response.redirect(self.destination_url)