def fix_ar_sample_workflow(brain_or_object):
    """Re-set the state of an AR, Sample and SamplePartition to match the
    least-early state of all contained valid/current analyses. Ignores
    retracted/rejected/cancelled analyses.

    :param brain_or_object: catalog brain or AnalysisRequest object. Anything
        that does not resolve to an IAnalysisRequest is silently ignored.
    """

    def log_change_state(ar_id, obj_id, src, dst):
        # Fix: the message was built but never emitted before
        msg = "While fixing {ar_id}: " \
              "state changed for {obj_id}: " \
              "{src} -> {dst}".format(**locals())
        logger.info(msg)

    ar = get_object(brain_or_object)
    if not IAnalysisRequest.providedBy(ar):
        return

    wf = api.get_tool('portal_workflow')
    arwf = wf['bika_ar_workflow']
    anwf = wf['bika_analysis_workflow']
    swf = wf['bika_sample_workflow']
    # NOTE(review): docstring says cancelled analyses are ignored too, but
    # 'cancelled' is not listed here -- confirm intended
    ignored = ['retracted', 'rejected']

    tmp = filter(lambda x: x[0] not in ignored, arwf.states.items())
    arstates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] not in ignored, swf.states.items())
    samplestates = OrderedDict(tmp)
    tmp = filter(lambda x: x[0] in arstates, anwf.states.items())
    anstates = OrderedDict(tmp)

    # find least-early analysis state
    # !!! Assumes states in definitions are roughly ordered earliest to latest
    ar_dest_state = arstates.items()[0][0]
    for anstate in anstates:
        if ar.getAnalyses(review_state=anstate):
            ar_dest_state = anstate

    # Force state of AR
    ar_state = get_review_status(ar)
    if ar_state != ar_dest_state:
        changeWorkflowState(ar, arwf.id, ar_dest_state)
        log_change_state(ar.id, ar.id, ar_state, ar_dest_state)

    # Force state of Sample, but only when the destination state exists in
    # the sample workflow and actually differs from the current one (the AR
    # branch above applies the same guard)
    sample = ar.getSample()
    sample_state = get_review_status(sample)
    if ar_dest_state in samplestates and sample_state != ar_dest_state:
        changeWorkflowState(sample, swf.id, ar_dest_state)
        log_change_state(ar.id, sample.id, sample_state, ar_dest_state)

    # Force states of Partitions
    for part in sample.objectValues():
        part_state = get_review_status(part)
        if part_state != ar_dest_state:
            # Fix: transition the partition itself; the original transitioned
            # the sample here, leaving partitions untouched
            changeWorkflowState(part, swf.id, ar_dest_state)
            log_change_state(ar.id, part.id, part_state, ar_dest_state)
def after_recover(sample):
    """Detaches the sample from its storage container on "recover" and rolls
    the sample back to the workflow state it had before it was stored.
    Promotes the "recover" transition to the primary sample when no stored
    partitions remain.
    """
    container = _api.get_storage_sample(api.get_uid(sample))
    if container:
        container.remove_object(sample)
    else:
        logger.warn("Container for Sample {} not found".format(sample.getId()))

    # Restore the status the sample had prior to being stored
    previous_state = get_previous_state(sample) or "sample_due"
    changeWorkflowState(sample, "bika_ar_workflow", previous_state)

    # Notify the sample has been modified and reindex it
    modified(sample)
    sample.reindexObject()

    # If the sample is a partition, try to promote to the primary
    primary = sample.getParentAnalysisRequest()
    if not primary:
        return

    # Recover the primary only when none of its partitions remain stored
    skip = ['stored']
    stored_parts = [part for part in primary.getDescendants()
                    if api.get_review_status(part) in skip]
    if not stored_parts:
        do_action_for(primary, "recover")
def _objectdata_cache_key(func, obj):
    """Compute the cache key for object data: a string combining the object's
    UID, review state and modification timestamp (millis), so the cache is
    invalidated whenever the object changes state or is modified.
    """
    return "{}-{}-{}".format(api.get_uid(obj),
                             api.get_review_status(obj),
                             api.get_modification_date(obj).millis())
def get_object_metadata(obj, **kw):
    """Get object metadata

    :param obj: Content object
    :returns: Dictionary of extracted object metadata
    """
    # placeholders that may be filled in from the request data below
    metadata = dict(action="", remote_address="", user_agent="",
                    referer="", comments="")
    # volatile data captured at call time
    metadata["actor"] = get_user_id()
    metadata["roles"] = get_roles()
    metadata["review_state"] = api.get_review_status(obj)
    metadata["active"] = api.is_active(obj)
    metadata["snapshot_created"] = DateTime().ISO()
    metadata["modified"] = api.get_modification_date(obj).ISO()
    # Update request data
    metadata.update(get_request_data())
    # allow metadata overrides
    metadata.update(kw)
    return metadata
def guard_rollback_to_open(worksheet):
    """Return whether 'rollback_to_receive' transition can be performed:
    true when at least one analysis on the worksheet is 'assigned'
    """
    return any(api.get_review_status(analysis) in ["assigned"]
               for analysis in worksheet.getAnalyses())
def _children_are_ready(obj, transition_id, dettached_states=None):
    """Checks whether all children (analyses) of the passed-in object
    (worksheet) have performed the given transition.

    Children whose state is in ``dettached_states`` are dismissed from the
    evaluation, but at least one non-detached child for which the transition
    was performed is required: if every child is detached, the function
    returns False. Inactive children make the check fail as well.
    """
    analyses = obj.getAnalyses()
    detached = 0
    for analysis in analyses:
        if dettached_states and \
                api.get_review_status(analysis) in dettached_states:
            detached += 1
            continue
        if not api.is_active(analysis):
            return False
        if not wasTransitionPerformed(analysis, transition_id):
            return False
    # False when every child (possibly zero) is in a detached state: the
    # "at least one transitioned child" condition is not satisfied then
    return detached != len(analyses)
def guard_unassign(duplicate_analysis):
    """Return whether the transition 'unassign' can be performed or not
    """
    source = duplicate_analysis.getAnalysis()
    if wf.isTransitionAllowed(source, "unassign"):
        return True
    if api.get_review_status(source) in ["retracted", "rejected", "unassigned"]:
        return True
    # Fall back to the generic analysis guard
    return analysis_guards.guard_unassign(duplicate_analysis)
def isVisible(self, field, mode="view", default="visible"):
    """Returns the visibility of the field: editable only while the context
    is in an editable state and the current user holds a suitable role
    """
    if mode == "edit":
        if api.get_review_status(self.context) not in \
                ["sample_received", "to_be_verified"]:
            return "invisible"
        # Only Lab Manager can edit Assay Date!
        user = api.get_current_user()
        user_roles = user.getRolesInContext(self.context)
        if not any(role in user_roles
                   for role in ["LabManager", "Manager", "Analyst"]):
            return "invisible"
    return default
def after_store(sample):
    """Event triggered after "store" transition takes place for a given sample
    """
    primary = sample.getParentAnalysisRequest()
    if not primary:
        return

    # Store primary sample if its partitions have been stored.
    # Partitions in some statuses won't be considered
    skip = ['cancelled', 'stored', 'retracted', 'rejected']
    pending = [part for part in primary.getDescendants()
               if api.get_review_status(part) not in skip]
    if not pending:
        # There are no partitions left, transition the primary
        do_action_for(primary, "store")
def get_ast_analyses(sample, short_title=None, skip_invalid=True):
    """Returns the ast analyses assigned to the sample passed in and for the
    microorganism name specified, if any
    """
    analyses = [api.get_object(an)
                for an in sample.getAnalyses(getPointOfCapture="ast")]
    if short_title:
        # Filter by microorganism name (short title)
        analyses = [an for an in analyses
                    if an.getShortTitle() == short_title]
    # Skip invalid analyses
    skip = ["cancelled", "retracted", "rejected"] if skip_invalid else []
    return [an for an in analyses if api.get_review_status(an) not in skip]
def add_analysis(self, instance, service, **kwargs): service_uid = api.get_uid(service) # Ensure we have suitable parameters specs = kwargs.get("specs") or {} # Get the hidden status for the service hidden = kwargs.get("hidden") or [] hidden = filter(lambda d: d.get("uid") == service_uid, hidden) hidden = hidden and hidden[0].get("hidden") or service.getHidden() # Get the price for the service prices = kwargs.get("prices") or {} price = prices.get(service_uid) or service.getPrice() # Gets the analysis or creates the analysis for this service # Note this returns a list, because is possible to have multiple # partitions with same analysis analyses = self.resolve_analyses(instance, service) if not analyses: # Create the analysis keyword = service.getKeyword() logger.info("Creating new analysis '{}'".format(keyword)) analysis = create_analysis(instance, service) analyses.append(analysis) skip = ["cancelled", "retracted", "rejected"] for analysis in analyses: # Skip analyses to better not modify if api.get_review_status(analysis) in skip: continue # Set the hidden status analysis.setHidden(hidden) # Set the price of the Analysis analysis.setPrice(price) # Set the internal use status parent_sample = analysis.getRequest() analysis.setInternalUse(parent_sample.getInternalUse()) # Set the result range to the analysis analysis_rr = specs.get(service_uid) or analysis.getResultsRange() analysis.setResultsRange(analysis_rr) analysis.reindexObject()
def get_client_aware_html_image(obj):
    """Renders an icon based on the client the object belongs to
    """
    if is_from_external_client(obj):
        icon = "lock.png"
        title = _("Private, from an external client")
    elif is_from_internal_client(obj):
        if api.get_review_status(obj) == "shared":
            icon = "share.png"
            title = _("Shared, from an internal client")
        else:
            icon = "share_lock.png"
            title = _("From an internal client, but not shared")
    else:
        logger.warn("No client assigned for {}".format(repr(obj)))
        icon = "exclamation_red.png"
        title = _("No client assigned")
    return get_html_image(icon, title=title)
def get_identified_microorganisms(sample):
    """Returns the identified microorganisms from the sample passed-in. It
    resolves the microorganisms by looking to the results of the
    "Identification" analysis
    """
    analyses = sample.getAnalyses(getKeyword=IDENTIFICATION_KEY,
                                  full_objects=True)

    # Discard invalid analyses
    invalid = ["rejected", "cancelled", "retracted"]
    analyses = [an for an in analyses
                if api.get_review_status(an) not in invalid]

    # Collect the names of the selected microorganisms
    names = []
    for analysis in analyses:
        names.extend(get_microorganisms_from_result(analysis))

    # Resolve the microorganism objects by title
    microorganisms = api.get_setup().microorganisms.objectValues()
    return [m for m in microorganisms if api.get_title(m) in names]
def get_non_compliant_analyses(self):
    """Returns the list of "title (keyword)" entries for the analyses of this
    sample whose individually-set result range is not compliant with the
    result range of the Sample
    """
    skip = ["cancelled", "retracted", "rejected"]
    non_compliant = set()

    # Check if the results ranges set to analyses individually remain
    # compliant with the Sample's ResultRange
    for analysis in self.context.getAnalyses(full_objects=True):
        # Skip non-valid/inactive analyses
        if api.get_review_status(analysis) in skip:
            continue
        if is_result_range_compliant(analysis):
            continue
        # Result range for this service has been changed manually, it does
        # not match with sample's ResultRange
        entry = "{} ({})".format(api.get_title(analysis),
                                 analysis.getKeyword())
        non_compliant.add(entry)

    # Return the de-duplicated entries of non-compliant analyses
    return list(non_compliant)
Sample Location;\ Sampler;\ Sampling Date;\ Sampling Time;\ Test Date;\ Test Time;\ Analyte;\ Result\n") #Get all ARs (Sample) ARs = api.search({'portal_type': 'AnalysisRequest'}) ##### Get Sample Data ##### for AR_brain in ARs: #AR Object AR = api.get_object(AR_brain) #Status status = api.get_review_status(AR) #Batch ## Title ## Received Date ## Test Date batch = AR.getBatch() if batch is not None and batch != '': batch_title = api.get_title(batch) recv_datetime = batch.getBatchDate() if recv_datetime is not None and recv_datetime != '': received_date = recv_datetime.Date() received_time = recv_datetime.Time() test_datetime = batch.DateTimeIn if test_datetime is not None and test_datetime != '': test_date = test_datetime.Date() test_time = test_datetime.Time()
def __call__(self):
    # TODO: Refactor to permission
    # Control the visibility of the invoice create/print document actions
    status = api.get_review_status(self.context)
    if status in ["verified", "published"]:
        self.request["verified"] = 1
    return self.template()
def is_verified(self):
    """Checks if the AR is verified
    """
    state = api.get_review_status(self.context)
    return state in ["verified", "published", "invalid"]
def is_provisional(self):
    """Returns whether the instance is in a provisional (non-final) state:
    either invalid or not yet verified/published
    """
    if self.is_invalid():
        return True
    state = api.get_review_status(self.instance)
    return state not in ['verified', 'published']
def filter_by_state(brains_or_objects, state):
    """Filters the objects passed in by state
    """
    matches = []
    for brain_or_object in brains_or_objects:
        obj = _api.get_object(brain_or_object)
        if _api.get_review_status(obj) == state:
            matches.append(obj)
    return matches
def is_cancelled(self):
    """Checks if the AR is cancelled
    """
    status = api.get_review_status(self.context)
    return status == "cancelled"
def is_stored(self):
    """Returns whether the current sample is stored
    """
    status = api.get_review_status(self.context)
    return status == "stored"
def doActionToAnalysis(source_analysis, action):
    """This functions executes the action against the analysis.

    :param source_analysis: a full analysis object. The new analyses will be
        cloned from it.
    :param action: a dictionary representing an action row.
        [{'action': 'duplicate', ...}, {,}, ...]
    :returns: the new analysis, or None on unsupported input/action
    """
    if not IRequestAnalysis.providedBy(source_analysis):
        # Only routine analyses (assigned to a Request) are supported
        logger.warn("Only IRequestAnalysis are supported in reflex testing")
        return None

    state = api.get_review_status(source_analysis)
    action_id = action.get('action', '')
    if action_id == "new_analysis":
        # Create a new analysis (different from the original)
        service_uid = action.get("new_analysis", "")
        if not api.is_uid(service_uid):
            logger.error("Not a valid UID: {}".format(service_uid))
            return None
        service = api.get_object_by_uid(service_uid, None)
        if not service or not IAnalysisService.providedBy(service):
            logger.error("No valid service for UID {}".format(service_uid))
            return None
        analysis = create_analysis(source_analysis.aq_parent, service)
        analysis.setSamplePartition(source_analysis.getSamplePartition())
        # Force the new analysis into a receivable state
        changeWorkflowState(analysis, "bika_analysis_workflow",
                            "sample_received")

    elif action_id == 'setvisibility':
        target_id = action.get('setvisibilityof', '')
        if target_id == "original":
            analysis = source_analysis
        else:
            analysis = _fetch_analysis_for_local_id(source_analysis,
                                                    target_id)

    elif action_id == 'repeat' and state != 'retracted':
        # Repeat an analysis consist on cancel it and then create a new
        # analysis with the same analysis service used for the canceled
        # one (always working with the same sample). It'll do a retract
        # action
        doActionFor(source_analysis, 'retract')
        analysis_request = source_analysis.getRequest()
        # the retract action creates the replacement; pick the newest one
        analysis = analysis_request.getAnalyses(sort_on="created")[-1]
        analysis = api.get_object(analysis)
        analysis.setResult('')

    elif action_id == 'duplicate' or state == 'retracted':
        # NOTE: 'repeat' on an already retracted analysis lands here too
        analysis = duplicateAnalysis(source_analysis)
        analysis.setResult('')

    elif action_id == 'setresult':
        target = action.get('setresulton', '')
        # discrete result takes precedence; falls back to 'setresultvalue'
        result_value = action.get('setresultdiscrete', '') or \
            action['setresultvalue']
        if target == 'original':
            analysis = source_analysis.getOriginalReflexedAnalysis()
            analysis.setResult(result_value)
        elif target == 'new':
            # Create a new analysis
            analysis = duplicateAnalysis(source_analysis)
            analysis.setResult(result_value)
            doActionFor(analysis, 'submit')
        else:
            logger.error("Unknown 'setresulton' directive: {}".format(target))
            return None

    else:
        logger.error("Unknown Reflex Rule action: {}".format(action_id))
        return None

    # Tag the resulting analysis with the reflex-rule bookkeeping
    analysis.setReflexRuleAction(action_id)
    analysis.setIsReflexAnalysis(True)
    analysis.setReflexAnalysisOf(source_analysis)
    analysis.setReflexRuleActionsTriggered(
        source_analysis.getReflexRuleActionsTriggered())
    if action.get('showinreport', '') == "invisible":
        analysis.setHidden(True)
    elif action.get('showinreport', '') == "visible":
        analysis.setHidden(False)
    # Setting the original reflected analysis (propagate the root of the
    # reflex chain, or start the chain at the source analysis)
    if source_analysis.getOriginalReflexedAnalysis():
        analysis.setOriginalReflexedAnalysis(
            source_analysis.getOriginalReflexedAnalysis())
    else:
        analysis.setOriginalReflexedAnalysis(source_analysis)
    analysis.setReflexRuleLocalID(action.get('an_result_id', ''))
    # Setting the remarks to base analysis
    #remarks = get_remarks(action, analysis)
    #analysis.setRemarks(remarks)
    return analysis