def after_recover(sample):
    """Event fired after a "recover" transition for the given sample.

    Unassigns the sample from its storage container and transitions the
    sample back to the state it had before it was stored. If the sample is
    a partition and no other partitions remain stored, the "recover"
    transition is promoted to the primary sample as well.

    :param sample: the sample (AnalysisRequest) that has been recovered
    """
    container = _api.get_storage_sample(api.get_uid(sample))
    if container:
        container.remove_object(sample)
    else:
        # logger.warn is deprecated in favour of logger.warning
        logger.warning("Container for Sample {} not found".format(
            sample.getId()))

    # Transition the sample to the state before it was stored. Fall back to
    # "sample_due" when no previous state can be resolved
    previous_state = get_previous_state(sample) or "sample_due"
    changeWorkflowState(sample, "bika_ar_workflow", previous_state)

    # Notify the sample has been modified
    modified(sample)

    # Reindex the sample
    sample.reindexObject()

    # If the sample is a partition, try to promote to the primary
    primary = sample.getParentAnalysisRequest()
    if not primary:
        return

    # Recover primary sample if all its partitions have been recovered.
    # Use a list comprehension instead of filter() so the emptiness check
    # below works on both Python 2 and 3 (a py3 filter object is always
    # truthy, which would make this check pass silently wrong)
    skip = ['stored']
    parts = primary.getDescendants()
    parts = [part for part in parts if api.get_review_status(part) in skip]
    if not parts:
        # There are no stored partitions left, transition the primary
        do_action_for(primary, "recover")
def after_no_sampling_workflow(analysis_request):
    """Function triggered after "no_sampling_workflow transition for the
    Analysis Request passed in is performed
    """
    # When auto-receive of samples is enabled, push the sample straight to
    # "received". The transition only takes place if the current user has
    # enough privileges (do_action_for already takes care of this)
    if api.get_setup().getAutoreceiveSamples():
        do_action_for(analysis_request, "receive")
def do_action_to_analyses(analysis_request, transition_id, all_analyses=False):
    """Cascades the transition to the analysis request analyses. If
    all_analyses is set to True, the transition will be triggered for all
    analyses of this analysis request, those from the descendant partitions
    included.
    """
    if all_analyses:
        # Include analyses from descendant partitions as well
        targets = analysis_request.getAnalyses(full_objects=True)
    else:
        # Only the analyses directly contained by this analysis request
        targets = analysis_request.objectValues("Analysis")

    for target in targets:
        do_action_for(target, transition_id)
def after_store(sample):
    """Event triggered after "store" transition takes place for a given sample.

    If the sample is a partition and all of its sibling partitions are
    already stored (or in a terminal state), the "store" transition is
    promoted to the primary sample too.

    :param sample: the sample (AnalysisRequest) that has been stored
    """
    primary = sample.getParentAnalysisRequest()
    if not primary:
        # Not a partition, nothing to promote
        return

    # Store primary sample if its partitions have been stored.
    # Partitions in some statuses won't be considered.
    skip = ['cancelled', 'stored', 'retracted', 'rejected']
    parts = primary.getDescendants()
    # Use a list comprehension instead of filter() so the emptiness check
    # below works on both Python 2 and 3 (a py3 filter object is always
    # truthy, so "if not parts" would never trigger)
    parts = [part for part in parts
             if api.get_review_status(part) not in skip]
    if not parts:
        # There are no partitions left, transition the primary
        do_action_for(primary, "store")
def create_sample(services, client, contact, sample_type, receive=True):
    """Creates a new sample with the specified services
    """
    # Field values for the new Analysis Request
    values = {
        'Client': client.UID(),
        'Contact': contact.UID(),
        'DateSampled': DateTime().strftime("%Y-%m-%d"),
        'SampleType': sample_type.UID(),
    }
    uids = map(_api.get_uid, services)
    sample = create_analysisrequest(client, _api.get_request(), values, uids)
    if receive:
        # Push the new sample to the "received" state right away
        do_action_for(sample, "receive")
    transaction.commit()
    return sample
def rollback_to_receive_inconsistent_ars(portal):
    """Rolls back Analysis Requests in "to_be_verified" state for which the
    rollback_to_receive transition is allowed
    """
    logger.info("Rolling back inconsistent Analysis Requests ...")
    query = dict(portal_type="AnalysisRequest",
                 review_state=["to_be_verified"])
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        request = api.get_object(brain)
        if not isTransitionAllowed(request, "rollback_to_receive"):
            # Not eligible; shrink the total used for progress reporting
            total -= 1
            continue
        if num % 100 == 0:
            # Log progress every 100 items
            logger.info("Rolling back inconsistent AR '{}': {}/{}".format(
                request.getId(), num, total))
        do_action_for(request, "rollback_to_receive")
def rollback_to_open_inconsistent_ars(portal):
    """Rolls back Worksheets in "to_be_verified" state for which the
    rollback_to_open transition is allowed

    NOTE(review): despite the "_ars" suffix in the name, this operates on
    Worksheets (see the query below) — confirm against callers
    """
    logger.info("Rolling back inconsistent Worksheets ...")
    query = dict(portal_type="Worksheet",
                 review_state=["to_be_verified"])
    brains = api.search(query, CATALOG_WORKSHEET_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        ws = api.get_object(brain)
        if not isTransitionAllowed(ws, "rollback_to_open"):
            # Not eligible; shrink the total used for progress reporting
            total -= 1
            continue
        if num % 100 == 0:
            # Log progress every 100 items
            logger.info(
                "Rolling back inconsistent Worksheet '{}': {}/{}".format(
                    ws.getId(), num, total))
        do_action_for(ws, "rollback_to_open")
def do_action(self, action, objects):
    """Performs the workflow transition passed in and returns the list of
    objects that have been successfully transitioned

    :param action: id of the workflow transition to perform
    :param objects: objects, brains or uids to transition
    :returns: list of objects that were successfully transitioned
    """
    transitioned = []
    pool = ActionHandlerPool.get_instance()
    pool.queue_pool()
    try:
        for obj in objects:
            obj = api.get_object(obj)
            success, message = do_action_for(obj, action)
            if success:
                transitioned.append(obj)
    finally:
        # Always resume the pool, even if a transition raises, so the
        # shared ActionHandlerPool is never left in a queued state
        pool.resume()
    return transitioned
def do_action(self, action, objects):
    """Performs the workflow transition passed in and returns the list of
    objects that have been successfully transitioned

    :param action: id of the workflow transition to perform
    :param objects: objects, brains or uids to transition
    :returns: list of objects that were successfully transitioned
    """
    start = time.time()
    transitioned = []
    pool = ActionHandlerPool.get_instance()
    pool.queue_pool()
    try:
        for obj in objects:
            obj = api.get_object(obj)
            success, message = do_action_for(obj, action)
            if success:
                transitioned.append(obj)
    finally:
        # Always resume the pool, even if a transition raises, so the
        # shared ActionHandlerPool is never left in a queued state
        pool.resume()
    end = time.time()
    logger.info("Action '{}' for {} objects took {:.2f}s".format(
        action, len(transitioned), end - start))
    return transitioned
def do_action_to_descendants(analysis_request, transition_id):
    """Cascades the transition passed in to the descendant partitions
    """
    # Only direct descendants: each partition promotes further on its own
    partitions = analysis_request.getDescendants(all_descendants=False)
    for partition in partitions:
        do_action_for(partition, transition_id)
def do_action_to_ancestors(analysis_request, transition_id):
    """Promotes the transition passed in to ancestors, if any
    """
    primary = analysis_request.getParentAnalysisRequest()
    if primary:
        do_action_for(primary, transition_id)