def _is_frozen(self, brain_or_object, *frozen_transitions):
    """Check if the passed in object is frozen: the object is cancelled,
    inactive or has been verified at some point

    :param brain_or_object: Analysis or AR Brain/Object
    :param frozen_transitions: additional transitions that freeze the object
    :returns: True if the object is frozen
    """
    if not api.is_active(brain_or_object):
        return True
    object = api.get_object(brain_or_object)
    frozen_trans = set(frozen_transitions)
    frozen_trans.add('verify')
    performed_transitions = set(getReviewHistoryActionsList(object))
    if frozen_trans.intersection(performed_transitions):
        return True
    return False

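
# --- Usage sketch (not part of the original module; names are assumptions) ---
# Shows the varargs contract of the variant above: 'verify' is always treated
# as a freezing transition, and any extra transition ids passed by the caller
# are added to that set.
def _filter_unfrozen(self, brains_or_objects, *extra_frozen_transitions):
    """Hypothetical helper: keep only the items that may still be modified."""
    return [item for item in brains_or_objects
            if not self._is_frozen(item, *extra_frozen_transitions)]
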
def _is_frozen(self, brain_or_object):
    """Check if the passed in object is frozen: the object is cancelled,
    inactive or has been verified at some point

    :param brain_or_object: Analysis or AR Brain/Object
    :returns: True if the object is frozen
    """
    if not api.is_active(brain_or_object):
        return True
    if api.get_workflow_status_of(brain_or_object) in FROZEN_STATES:
        return True
    # Check the review history if one of the frozen transitions was done
    object = api.get_object(brain_or_object)
    performed_transitions = set(getReviewHistoryActionsList(object))
    if set(FROZEN_TRANSITIONS).intersection(performed_transitions):
        return True
    return False

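
# --- Illustrative sketch (assumption: example values only) ---
# The variant above consults two module-level constants defined elsewhere in
# the real module. Plausible values, consistent with the docstring wording
# ("cancelled, inactive or has been verified at some point"), could be:
_EXAMPLE_FROZEN_STATES = ("cancelled", "inactive", "verified")
_EXAMPLE_FROZEN_TRANSITIONS = ("verify",)
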
def _create_ar(self, context, request):
    """Creates an AnalysisRequest object, with supporting Sample, Partition
    and Analysis objects. The client is retrieved from the obj_path key in
    the request.

    Required request parameters:

    - Contact: One client contact Fullname. The contact must exist in the
      specified client. The first Contact with the specified value in its
      Fullname field will be used.
    - SampleType_<index>: Must be an existing sample type.

    Optional request parameters:

    - CCContacts: A list of contact Fullnames, which will be copied to all
      messages related to this AR and its sample or results.
    - CCEmails: A list of email addresses to include as above.
    - Sample_id: Create a secondary AR with an existing sample. If
      unspecified, a new sample is created.
    - Specification: a lookup to set Analysis specs default values for all
      analyses.
    - Analysis_Specification: specs (or overrides) per analysis, using a
      special lookup format:
      &Analysis_Specification:list=<Keyword>:min:max:error&...
    """
    wftool = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    bsc = getToolByName(context, 'bika_setup_catalog')
    pc = getToolByName(context, 'portal_catalog')
    ret = {
        "url": router.url_for("create", force_external=True),
        "success": True,
        "error": False,
    }
    SamplingWorkflowEnabled = context.bika_setup.getSamplingWorkflowEnabled()
    for field in ['Client', 'SampleType', 'Contact', 'SamplingDate',
                  'Services']:
        self.require(field)
        self.used(field)

    try:
        client = resolve_request_lookup(context, request,
                                        'Client')[0].getObject()
    except IndexError:
        raise Exception("Client not found")

    secondary = False
    sample = None
    # Sample_id
    if 'Sample' in request:
        # Secondary AR
        try:
            sample = resolve_request_lookup(context, request,
                                            'Sample')[0].getObject()
        except IndexError:
            raise Exception("Sample not found")
        secondary = True
    else:
        # Primary AR
        sample = _createObjectByType("Sample", client, tmpID())
        sample.unmarkCreationFlag()
        fields = set_fields_from_request(sample, request)
        for field in fields:
            self.used(field)
        sample._renameAfterCreation()
        sample.setSampleID(sample.getId())
        sample.setSamplingWorkflowEnabled(SamplingWorkflowEnabled)
        event.notify(ObjectInitializedEvent(sample))
        sample.at_post_create_script()
    ret['sample_id'] = sample.getId()

    parts = [{'services': [],
              'container': [],
              'preservation': '',
              'separate': False}]

    specs = self.get_specs_from_request()

    ar = _createObjectByType("AnalysisRequest", client, tmpID())
    ar.unmarkCreationFlag()
    fields = set_fields_from_request(ar, request)
    for field in fields:
        self.used(field)
    ar.setSample(sample)
    ar._renameAfterCreation()
    ret['ar_id'] = ar.getId()

    brains = resolve_request_lookup(context, request, 'Services')
    service_uids = [p.UID for p in brains]

    # If there is a profile, add its services' UIDs
    brains = resolve_request_lookup(context, request, 'Profiles')
    profiles_uids = [p.UID for p in brains]
    profiles_uids = ','.join(profiles_uids)
    profiles_dict = {'Profiles': profiles_uids}
    service_uids = get_services_uids(
        context=context, analyses_serv=service_uids, values=profiles_dict)

    ar.setAnalyses(service_uids, specs=specs)
    new_analyses = ar.getAnalyses(full_objects=True)
    ar.reindexObject()
    event.notify(ObjectInitializedEvent(ar))
    ar.at_post_create_script()

    # Create sample partitions
    parts_and_services = {}
    for _i in range(len(parts)):
        p = parts[_i]
        part_prefix = sample.getId() + "-P"
        if '%s%s' % (part_prefix, _i + 1) in sample.objectIds():
            parts[_i]['object'] = sample['%s%s' % (part_prefix, _i + 1)]
            parts_and_services['%s%s' % (part_prefix, _i + 1)] = p['services']
            part = parts[_i]['object']
        else:
            part = _createObjectByType("SamplePartition", sample, tmpID())
            parts[_i]['object'] = part
            container = None
            preservation = p['preservation']
            parts[_i]['prepreserved'] = False
            part.edit(
                Container=container,
                Preservation=preservation,
            )
            part.processForm()
            parts_and_services[part.id] = p['services']

        # Add analyses to sample partitions
        # XXX jsonapi create AR: right now, all new analyses are linked to
        # the first samplepartition
        if new_analyses:
            analyses = list(part.getAnalyses())
            analyses.extend(new_analyses)
            for analysis in new_analyses:
                analysis.setSamplePartition(part)
            part.setAnalyses(analyses)

    action = 'no_sampling_workflow'
    if SamplingWorkflowEnabled:
        action = 'sampling_workflow'
    wftool.doActionFor(ar, action)

    if secondary:
        # If secondary AR, then we need to manually transition the AR (and
        # its children) to fit with the Sample Partition's current state
        sampleactions = getReviewHistoryActionsList(sample)
        doActionsFor(ar, sampleactions)
    else:
        # If Preservation is required for some partitions, and the
        # SamplingWorkflow is disabled, we need to transition to
        # to_be_preserved manually.
        if not SamplingWorkflowEnabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)

    # Transition pre-preserved partitions
    for p in parts:
        if 'prepreserved' in p and p['prepreserved']:
            part = p['object']
            state = wftool.getInfoFor(part, 'review_state')
            if state == 'to_be_preserved':
                doActionFor(part, 'preserve')

    if self.unused:
        raise BadRequest("The following request fields were not used: %s. "
                         "Request aborted." % self.unused)

    return ret

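
# --- Client-side usage sketch (not part of the module) ---
# The endpoint path, credentials, lookup syntax and field values below are
# assumptions for illustration only. The handler above requires the request
# fields Client, SampleType, Contact, SamplingDate and Services (resolved
# through resolve_request_lookup), and accepts an optional Sample lookup to
# create a secondary AR against an existing sample.
import requests

payload = {
    "Client": "Happy Hills",          # assumed client title
    "Contact": "Rita Mohale",         # client contact Fullname
    "SampleType": "Water",            # must be an existing sample type
    "SamplingDate": "2017-01-31",
    "Services": "Calcium,Magnesium",  # assumed services lookup format
}
response = requests.post("http://localhost:8080/site/@@API/create",
                         data=payload, auth=("admin", "secret"))
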
def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)

    :param client: The container (Client) in which the ARs will be created.
    :param request: The current Request object.
    :param values: a dict, where keys are AR|Sample schema field names.
    :param analyses: Analysis services list. If specified, augments the
        values in values['Analyses']. May consist of service objects, UIDs,
        or Keywords.
    :param partitions: A list of dictionaries, if specific partitions are
        required. If not specified, the AR's sample is created with a single
        partition.
    :param specifications: These values augment those found in
        values['Specifications'].
    :param prices: Allow different prices to be set for analyses. If not
        set, prices are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = deepcopy(values)

    # Create a new sample, or locate the existing one for a secondary AR
    secondary = False
    sample = None
    if not values.get('Sample', False):
        sample = create_sample(client, request, values)
    else:
        sample = get_sample_from_values(client, values)
        secondary = True

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', client, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    ar.edit(RequestID=ar.getId())

    # Set the analysis request analyses. The 'analyses' param holds the
    # analysis services
    analyses = analyses if analyses else []
    service_uids = get_services_uids(
        context=client, analyses_serv=analyses, values=values)
    # processForm has already created the analyses, but here we re-create
    # them with specs and prices. This function, even though it is called
    # 'set', deletes the old analyses, so eventually we obtain the desired
    # analyses.
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)
    analyses = ar.getAnalyses(full_objects=True)

    # Create sample partitions
    if not partitions:
        partitions = values.get('Partitions', [{'services': service_uids}])

    part_num = 0
    prefix = sample.getId() + "-P"
    if secondary:
        # Always create new partitions for a secondary AR, because it does
        # not make sense to reuse the partitions used in a previous AR!
        sparts = sample.getSamplePartitions()
        for spart in sparts:
            spartnum = int(spart.getId().split(prefix)[1])
            if spartnum > part_num:
                part_num = spartnum

    for n, partition in enumerate(partitions):
        # Calculate partition id
        partition_id = '%s%s' % (prefix, part_num + 1)
        partition['part_id'] = partition_id
        # Point to or create sample partition
        if partition_id in sample.objectIds():
            partition['object'] = sample[partition_id]
        else:
            partition['object'] = create_samplepartition(
                sample, partition, analyses)
        part_num += 1

    # At this point we have a fully created AR, with a Sample, Partitions and
    # Analyses, but the state of all of them is still the initial one
    # ("sample_registered"). We can now transition the whole thing (instead
    # of doing it manually for each object we created). After- and
    # Before-transitions will take care of cascading and promoting the
    # transitions in all the objects "associated" to this Analysis Request.
    sampling_workflow_enabled = sample.getSamplingWorkflowEnabled()
    action = 'no_sampling_workflow'
    if sampling_workflow_enabled:
        action = 'sampling_workflow'

    # Transition the Analysis Request and related objects to "sampled" (if
    # the sampling workflow is not enabled) or to "to_be_sampled" otherwise.
    doActionFor(ar, action)

    if secondary:
        # If secondary AR, then we need to manually transition the AR (and
        # its children) to fit with the Sample Partition's current state
        sampleactions = getReviewHistoryActionsList(sample)
        doActionsFor(ar, sampleactions)

        # We need a workaround here in order to transition partitions.
        # auto_no_preservation_required and auto_preservation_required are
        # auto transitions applied to analysis requests, but partitions don't
        # have them, so we need to replace them with their sample_workflow
        # equivalents.
        if 'auto_no_preservation_required' in sampleactions:
            index = sampleactions.index('auto_no_preservation_required')
            sampleactions[index] = 'sample_due'
        elif 'auto_preservation_required' in sampleactions:
            index = sampleactions.index('auto_preservation_required')
            sampleactions[index] = 'to_be_preserved'

        # We need to transition the partitions manually
        for partition in partitions:
            part = partition['object']
            doActionsFor(part, sampleactions)

    # Transition pre-preserved partitions
    for p in partitions:
        if 'prepreserved' in p and p['prepreserved']:
            part = p['object']
            doActionFor(part, 'preserve')

    # Once the AR is fully created, check if there are rejection reasons
    reject_field = values.get('RejectionReasons', '')
    if reject_field and reject_field.get('checkbox', False):
        doActionFor(ar, 'reject')

    return ar

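
# --- Usage sketch (not part of the module; names and field values are
# assumptions). 'values' keys must be AR/Sample schema field names, and
# 'analyses' may hold service objects, UIDs or keywords, as documented above.
def _example_create_ar(client, request, contact, sampletype):
    """Hypothetical caller showing the documented parameters."""
    values = {
        "Contact": contact,          # a client contact (object or UID)
        "SampleType": sampletype,    # an existing sample type (object or UID)
        "SamplingDate": "2017-01-31",
    }
    # Specs and prices are omitted, so defaults from the analysis services
    # apply; a single default partition will be created for the sample.
    return create_analysisrequest(client, request, values,
                                  analyses=["Ca", "Mg"])
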