def create_partition_for_storage(sample_obj_brain_or_uid):
    """Creates an empty partition suitable for storage from the given sample

    If the sample passed in is a partition, generates a copy of the same
    partition without analyses set, but keeping the same parent. If the
    sample passed in is a primary sample, generates a new partition, but
    without analyses
    """
    sample = get_object(sample_obj_brain_or_uid)
    logger.info("Creating partition for storage: {}".format(get_id(sample)))

    # Fields that must never be copied onto the new partition
    PARTITION_SKIP_FIELDS = [
        "Analyses",
        "Attachment",
        "Client",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
    ]

    # A partition of a partition hangs from the same primary
    if sample.isPartition():
        primary = sample.getParentAnalysisRequest()
    else:
        primary = sample

    # Base record for the new partition
    record = {"ParentAnalysisRequest": get_uid(primary)}

    # Copy every remaining (non-computed, non-skipped) field value over
    for name, field in get_fields(sample).items():
        if field.type == "computed":
            logger.info("Skipping computed field {}".format(name))
            continue
        if name in PARTITION_SKIP_FIELDS:
            logger.info("Skipping field {}".format(name))
            continue
        value = field.get(sample)
        record[name] = value
        logger.info("Update record '{}': {}".format(name, repr(value)))

    partition = crar(sample.getClient(), request={}, values=record)

    # Force status to "stored"
    wf.changeWorkflowState(partition, "bika_ar_workflow", "stored")

    # Reindex the primary AR so it is no longer seen as a root ancestor
    primary.reindexObject(idxs=["isRootAncestor"])
    return partition
def create_ar(self):
    """Create and return an AR for the first client/contact/sampletype,
    requesting the first three available analysis services.
    """
    client = self.portal.clients["client-1"]
    contact = client.getContacts()[0]
    sampletype = self.portal.bika_setup.bika_sampletypes["sampletype-1"]
    values = {
        "Client": api.get_uid(client),
        "Contact": api.get_uid(contact),
        "DateSampled": self.timestamp(),
        "SampleType": api.get_uid(sampletype),
    }
    services = self.get_services()[:3]
    service_uids = [api.get_uid(service) for service in services]
    return crar(client, self.request, values, service_uids)
def create_partition(self, primary_uid, sampletype_uid, analyses_uids):
    """Create a new partition (AR)
    """
    logger.info("*** CREATE PARTITION ***")
    ar = self.get_object_by_uid(primary_uid)
    sample = ar.getSample()

    # Seed the record with the partition-specific values
    record = {
        "PrimarySample": api.get_uid(sample),
        "InternalUse": True,
        "PrimaryAnalysisRequest": primary_uid,
        "SampleType": sampletype_uid,
    }

    # Copy the remaining field values from the primary AR, skipping
    # computed fields and the explicitly excluded ones
    for name, field in api.get_fields(ar).items():
        if self.is_computed_field(field):
            logger.info("Skipping computed field {}".format(name))
            continue
        if name in PARTITION_SKIP_FIELDS:
            logger.info("Skipping field {}".format(name))
            continue
        value = field.get(ar)
        record[name] = value
        logger.info("Update record '{}': {}".format(name, repr(value)))

    client = ar.getClient()
    analyses = [self.get_object_by_uid(uid) for uid in analyses_uids]
    services = [analysis.getAnalysisService() for analysis in analyses]
    partition = crar(
        client,
        self.request,
        record,
        analyses=services,
        specifications=self.get_specifications_for(ar)
    )
    # Flag the partition as for internal use only
    partition.Schema().getField("InternalUse").set(partition, True)
    return partition
def create_partition(self, primary_uid, sampletype_uid, analyses_uids):
    """Create a new partition (AR)
    """
    logger.info("*** CREATE PARTITION ***")
    ar = self.get_object_by_uid(primary_uid)

    # Partition-specific values take precedence
    record = {
        "InternalUse": True,
        "ParentAnalysisRequest": primary_uid,
        "SampleType": sampletype_uid,
    }

    # Clone the remaining field values from the parent AR, except computed
    # fields and those explicitly excluded for partitions
    for name, field in api.get_fields(ar).items():
        if self.is_computed_field(field):
            logger.info("Skipping computed field {}".format(name))
            continue
        if name in PARTITION_SKIP_FIELDS:
            logger.info("Skipping field {}".format(name))
            continue
        value = field.get(ar)
        record[name] = value
        logger.info("Update record '{}': {}".format(name, repr(value)))

    client = ar.getClient()
    analyses = [self.get_object_by_uid(uid) for uid in analyses_uids]
    services = [analysis.getAnalysisService() for analysis in analyses]
    partition = crar(client,
                     self.request,
                     record,
                     analyses=services,
                     specifications=self.get_specifications_for(ar))

    # Reindex Parent Analysis Request
    # TODO Workflow - AnalysisRequest - Partitions creation
    ar.reindexObject(idxs=["isRootAncestor"])
    return partition
def process_form(self):
    """Create one Analysis Request per validated form state and return a
    JSON payload describing the outcome (plus sticker info if enabled).
    """
    portal_catalog = getToolByName(self.context, 'portal_catalog')
    from bika.lims.utils.analysisrequest import \
        create_analysisrequest as crar

    ar_titles = []
    new_ar_uids = []
    for arnum, state in self.valid_states.items():
        client = portal_catalog(UID=state['Client'])[0].getObject()
        ar = crar(client, self.request, state)
        ar_titles.append(ar.Title())
        # Automatic label printing won't print "register" labels for
        # Secondary ARs
        if ar.Title()[-2:] == '01':
            new_ar_uids.append(ar.UID())

    # Display the appropriate message after creation
    if len(ar_titles) > 1:
        message = _('Analysis requests ${ARs} were successfully created.',
                    mapping={'ARs': safe_unicode(', '.join(ar_titles))})
    else:
        message = _('Analysis request ${AR} was successfully created.',
                    mapping={'AR': safe_unicode(ar_titles[0])})
    self.context.plone_utils.addPortalMessage(message, 'info')

    setup = self.context.bika_setup
    if new_ar_uids and 'register' in setup.getAutoPrintStickers():
        return json.dumps({
            'success': message,
            'stickers': new_ar_uids,
            'stickertemplate': setup.getAutoStickerTemplate()
        })
    return json.dumps({'success': message})
def process_form(self):
    """Create the requested Analysis Requests and answer with a JSON
    summary, including sticker data when auto-printing is configured.
    """
    portal_catalog = getToolByName(self.context, 'portal_catalog')
    from bika.lims.utils.analysisrequest import \
        create_analysisrequest as crar

    created_titles = []
    new_ar_uids = []
    for arnum, state in self.valid_states.items():
        # Create the Analysis Request for this form entry
        ar = crar(
            portal_catalog(UID=state['Client'])[0].getObject(),
            self.request,
            state
        )
        title = ar.Title()
        created_titles.append(title)
        # Automatic label printing won't print "register" labels for
        # Secondary ARs
        if title[-2:] == '01':
            new_ar_uids.append(ar.UID())

    # Build the user-facing confirmation message
    if len(created_titles) > 1:
        message = _('Analysis requests ${ARs} were successfully created.',
                    mapping={'ARs': safe_unicode(', '.join(created_titles))})
    else:
        message = _('Analysis request ${AR} was successfully created.',
                    mapping={'AR': safe_unicode(created_titles[0])})
    self.context.plone_utils.addPortalMessage(message, 'info')

    auto_stickers = self.context.bika_setup.getAutoPrintStickers()
    if new_ar_uids and 'register' in auto_stickers:
        return json.dumps({
            'success': message,
            'stickers': new_ar_uids,
            'stickertemplate':
                self.context.bika_setup.getAutoStickerTemplate()
        })
    return json.dumps({'success': message})
def create_analysisrequest(context, request, values):
    """Thin convenience wrapper that delegates AR creation to crar."""
    ar = crar(context, request, values)
    return ar
def __call__(self):
    """Validate the submitted AR form state, create one Analysis Request
    per valid entry and return a JSON response with either the created
    ARs (plus sticker info) or the accumulated validation errors.
    """
    form = self.request.form
    plone.protect.CheckAuthenticator(self.request.form)
    plone.protect.PostOnly(self.request.form)
    uc = getToolByName(self.context, 'uid_catalog')
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    portal_catalog = getToolByName(self.context, 'portal_catalog')
    # Load the form data from request.state. If anything goes wrong here,
    # put a bullet through the whole process.
    try:
        states = json.loads(form['state'])
    except Exception as e:
        # NOTE(review): e.message is Python-2 only; migrate to str(e)
        # when this module moves to Python 3
        message = t(_('Badly formed state: ${errmsg}',
                      mapping={'errmsg': e.message}))
        ajax_form_error(self.errors, message=message)
        return json.dumps({'errors': self.errors})

    # Base list of required fields; each AR below works on its own copy
    # (BUGFIX: previously a single shared list was mutated across ARs, so
    # removing 'SamplingDate'/'SampleType' for one secondary AR silently
    # dropped the requirement for every subsequent AR in the same request)
    base_required = [field.getName()
                     for field in AnalysisRequestSchema.fields()
                     if field.required] + ["Analyses"]

    # First remove all states which are completely empty; if all
    # required fields are not present, we assume that the current
    # AR had no data entered, and can be ignored
    nonblank_states = {}
    for arnum, state in states.items():
        for key, val in state.items():
            if val \
                    and "%s_hidden" % key not in state \
                    and not key.endswith('hidden'):
                nonblank_states[arnum] = state
                break

    # in valid_states, all ars that pass validation will be stored
    valid_states = {}
    for arnum, state in nonblank_states.items():
        required = list(base_required)
        # Secondary ARs are a special case, these fields are not required
        if state.get('Sample', ''):
            if 'SamplingDate' in required:
                required.remove('SamplingDate')
            if 'SampleType' in required:
                required.remove('SampleType')
        # fields flagged as 'hidden' are not considered required because
        # they will already have default values inserted in them
        # (BUGFIX: previously removed items while iterating the same
        # list, which skips the element after each removal)
        required = [f for f in required if f + '_hidden' not in state]
        missing = [f for f in required if not state.get(f, '')]
        # If there are required fields missing, flag an error
        if missing:
            msg = t(_('Required fields have no values: '
                      '${field_names}',
                      mapping={'field_names': ', '.join(missing)}))
            ajax_form_error(self.errors, arnum=arnum, message=msg)
            continue
        # This ar is valid!
        valid_states[arnum] = state

    # - Expand lists of UIDs returned by multiValued reference widgets
    # - Transfer _uid values into their respective fields
    for arnum in valid_states.keys():
        for field, value in valid_states[arnum].items():
            if field.endswith('_uid') and ',' in value:
                valid_states[arnum][field] = value.split(',')
            elif field.endswith('_uid'):
                valid_states[arnum][field] = value

    if self.errors:
        return json.dumps({'errors': self.errors})

    # Now, we will create the specified ARs.
    ARs = []
    for arnum, state in valid_states.items():
        # Create the Analysis Request
        ar = crar(
            portal_catalog(UID=state['Client'])[0].getObject(),
            self.request,
            state
        )
        ARs.append(ar.Title())

    # Display the appropriate message after creation
    if len(ARs) > 1:
        message = _('Analysis requests ${ARs} were successfully created.',
                    mapping={'ARs': safe_unicode(', '.join(ARs))})
    else:
        message = _('Analysis request ${AR} was successfully created.',
                    mapping={'AR': safe_unicode(ARs[0])})
    self.context.plone_utils.addPortalMessage(message, 'info')

    # Automatic label printing won't print "register" labels for
    # Secondary ARs. This was a hack and is still a hack but will work
    # more widely
    new_ars = [ar for ar in ARs if ar[-1] == '1']
    if 'register' in self.context.bika_setup.getAutoPrintStickers() \
            and new_ars:
        return json.dumps({
            'success': message,
            'stickers': new_ars,
            'stickertemplate':
                self.context.bika_setup.getAutoStickerTemplate()
        })
    else:
        return json.dumps({'success': message})