def generate_delivery_pdf(context, ars_or_samples):
    """Render the delivery form PDF for the given Analysis Requests or
    Samples and attach it to every affected Analysis Request.

    :param context: container used to render the DeliveryFormPdf view
    :param ars_or_samples: a single Sample/AnalysisRequest or a list of them
    :return: path of the generated temporary PDF file, or None on failure
    """
    if not ars_or_samples:
        logger.warn("No Analysis Requests or Samples provided")
        return
    # Normalize a single object to a one-element list and recurse
    if ISample.providedBy(ars_or_samples) or \
            IAnalysisRequest.providedBy(ars_or_samples):
        return generate_delivery_pdf(context, [ars_or_samples])
    if not isinstance(ars_or_samples, list):
        logger.warn("Type not supported: {}".format(repr(ars_or_samples)))
        return
    # Render the delivery form template to HTML, then to a temp PDF file
    html = DeliveryFormPdf(context, context.REQUEST,
                           analysis_requests=ars_or_samples).template()
    html = safe_unicode(html).encode("utf-8")
    filename = "delivery"
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        ar_ids = map(lambda ar: ar.id, ars_or_samples)
        logger.warn(
            "Unable to generate the PDF of delivery form for {}".format(
                ' '.join(ar_ids)))
        return None

    def _attach_to_ar(pdf, ar_brain_or_obj):
        # Create an Attachment object holding the PDF inside the AR's
        # parent container and link it to the AR.
        ar = api.get_object(ar_brain_or_obj)
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType("Attachment", ar.aq_parent, attid)
        # NOTE(review): file handle is never closed explicitly — presumably
        # released by refcount after setAttachmentFile copies the data; confirm
        att.setAttachmentFile(open(pdf_fn))
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        # Append to any existing attachments; the field stores UIDs
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)

    for ar_or_sample in ars_or_samples:
        # Attach the pdf to the Analysis Request
        if ISample.providedBy(ar_or_sample):
            for ar in ar_or_sample.getAnalysisRequests():
                _attach_to_ar(pdf, ar)
        elif IAnalysisRequest.providedBy(ar_or_sample):
            _attach_to_ar(pdf, ar_or_sample)
    return pdf_fn
def __init__(self, context, request):
    """Collect the Analysis Requests this requisition form will render.

    A Sample context contributes all of its Analysis Requests; an
    Analysis Request context contributes itself only; anything else
    yields an empty list.
    """
    super(RequisitionFormPdf, self).__init__(context, request)
    if ISample.providedBy(context):
        requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        requests = [context]
    else:
        requests = []
    self.analysis_requests = requests
def __init__(self, context, request, analysis_requests=None):
    """Initialize the delivery form with the Analysis Requests to render.

    When *analysis_requests* is not given (or empty), they are derived
    from the context: a Sample contributes all of its Analysis Requests,
    an Analysis Request contributes itself.
    """
    super(DeliveryFormPdf, self).__init__(context, request)
    self.analysis_requests = analysis_requests
    if self.analysis_requests:
        return
    if ISample.providedBy(context):
        self.analysis_requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        self.analysis_requests = [context]
def get_sample(instance):
    """Return the sample associated to this instance, if any.

    Samples resolve to themselves, Analysis Requests resolve through
    their sample, and Sample Partitions resolve through their parent.
    Returns None for anything else.
    """
    sample = None
    if ISample.providedBy(instance):
        sample = instance
    elif IAnalysisRequest.providedBy(instance):
        sample = get_sample(instance.getSample())
    elif ISamplePartition.providedBy(instance):
        sample = get_sample(instance.aq_parent)
    return sample
def __init__(self, context, request, analysis_requests=None,
             lab_department=None):
    """Initialize the internal delivery form.

    Stores the lab department and the Analysis Requests to render; when
    the latter are not supplied, they are derived from the context
    (Sample -> all of its ARs; Analysis Request -> itself).
    """
    super(InternalDeliveryFormPdf, self).__init__(context, request)
    self.analysis_requests = analysis_requests
    self.lab_department = lab_department
    if self.analysis_requests:
        return
    if ISample.providedBy(context):
        self.analysis_requests = context.getAnalysisRequests()
    elif IAnalysisRequest.providedBy(context):
        self.analysis_requests = [context]
def folderitems(self, full_objects=False):
    """Build the listing items for Sample objects, decorating each row
    with biospecimen fields (Type, Volume, Unit, SubjectID, Kit, Project,
    Barcode) and per-state edit permissions.

    :param full_objects: accepted for API compatibility; not used here
    :return: list of decorated listing item dictionaries
    """
    items = BikaListingView.folderitems(self)
    # Active sample types feed the 'Type' dropdown choices
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    brains = bsc(portal_type='SampleType', inactive_state='active')
    biospecimen_types = [{
        'ResultValue': brain.UID,
        'ResultText': brain.title
    } for brain in brains]
    ret = []
    for x, item in enumerate(items):
        # FIX: dict.has_key() is Python-2-only; use the `in` operator
        if 'obj' not in items[x]:
            continue
        obj = items[x]['obj']
        if not ISample.providedBy(obj):
            continue
        items[x]['Type'] = obj.getSampleType() and obj.getSampleType(
        ).Title() or ''
        items[x]['Volume'] = obj.getField('Volume').get(obj)
        items[x]['Unit'] = VOLUME_UNITS[0]['ResultText']
        items[x]['SubjectID'] = obj.getField('SubjectID').get(obj)
        kit = obj.getField('Kit').get(obj)
        project = obj.getField('Project').get(obj)
        items[x]['Kit'] = kit
        items[x]['Project'] = project
        if project:
            items[x]['replace']['Project'] = \
                '<a href="%s">%s</a>' % (project.absolute_url(),
                                         project.Title())
        if kit:
            items[x]['replace']['Kit'] = \
                '<a href="%s">%s</a>' % (kit.absolute_url(), kit.Title())
        # TODO: IF STATUS IS RECEIVED EXECUTE THIS
        # items[x]['replace']['Type'] = \
        #     '<a href="%s">%s</a>' % (obj.getSampleType().absolute_url(),
        #                              obj.getSampleType().Title())
        items[x]['Barcode'] = obj.getField('Barcode').get(obj)
        items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
            (items[x]['url'], items[x]['Title'])
        # TODO: SPECIFY OBJ STATES WHERE USER CAN EDIT BARCODE
        if self.allow_edit and isActive(self.context) and \
                getSecurityManager().checkPermission(ManageProjects, obj):
            if items[x]['review_state'] == "sample_registered":
                items[x]['allow_edit'] = ['Type', 'Barcode']
                items[x]['choices']['Type'] = biospecimen_types
            elif items[x]['review_state'] == "sample_due":
                items[x]['allow_edit'] = ['SubjectID', 'Volume', 'Unit']
                if not items[x]['Unit']:
                    items[x]['choices']['Unit'] = VOLUME_UNITS
        ret.append(item)
    return ret
def after_process(obj):
    """Event fired after process (Process) transition is triggered.

    For an Analysis Request, a derived AR (and Sample) is created for
    every partition. Samples are deliberately left untouched, since
    partitioning directly from a Sample is not permitted.
    """
    logger.info("*** Custom after_process transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Every partition yields its own derived AR (and Sample)
        create_requests_from_partitions(obj)
        return
    if ISample.providedBy(obj):
        # Cascade intentionally disabled: no partitioning from Sample
        # sample_events._cascade_transition(obj, 'process')
        pass
def after_send_to_lab(obj):
    """Event fired after send_to_lab transition is triggered.

    An Analysis Request promotes the transition to its Sample; a Sample
    cascades the transition to its dependents.
    """
    logger.info("*** Custom after_send_to_lab transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Promote the transition to the associated sample, if any
        sample = obj.getSample()
        if sample:
            doActionFor(sample, 'send_to_lab')
        return
    if ISample.providedBy(obj):
        sample_events._cascade_transition(obj, 'send_to_lab')
def workflow_action_download_requisition(self):
    """Redirect the response to the download URL of the last requisition
    attachment of the context Analysis Request.

    Sample contexts are not handled yet (concatenating the PDFs of all
    contained ARs is still a TODO).
    """
    context = self.context
    if ISample.providedBy(context):
        # TODO: concatenate the PDFs of all contained ARs
        logger.info("This is a sample!")
        return
    if not IAnalysisRequest.providedBy(context):
        return
    # Redirect to the requisition PDF
    attachment = self.get_last_requisition_attachment(context)
    if not attachment:
        return
    self.destination_url = '{}/at_download/AttachmentFile'.format(
        attachment.absolute_url())
    self.request.response.redirect(self.destination_url)
def after_send_to_pot(obj):
    """Event fired after sending to point of testing.

    For an Analysis Request: transitions its active analyses to
    sample_due, then promotes the transition to the parent AR.
    For a Sample: cascades the transition to its dependents.
    """
    logger.info("*** Custom after_send_to_pot transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Transition active Analyses to sample_due
        for analysis in obj.getAnalyses(full_objects=True,
                                        cancellation_state='active'):
            doActionFor(analysis, 'sample_due')
        # Promote to parent AR
        _promote_cascade(obj, "send_to_pot")
        return
    if ISample.providedBy(obj):
        sample_events._cascade_transition(obj, 'send_to_pot')
def get_sample_from_values(context, values):
    """values may contain a UID or a direct Sample object.

    Resolves ``values['Sample']`` to a Sample object: a Sample passes
    through unchanged, a UID is looked up in bika_catalog.

    :raises RuntimeError: when the value cannot be resolved to a Sample
    :return: the resolved Sample object
    """
    if ISample.providedBy(values['Sample']):
        sample = values['Sample']
    else:
        bc = getToolByName(context, 'bika_catalog')
        brains = bc(UID=values['Sample'])
        if brains:
            sample = brains[0].getObject()
        else:
            raise RuntimeError(
                "create_analysisrequest: invalid sample value provided. values=%s" % values)
    if not sample:
        raise RuntimeError(
            "create_analysisrequest: invalid sample value provided. values=%s" % values)
    # BUGFIX: the resolved sample was previously discarded (the function
    # fell off the end and returned None). Return it, consistent with the
    # sibling implementation of this helper elsewhere in the codebase.
    return sample
def generate_requisition_pdf(ar_or_sample):
    """Generate the requisition form PDF for the given Analysis Request
    (or for every AR of the given Sample) and attach it to the AR.

    The temporary PDF file is deleted once attached.

    :param ar_or_sample: an AnalysisRequest or a Sample; a Sample fans
        out to a recursive call per contained AR
    """
    if not ar_or_sample:
        logger.warn("No Analysis Request or Sample provided")
        return
    if ISample.providedBy(ar_or_sample):
        # Fan out: one requisition PDF per Analysis Request of the Sample
        for ar in ar_or_sample.getAnalysisRequests():
            generate_requisition_pdf(ar)
        return
    elif not IAnalysisRequest.providedBy(ar_or_sample):
        logger.warn("Type not supported: {}".format(repr(ar_or_sample)))
        return
    # Render the requisition template and convert it to a temp PDF file
    html = RequisitionFormPdf(ar_or_sample,
                              ar_or_sample.REQUEST).template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-requisition' % ar_or_sample.id
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        logger.warn(
            "Unable to generate the PDF of requisition form for {}".format(
                ar_or_sample.id))
        return
    # Attach the pdf to the Analysis Request
    attid = ar_or_sample.aq_parent.generateUniqueId('Attachment')
    att = _createObjectByType("Attachment", ar_or_sample.aq_parent, attid)
    # NOTE(review): file handle not closed explicitly — presumably released
    # by refcount after setAttachmentFile copies the data; confirm
    att.setAttachmentFile(open(pdf_fn))
    att.setReportOption('i')  # Ignore in report
    # Try to assign the Requisition Attachment Type
    query = dict(portal_type='AttachmentType', title='Requisition')
    brains = api.search(query, 'bika_setup_catalog')
    if brains:
        att_type = api.get_object(brains[0])
        att.setAttachmentType(att_type)
    # Awkward workaround to rename the file
    attf = att.getAttachmentFile()
    attf.filename = '%s.pdf' % filename
    att.setAttachmentFile(attf)
    att.unmarkCreationFlag()
    renameAfterCreation(att)
    # Append to any existing attachments; the field stores UIDs
    atts = ar_or_sample.getAttachment() + [att] if \
        ar_or_sample.getAttachment() else [att]
    atts = [a.UID() for a in atts]
    ar_or_sample.setAttachment(atts)
    # The PDF now lives in the Attachment object; drop the temp file
    os.remove(pdf_fn)
def get_sample_from_values(context, values):
    """values may contain a UID or a direct Sample object.

    Resolves ``values['Sample']`` to a Sample object and returns it,
    raising RuntimeError when the value cannot be resolved.
    """
    candidate = values['Sample']
    if ISample.providedBy(candidate):
        sample = candidate
    else:
        catalog = getToolByName(context, 'bika_catalog')
        matches = catalog(UID=candidate)
        if not matches:
            raise RuntimeError(
                "create_analysisrequest: invalid sample value provided. values=%s" % values)
        sample = matches[0].getObject()
    if not sample:
        raise RuntimeError(
            "create_analysisrequest: invalid sample value provided. values=%s" % values)
    return sample
def after_receive(obj):
    """Event fired after receive (Process) transition is triggered.

    For an Analysis Request: transitions its active analyses to
    'receive', then promotes the transition to the parent AR.
    For a Sample: cascades the transition to its dependents.
    """
    logger.info("*** Custom after_receive transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Transition active Analyses to received
        for analysis in obj.getAnalyses(full_objects=True,
                                        cancellation_state='active'):
            doActionFor(analysis, 'receive')
        # Promote to parent AR
        _promote_cascade(obj, "receive")
        return
    if ISample.providedBy(obj):
        sample_events._cascade_transition(obj, 'receive')
def after_deliver(obj):
    """Event fired after delivery transition is triggered.

    An Analysis Request promotes 'deliver' to its Sample and reindexes
    its received date. A Sample stamps its received date, reindexes, and
    cascades the transition to its dependents.
    """
    logger.info("*** Custom after_deliver transition ***")
    if IAnalysisRequest.providedBy(obj):
        # Promote to sample
        sample = obj.getSample()
        if sample:
            doActionFor(sample, 'deliver')
        obj.reindexObject(idxs=["getDateReceived"])
        return
    if ISample.providedBy(obj):
        obj.setDateReceived(DateTime())
        obj.reindexObject(idxs=["getDateReceived"])
        sample_events._cascade_transition(obj, 'deliver')
def after_no_sampling_workflow(obj):
    """Event fired for no_sampling_workflow that makes the status of the
    Analysis request or Sample to become sample_ordered.

    For an Analysis Request this also transitions its active analyses,
    promotes the transition to the Sample, generates the requisition
    PDF and assigns a default Analysis Specification. For a Sample, the
    transition is cascaded to its dependents.
    """
    logger.info("*** Custom after_no_sampling_workflow (order) transition ***")
    # Generate the requisition report
    if IAnalysisRequest.providedBy(obj):
        # Transition Analyses to sample_due
        ans = obj.getAnalyses(full_objects=True, cancellation_state='active')
        for analysis in ans:
            doActionFor(analysis, 'no_sampling_workflow')
        # Promote to sample
        sample = obj.getSample()
        if sample:
            doActionFor(sample, 'no_sampling_workflow')
        # Generate the delivery pdf
        generate_requisition_pdf(obj)
        # Set specifications by default: prefer the sample type's
        # DefaultAnalysisSpecifications field value
        sample_type = obj.getSampleType()
        specs = _api.get_field_value(sample_type,
                                     "DefaultAnalysisSpecifications",
                                     None)
        if specs:
            obj.setSpecification(api.get_object(specs))
        else:
            # Find out suitable specs by Sample Type name, using the
            # "<sample type> - calculated" naming convention
            sample_type = obj.getSampleType().Title()
            specs_title = "{} - calculated".format(sample_type)
            query = dict(portal_type="AnalysisSpec", title=specs_title)
            specs = api.search(query, 'bika_setup_catalog')
            if specs:
                obj.setSpecification(api.get_object(specs[0]))
    elif ISample.providedBy(obj):
        sample_events._cascade_transition(obj, 'no_sampling_workflow')
def folderitems(self, full_objects=False):
    """Build the listing items for aliquot Samples linked to the context
    via the "SampleSample" back-reference, decorating each row with
    biospecimen fields (Type, Volume, Unit, SubjectID, Kit, Project,
    Barcode).

    :param full_objects: accepted for API compatibility; not used here
    :return: list of decorated listing item dictionaries
    """
    # NOTE(review): `self` is passed as the positional `full_objects`
    # argument here; kept as-is to preserve behavior — confirm intent.
    items = super(AliquotsView, self).folderitems(self)
    # Only samples back-referenced from the context are listed
    linked_samples = self.context.getBackReferences("SampleSample")
    new_items = []
    for x, item in enumerate(items):
        # FIX: dict.has_key() is Python-2-only; use the `in` operator
        if 'obj' not in items[x]:
            continue
        obj = items[x]['obj']
        if not ISample.providedBy(obj) or obj not in linked_samples:
            continue
        items[x]['Type'] = obj.getSampleType() and obj.getSampleType(
        ).Title() or ''
        items[x]['Volume'] = obj.getField('Volume').get(obj)
        items[x]['Unit'] = VOLUME_UNITS[0]['ResultText']
        items[x]['SubjectID'] = obj.getField('SubjectID').get(obj)
        kit = obj.getField('Kit').get(obj)
        project = obj.getField('Project').get(obj)
        items[x]['Kit'] = kit
        items[x]['Project'] = project
        if project:
            items[x]['replace']['Project'] = \
                '<a href="%s">%s</a>' % (project.absolute_url(),
                                         project.Title())
        if kit:
            items[x]['replace']['Kit'] = \
                '<a href="%s">%s</a>' % (kit.absolute_url(), kit.Title())
        items[x]['replace']['Type'] = \
            '<a href="%s">%s</a>' % (obj.getSampleType().absolute_url(),
                                     obj.getSampleType().Title())
        items[x]['Barcode'] = obj.getField('Barcode').get(obj)
        items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
            (items[x]['url'], items[x]['Title'])
        new_items.append(item)
    return new_items
def create_analysisrequest(context, request, values):
    """Create an AR.

    :param context the container in which the AR will be created (Client)
    :param request the request object
    :param values a dictionary containing fieldname/value pairs, which
           will be applied.  Some fields will have specific code to handle them,
           and others will be directly written to the schema.
    :return the new AR instance

    Special keys present (or required) in the values dict, which are not
    present in the schema:
        - Partitions: data about partitions to be created, and the
                      analyses that are to be assigned to each.
        - Prices: custom prices set in the HTML form.
        - ResultsRange: Specification values entered in the HTML form.
    """
    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # Create new sample or locate the existing for secondary AR
    if values['Sample']:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            # NOTE(review): a UID with no catalog match raises IndexError
            # here rather than a descriptive error — confirm acceptable
            sample = bc(UID=values['Sample'])[0].getObject()
        samplingworkflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        samplingworkflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)
    # processform renames the sample, this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())
    # Set initial AR state
    workflow_action = 'sampling_workflow' if samplingworkflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)
    # We need to send a list of service UIDS to setAnalyses function.
    # But we may have received a list of titles, list of UIDS,
    # list of keywords or list of service objects!
    service_uids = []
    for obj in values['Analyses']:
        uid = False
        # service objects
        if hasattr(obj, 'portal_type') and obj.portal_type == 'AnalysisService':
            uid = obj.UID()
        # Analysis objects (shortcut for eg copying analyses from other AR)
        elif hasattr(obj, 'portal_type') and obj.portal_type == 'Analysis':
            uid = obj.getService()
        # Maybe already UIDs.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', UID=obj)
            if brains:
                uid = brains[0].UID
        # Maybe already UIDs.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', title=obj)
            if brains:
                uid = brains[0].UID
        if uid:
            service_uids.append(uid)
        else:
            logger.info("In analysisrequest.add.create_analysisrequest: cannot "
                        "find uid of this service: %s" % obj)
    # Set analysis request analyses
    ar.setAnalyses(service_uids,
                   prices=values.get("Prices", []),
                   specs=values.get('ResultsRange', []))
    analyses = ar.getAnalyses(full_objects=True)
    # States for which 'receive' must not be triggered yet
    skip_receive = ['to_be_sampled', 'sample_due', 'sampled',
                    'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')
        for analysis in analyses:
            doActionFor(analysis, 'sample_due')
            analysis_state = workflow.getInfoFor(analysis, 'review_state')
            if analysis_state not in skip_receive:
                doActionFor(analysis, 'receive')
    if not secondary:
        # Create sample partitions
        partitions = []
        for n, partition in enumerate(values['Partitions']):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition
                )
            # now assign analyses to this partition.
            obj = partition['object']
            for analysis in analyses:
                if analysis.getService().UID() in partition['services']:
                    analysis.setSamplePartition(obj)
            partitions.append(partition)
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not samplingworkflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)
        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')
    # Return the newly created Analysis Request
    return ar
def create_analysisrequest(context, request, values, analyses=[],
                           partitions=None, specifications=None, prices=None):
    """Create an Analysis Request (and its Sample, partitions and
    analyses) inside *context*.

    :param context: container in which the AR will be created
    :param request: the HTTP request object
    :param values: fieldname/value dict applied to the AR; may contain a
        'Sample' entry (object or UID) to create a secondary AR
    :param analyses: analyses to assign (passed to ``ar.setAnalyses``)
        NOTE(review): mutable default ([]) — never mutated here, only
        rebound, so it is benign
    :param partitions: list of partition dicts; defaults to a single
        partition holding all analyses
    :param specifications: specs forwarded to ``ar.setAnalyses``
    :param prices: prices forwarded to ``ar.setAnalyses``
    :return: the new AnalysisRequest instance
    """
    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # Create new sample or locate the existing for secondary AR
    if values.get('Sample'):
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            sample = bc(UID=values['Sample'])[0].getObject()
        workflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)
    # processform renames the sample, this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())
    # Set initial AR state
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)
    # Set analysis request analyses
    analyses = ar.setAnalyses(analyses, prices=prices, specs=specifications)
    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sample')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [
                t['id'] for t in workflow.getTransitionsFor(analysis)
            ]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')
    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample, partition, analyses)
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)
        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')
    # Return the newly created Analysis Request
    return ar
def create_analysisrequest(
        context,
        request,
        values,
        analyses=[],
        partitions=None,
        specifications=None,
        prices=None
):
    """Create an Analysis Request (and its Sample, partitions and
    analyses) inside *context*.

    :param context: container in which the AR will be created
    :param request: the HTTP request object
    :param values: fieldname/value dict applied to the AR; 'Sample'
        (object or UID) selects an existing Sample for a secondary AR
    :param analyses: analyses to assign (passed to ``ar.setAnalyses``)
    :param partitions: list of partition dicts; defaults to a single
        partition holding all analyses
    :param specifications: specs forwarded to ``ar.setAnalyses``
    :param prices: prices forwarded to ``ar.setAnalyses``
    :return: the new AnalysisRequest instance
    """
    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # Create new sample or locate the existing for secondary AR
    if values['Sample']:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            sample = bc(UID=values['Sample'])[0].getObject()
        workflow_enabled = sample.getSamplingWorkflowEnabled()
    else:
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)
    # processform renames the sample, this requires values to contain the Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())
    # Set initial AR state
    workflow_action = 'sampling_workflow' if workflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)
    # Set analysis request analyses
    analyses = ar.setAnalyses(analyses, prices=prices, specs=specifications)
    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sample')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [t['id'] for t in
                                       workflow.getTransitionsFor(analysis)]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')
    if not secondary:
        # BUGFIX: partitions defaults to None; iterating it directly
        # crashed for primary ARs created without explicit partitions.
        # Default to one partition with all analyses, consistent with
        # the sibling versions of this factory.
        if not partitions:
            partitions = [{'services': analyses}]
        # Create sample partitions
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)
        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')
    # Return the newly created Analysis Request
    return ar
def create_analysisrequest(
        context,
        request,
        values,  # {field: value, ...}
        analyses=[],  # uid, service or analysis; list of uids, services or analyses
        partitions=None,  # list of dictionaries with container, preservation etc)
        specifications=None,
        prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise the AR and it's requirements.
    XXX The ar-add form's ajaxAnalysisRequestSubmit should be calling this.
    """
    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()
    # Create new sample or locate the existing for secondary AR
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                sample = brains[0].getObject()
            # NOTE(review): if the UID lookup returns no brains, `sample`
            # is never bound and the next line raises UnboundLocalError
            # instead of the intended RuntimeError — confirm and fix
            if not sample:
                raise RuntimeError(
                    "create_analysisrequest No sample. values=%s" % values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()
    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())
    # Set initial AR state
    action = '{0}sampling_workflow'.format('' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)
    # Set analysis request analyses; inputs may be uids, services or
    # analyses, so resolve them to service UIDs first
    service_uids = _resolve_items_to_service_uids(analyses)
    analyses = ar.setAnalyses(service_uids, prices=prices,
                              specs=specifications)
    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        api.content.transition(obj=ar, to_state='sampled')
        api.content.transition(obj=ar, to_state='sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [t['id'] for t in
                                       workflow.getTransitionsFor(analysis)]
            if 'receive' in analysis_transition_ids and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')
    if not secondary:
        # Create sample partitions; default to a single partition
        # holding every analysis
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition,
                    analyses
                )
        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)
        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')
    # Return the newly created Analysis Request
    return ar
def folderitems(self, full_objects=False):
    """Build the listing items for Sample objects, restricted to
    ISharable samples when the current user has the EMS role, and
    decorate each row with biospecimen fields and per-state edit
    permissions.

    :param full_objects: accepted for API compatibility; not used here
    :return: list of decorated listing item dictionaries
    """
    # Show only ISharable samples for EMS. Skip others.
    pm = getToolByName(self.context, 'portal_membership')
    roles = pm.getAuthenticatedMember().getRoles()
    # print roles
    if 'EMS' in roles:
        self.contentFilter['object_provides'] = ISharableSample.__identifier__
    items = BikaListingView.folderitems(self)
    # Active sample types feed the 'Type' dropdown choices
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    brains = bsc(portal_type='SampleType', inactive_state='active')
    biospecimen_types = [
        {
            'ResultValue': brain.UID,
            'ResultText': brain.title
        } for brain in brains
    ]
    ret = []
    for x, item in enumerate(items):
        # NOTE(review): dict.has_key() is Python-2-only
        if not items[x].has_key('obj'):
            continue
        obj = items[x]['obj']
        if not ISample.providedBy(obj):
            continue
        items[x]['Type'] = obj.getSampleType() and obj.getSampleType().Title() or ''
        items[x]['Volume'] = obj.getField('Volume').get(obj)
        items[x]['Unit'] = obj.getField('Unit').get(obj)
        items[x]['SubjectID'] = obj.getField('SubjectID').get(obj)
        project = obj.getField('Project').get(obj)
        # Fall back to the parent container when no Project is set
        if not project:
            project = obj.aq_parent
        items[x]['Project'] = project
        storage_location = obj.getField('StorageLocation').get(obj)
        if storage_location:
            items[x]['StorageLocation'] = storage_location.Title()
        if project:
            items[x]['replace']['Project'] = \
                '<a href="%s">%s</a>' % (project.absolute_url(),
                                         project.Title())
        items[x]['Barcode'] = obj.getField('Barcode').get(obj)
        items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
            (items[x]['url'], items[x]['Title'])
        frozen_time = obj.getField('FrozenTime').get(obj)
        if frozen_time:
            # Best-effort formatting: fall back to str() for values
            # without strftime
            try:
                items[x]['FrozenTime'] = frozen_time.strftime("%Y-%m-%d %H:%M")
            except:
                items[x]['FrozenTime'] = str(frozen_time)
        batch = obj.getField('Batch').get(obj)
        # Best-effort: batch may be None or lack a CfgDateTime value
        try:
            items[x]['CFGTime'] = batch.getField('CfgDateTime').get(batch).strftime("%Y/%m/%d %H:%M")
        except:
            items[x]['CFGTime'] = ''
        # Best-effort: SamplingDate may be unset
        try:
            items[x]['SamplingTime'] = obj.getField('SamplingDate').get(obj).strftime("%Y/%m/%d %H:%M")
        except:
            items[x]['SamplingTime'] = ''
        # Per-review-state editable columns
        if self.allow_edit and isActive(self.context) and \
                getSecurityManager().checkPermission(ModifyPortalContent, obj):
            if items[x]['review_state'] == "sample_registered":
                items[x]['allow_edit'] = ['Type', 'Barcode', 'FrozenTime']
                items[x]['choices']['Type'] = biospecimen_types
            elif items[x]['review_state'] == "sample_due":
                # items[x]['allow_edit'] = ['SubjectID', 'Volume', 'Unit']
                items[x]['allow_edit'] = ['Volume', 'Unit']
                if not items[x]['Unit']:
                    items[x]['choices']['Unit'] = VOLUME_UNITS
            elif items[x]['review_state'] == "sample_shipped":
                # items[x]['allow_edit'] = ['SubjectID', 'Volume']
                items[x]['allow_edit'] = ['Volume']
        ret.append(item)
    return ret
def generate_delivery_pdf(context, ars_or_samples):
    """Generate a delivery form PDF and attach it to each Analysis Request.

    :param context: acquisition context used to render the form; its
        REQUEST is handed to the DeliveryFormPdf view
    :param ars_or_samples: a single Sample or AnalysisRequest, or a list
        of them. Samples are expanded to their Analysis Requests.
    :return: path of the generated pdf file, or None when the PDF could
        not be generated
    """
    if not ars_or_samples:
        logger.warn("No Analysis Requests or Samples provided")
        return

    # Accept a single object by normalizing it to a one-element list.
    if ISample.providedBy(ars_or_samples) or \
            IAnalysisRequest.providedBy(ars_or_samples):
        return generate_delivery_pdf(context, [ars_or_samples])

    if not isinstance(ars_or_samples, list):
        logger.warn("Type not supported: {}".format(repr(ars_or_samples)))
        return

    html = DeliveryFormPdf(context, context.REQUEST,
                           analysis_requests=ars_or_samples).template()
    html = safe_unicode(html).encode("utf-8")
    filename = "delivery"
    # NOTE(review): tempfile.mktemp is race-prone; kept because createPdf
    # expects a plain path it creates itself -- consider mkstemp + close.
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        ar_ids = map(lambda ar: ar.id, ars_or_samples)
        logger.warn("Unable to generate the PDF of delivery form for {}".
                    format(' '.join(ar_ids)))
        return None

    def _attach_to_ar(pdf, ar_brain_or_obj):
        # Create an Attachment holding the pdf and link it to the AR.
        ar = api.get_object(ar_brain_or_obj)

        # Attach the pdf to the Analysis Request
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType("Attachment", ar.aq_parent, attid)
        # Open in binary mode and close the handle once the field has
        # consumed it (the original leaked an open text-mode handle).
        with open(pdf_fn, 'rb') as pdf_file:
            att.setAttachmentFile(pdf_file)
        att.setReportOption('i')  # Ignore in report

        # Try to assign the Delivery Attachment Type
        query = dict(portal_type='AttachmentType', title='Delivery')
        brains = api.search(query, 'bika_setup_catalog')
        if brains:
            att_type = api.get_object(brains[0])
            att.setAttachmentType(att_type)

        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)

        att.unmarkCreationFlag()
        renameAfterCreation(att)
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)

    # TODO Create only one Attachment per Client and assign it to all ARs
    # There is no need to create a single Attachment object for each AR.
    # Same attachment can be assigned to different ARs and they will
    # resolve the attachment correctly later. This will be useful for:
    # a) Reduce the database size (less pdfs to store)
    # b) workflow_download_delivery can easily return the attachments that
    #    are different when multiple ARs are selected.
    for ar_or_sample in ars_or_samples:
        # Attach the pdf to the Analysis Request
        if ISample.providedBy(ar_or_sample):
            for ar in ar_or_sample.getAnalysisRequests():
                _attach_to_ar(pdf, ar)
        elif IAnalysisRequest.providedBy(ar_or_sample):
            _attach_to_ar(pdf, ar_or_sample)

    return pdf_fn
def create_analysisrequest(
        context,
        request,
        values,           # {field: value, ...}
        analyses=None,    # uid, service or analysis; or a list of them
        partitions=None,  # list of dicts with container, preservation etc)
        specifications=None,
        prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise the AR and it's requirements.

    XXX The ar-add form's ajaxAnalysisRequestSubmit should be calling this.

    :param context: container in which the AR will be created
    :param request: the current request object
    :param values: dict of fieldname/value pairs applied to the AR
    :param analyses: items resolvable to AnalysisService UIDs
    :param partitions: partition definitions; defaults to one partition
        holding all analyses
    :param specifications: specs forwarded to setAnalyses
    :param prices: prices forwarded to setAnalyses
    :return: the newly created AnalysisRequest
    :raises RuntimeError: when a secondary AR's sample cannot be resolved
    """
    # A mutable default argument ([]) is shared across calls; use None as
    # the default and normalize here instead.
    if analyses is None:
        analyses = []

    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # It's necessary to modify these and we don't want to pollute the
    # parent's data
    values = values.copy()

    # Create new sample or locate the existing for secondary AR
    sample = None
    if not values.get('Sample', False):
        secondary = False
        workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
        sample = create_sample(context, request, values)
    else:
        secondary = True
        if ISample.providedBy(values['Sample']):
            sample = values['Sample']
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                sample = brains[0].getObject()
        # 'sample' stays None when the UID lookup found nothing; without
        # the initialization above this raised NameError instead of the
        # intended RuntimeError.
        if not sample:
            raise RuntimeError(
                "create_analysisrequest No sample. values=%s" % values)
        workflow_enabled = sample.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())

    # Set some required fields manually before processForm is called
    ar.setSample(sample)
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)
    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    action = '{0}sampling_workflow'.format(
        '' if workflow_enabled else 'no_')
    workflow.doActionFor(ar, action)

    # Set analysis request analyses
    service_uids = _resolve_items_to_service_uids(analyses)
    analyses = ar.setAnalyses(service_uids, prices=prices,
                              specs=specifications)

    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        api.content.transition(obj=ar, to_state='sampled')
        api.content.transition(obj=ar, to_state='sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state == 'sample_received':
            doActionFor(ar, 'receive')

        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sample')
            doActionFor(analysis, 'sample_due')
            analysis_transition_ids = [
                t['id'] for t in workflow.getTransitionsFor(analysis)]
            if 'receive' in analysis_transition_ids \
                    and sample_state == 'sample_received':
                doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        if not partitions:
            partitions = [{'services': analyses}]
        for n, partition in enumerate(partitions):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample, partition, analyses)

        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not workflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar
def create_analysisrequest(context, request, values):
    """Create an AR.

    :param context the container in which the AR will be created (Client)
    :param request the request object
    :param values a dictionary containing fieldname/value pairs, which
           will be applied.  Some fields will have specific code to handle
           them, and others will be directly written to the schema.
    :return the new AR instance

    Special keys present (or required) in the values dict, which are not
    present in the schema:
        - Partitions: data about partitions to be created, and the
                      analyses that are to be assigned to each.
        - Prices: custom prices set in the HTML form.
        - ResultsRange: Specification values entered in the HTML form.
    """
    # Gather neccesary tools
    workflow = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')

    # Create new sample or locate the existing for secondary AR.
    # Use .get() so a missing 'Sample' key creates a primary AR instead of
    # raising KeyError (consistent with the sibling create_analysisrequest
    # variant, which uses values.get('Sample', False)).
    sample = False
    if values.get('Sample', False):
        if ISample.providedBy(values['Sample']):
            secondary = True
            sample = values['Sample']
            samplingworkflow_enabled = sample.getSamplingWorkflowEnabled()
        else:
            brains = bc(UID=values['Sample'])
            if brains:
                secondary = True
                sample = brains[0].getObject()
                samplingworkflow_enabled = \
                    sample.getSamplingWorkflowEnabled()
    if not sample:
        secondary = False
        sample = create_sample(context, request, values)
        samplingworkflow_enabled = \
            context.bika_setup.getSamplingWorkflowEnabled()

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', context, tmpID())
    ar.setSample(sample)

    # processform renames the sample, this requires values to contain the
    # Sample.
    values['Sample'] = sample
    ar.processForm(REQUEST=request, values=values)

    # Object has been renamed
    ar.edit(RequestID=ar.getId())

    # Set initial AR state
    workflow_action = 'sampling_workflow' if samplingworkflow_enabled \
        else 'no_sampling_workflow'
    workflow.doActionFor(ar, workflow_action)

    # We need to send a list of service UIDS to setAnalyses function.
    # But we may have received a list of titles, list of UIDS,
    # list of keywords or list of service objects!
    service_uids = []
    for obj in values['Analyses']:
        uid = False
        # service objects
        if hasattr(obj, 'portal_type') and \
                obj.portal_type == 'AnalysisService':
            uid = obj.UID()
        # Analysis objects (shortcut for eg copying analyses from other AR)
        elif hasattr(obj, 'portal_type') and obj.portal_type == 'Analysis':
            # NOTE(review): getService() looks like it returns the service
            # object rather than a UID -- presumably setAnalyses resolves
            # objects too; verify against its implementation.
            uid = obj.getService()
        # Maybe already UIDs.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', UID=obj)
            if brains:
                uid = brains[0].UID
        # Maybe titles.
        if not uid:
            bsc = getToolByName(context, 'bika_setup_catalog')
            brains = bsc(portal_type='AnalysisService', title=obj)
            if brains:
                uid = brains[0].UID
        if uid:
            service_uids.append(uid)
        else:
            logger.info("In analysisrequest.add.create_analysisrequest: "
                        "cannot find uid of this service: %s" % obj)

    # Set analysis request analyses
    ar.setAnalyses(service_uids,
                   prices=values.get("Prices", []),
                   specs=values.get('ResultsRange', []))
    analyses = ar.getAnalyses(full_objects=True)

    skip_receive = ['to_be_sampled', 'sample_due', 'sampled',
                    'to_be_preserved']
    if secondary:
        # Only 'sample_due' and 'sample_recieved' samples can be selected
        # for secondary analyses
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        if sample_state not in skip_receive:
            doActionFor(ar, 'receive')

    for analysis in analyses:
        doActionFor(analysis, 'sample_due')
        analysis_state = workflow.getInfoFor(analysis, 'review_state')
        if analysis_state not in skip_receive:
            doActionFor(analysis, 'receive')

    if not secondary:
        # Create sample partitions
        partitions = []
        for n, partition in enumerate(values['Partitions']):
            # Calculate partition id
            partition_prefix = sample.getId() + "-P"
            partition_id = '%s%s' % (partition_prefix, n + 1)
            partition['part_id'] = partition_id
            # Point to or create sample partition
            if partition_id in sample.objectIds():
                partition['object'] = sample[partition_id]
            else:
                partition['object'] = create_samplepartition(
                    sample,
                    partition
                )
            # now assign analyses to this partition.
            obj = partition['object']
            for analysis in analyses:
                if analysis.getService().UID() in partition['services']:
                    analysis.setSamplePartition(obj)
            partitions.append(partition)

        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not samplingworkflow_enabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            doActionFor(ar, lowest_state)

        # Transition pre-preserved partitions
        for p in partitions:
            if 'prepreserved' in p and p['prepreserved']:
                part = p['object']
                state = workflow.getInfoFor(part, 'review_state')
                if state == 'to_be_preserved':
                    workflow.doActionFor(part, 'preserve')

    # Return the newly created Analysis Request
    return ar