def create_analysis(context, service, keyword, interim_fields):
    """Create an Analysis object inside *context* for the given service.

    The new analysis is configured from *service* (interim fields and max
    time allowed), indexed, announced with ObjectInitializedEvent, and
    pushed through the sampling/no-sampling workflow transition that
    matches the site-wide SamplingWorkflow setting.

    :param context: container (typically an AnalysisRequest) for the analysis
    :param service: AnalysisService the analysis is based on
    :param keyword: id used for the new Analysis object
    :param interim_fields: interim field definitions copied onto the analysis
    :returns: the newly created Analysis object
    """
    sampling_enabled = context.bika_setup.getSamplingWorkflowEnabled()

    # Instantiate and configure the analysis.
    analysis = _createObjectByType("Analysis", context, keyword)
    analysis.setService(service)
    analysis.setInterimFields(interim_fields)
    analysis.setMaxTimeAllowed(service.getMaxTimeAllowed())
    analysis.unmarkCreationFlag()
    analysis.reindexObject()

    # Announce the initialization of the new object.
    event.notify(ObjectInitializedEvent(analysis))

    # Fire the appropriate workflow transition.
    if sampling_enabled:
        action = 'sampling_workflow'
    else:
        action = 'no_sampling_workflow'
    try:
        context.portal_workflow.doActionFor(analysis, action)
    except WorkflowException:
        # The analysis may already have been transitioned; swallowing the
        # exception keeps this helper safe to call in that case.
        pass

    return analysis
def _add_services_to_ar(self, ar, analyses):
    """Attach analysis services to *ar* and link them to a sample partition.

    ``analyses`` is a list of '<service_uid>:<price>' strings; only the
    UID portion is used here.  One sample partition is created (or the
    existing "-P1" partition is reused), the AR and partition are pushed
    through the (no_)sampling_workflow transition, and — when sampling is
    disabled — partitions/analyses/AR are manually advanced to
    'sample_due' / 'to_be_preserved'.
    """
    #Add Services
    service_uids = [i.split(':')[0] for i in analyses]
    new_analyses = ar.setAnalyses(service_uids)
    ar.setRequestID(ar.getId())
    ar.reindexObject()
    event.notify(ObjectInitializedEvent(ar))
    ar.at_post_create_script()
    SamplingWorkflowEnabled = \
        self.bika_setup.getSamplingWorkflowEnabled()
    wftool = getToolByName(self, 'portal_workflow')
    # Create sample partitions
    # A single default partition spec: no container, no preservation.
    parts = [{'services': [],
              'container':[],
              'preservation':'',
              'separate':False}]
    sample = ar.getSample()
    parts_and_services = {}
    for _i in range(len(parts)):
        p = parts[_i]
        part_prefix = sample.getId() + "-P"
        if '%s%s' % (part_prefix, _i + 1) in sample.objectIds():
            # Partition already exists: reuse it.
            # NOTE(review): this branch never assigns ``part``, so the
            # ``if new_analyses:`` block below would raise NameError when
            # the partition pre-exists — confirm against callers.
            parts[_i]['object'] = sample['%s%s' % (part_prefix, _i + 1)]
            parts_and_services['%s%s' % (part_prefix, _i + 1)] = \
                p['services']
        else:
            # Create a fresh partition and transition it per the
            # sampling-workflow setting.
            part = _createObjectByType("SamplePartition", sample, tmpID())
            parts[_i]['object'] = part
            container = None
            preservation = p['preservation']
            parts[_i]['prepreserved'] = False
            part.unmarkCreationFlag()
            part.edit(
                Container=container,
                Preservation=preservation,
            )
            part._renameAfterCreation()
            if SamplingWorkflowEnabled:
                wftool.doActionFor(part, 'sampling_workflow')
            else:
                wftool.doActionFor(part, 'no_sampling_workflow')
            parts_and_services[part.id] = p['services']
    if SamplingWorkflowEnabled:
        wftool.doActionFor(ar, 'sampling_workflow')
    else:
        wftool.doActionFor(ar, 'no_sampling_workflow')
    # Add analyses to sample partitions
    # XXX jsonapi create AR: right now, all new analyses are linked to the first samplepartition
    if new_analyses:
        # NOTE: rebinds the ``analyses`` parameter with actual Analysis
        # objects from the (last-created) partition plus the new ones.
        analyses = list(part.getAnalyses())
        analyses.extend(new_analyses)
        part.edit(
            Analyses=analyses,
        )
        for analysis in new_analyses:
            analysis.setSamplePartition(part)
    # If Preservation is required for some partitions,
    # and the SamplingWorkflow is disabled, we need
    # to transition to to_be_preserved manually.
    if not SamplingWorkflowEnabled:
        to_be_preserved = []
        sample_due = []
        lowest_state = 'sample_due'
        for p in sample.objectValues('SamplePartition'):
            if p.getPreservation():
                # Any partition needing preservation drags the sample's
                # overall state down to 'to_be_preserved'.
                lowest_state = 'to_be_preserved'
                to_be_preserved.append(p)
            else:
                sample_due.append(p)
        for p in to_be_preserved:
            doActionFor(p, 'to_be_preserved')
        for p in sample_due:
            doActionFor(p, 'sample_due')
        doActionFor(sample, lowest_state)
        for analysis in ar.objectValues('Analysis'):
            doActionFor(analysis, lowest_state)
        doActionFor(ar, lowest_state)
def _submit_arimport_p(self):
    """ load the profiles import layout

    Creates one Sample + AnalysisRequest per ARImportItem, resolving the
    requested analyses through analysis profiles (and direct service
    keywords), while publishing progress-bar events for the UI.

    BUGFIX: the original cache-miss branch for profile service UIDs called
    ``tool.lookupObject(...)`` on an undefined name ``tool`` (NameError)
    and then populated the caches under a stale ``obj`` key, so the UID
    could never be found afterwards.  The lookup now goes through the
    setup catalog by UID and the caches are keyed on the resolved service.
    """
    ars = []
    samples = []
    valid_batch = False
    client = self.aq_parent
    contact_obj = None
    cc_contact_obj = None

    # validate contact
    for contact in client.objectValues('Contact'):
        if contact.getUsername() == self.getContactID():
            contact_obj = contact
        if self.getCCContactID() == None:
            # No CC contact requested: stop as soon as the primary is found.
            if contact_obj != None:
                break
        else:
            if contact.getUsername() == self.getCCContactID():
                cc_contact_obj = contact
                if contact_obj != None:
                    break
    if contact_obj == None:
        valid_batch = False

    # get Keyword to ServiceId Map
    # Both maps cache '<uid>:<price>' strings; ``services`` is keyed by
    # service keyword, ``service_uids`` by service UID.
    services = {}
    service_uids = {}
    for service in self.bika_setup_catalog(
            portal_type = 'AnalysisService'):
        obj = service.getObject()
        keyword = obj.getKeyword()
        if keyword:
            services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())
        service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())

    samplepoints = self.bika_setup_catalog(
        portal_type = 'SamplePoint',
        Title = self.getSamplePoint())
    if not samplepoints:
        valid_batch = False

    profiles = {}
    aritems = self.objectValues('ARImportItem')

    # Progress bar for the UI.
    request = self.REQUEST
    title = 'Submitting AR Import'
    bar = ProgressBar(
        self, request, title, description='')
    event.notify(InitialiseProgressBar(bar))

    row_count = 0
    item_count = len(aritems)
    prefix = 'Sample'
    for aritem in aritems:
        # set up analyses
        ar_profile = None
        analyses = []
        row_count += 1

        for profilekey in aritem.getAnalysisProfile():
            this_profile = None
            if not profiles.has_key(profilekey):
                profiles[profilekey] = []
                # there is no profilekey index
                l_prox = self._findProfileKey(profilekey)
                if l_prox:
                    profiles[profilekey] = \
                        [s.UID() for s in l_prox.getService()]
                    this_profile = l_prox
                else:
                    #TODO This will not find it!!
                    # there is no profilekey index
                    c_prox = self.bika_setup_catalog(
                        portal_type = 'AnalysisProfile',
                        getClientUID = client.UID(),
                        getProfileKey = profilekey)
                    if c_prox:
                        obj = c_prox[0].getObject()
                        profiles[profilekey] = \
                            [s.UID() for s in obj.getService()]
                        this_profile = obj

            # NOTE(review): ``obj`` here may be stale (last catalog hit) —
            # ``this_profile`` looks like the intended value; confirm
            # before changing, as Profile ends up on the AR below.
            if ar_profile is None:
                ar_profile = obj
            else:
                ar_profile = None

            profile = profiles[profilekey]
            for analysis in profile:
                if not service_uids.has_key(analysis):
                    # Cache miss: resolve the service by UID through the
                    # setup catalog (the original used an undefined
                    # ``tool`` and keyed the caches on a stale object).
                    s_prox = self.bika_setup_catalog(UID = analysis)
                    if s_prox:
                        service = s_prox[0].getObject()
                        keyword = service.getKeyword()
                        service_uids[service.UID()] = '%s:%s' % (
                            service.UID(), service.getPrice())
                        if keyword:
                            services[keyword] = '%s:%s' % (
                                service.UID(), service.getPrice())

                if service_uids.has_key(analysis):
                    if not service_uids[analysis] in analyses:
                        analyses.append(service_uids[analysis])
                else:
                    valid_batch = False

        # Analyses requested directly by keyword.
        for analysis in aritem.getAnalyses(full_objects=True):
            if not services.has_key(analysis):
                for service in self.bika_setup_catalog(
                        portal_type = 'AnalysisService',
                        getKeyword = analysis):
                    obj = service.getObject()
                    services[analysis] = '%s:%s' % (obj.UID(), obj.getPrice())
                    service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())
            if services.has_key(analysis):
                analyses.append(services[analysis])
            else:
                valid_batch = False

        sampletypes = self.portal_catalog(
            portal_type = 'SampleType',
            sortable_title = aritem.getSampleType().lower(),
            )
        if not sampletypes:
            valid_batch = False
            return
        sampletypeuid = sampletypes[0].getObject().UID()

        if aritem.getSampleDate():
            date_items = aritem.getSampleDate().split('/')
            # NOTE(review): month/day/year order here, but the classic
            # importer (_submit_arimport_c) parses day/month/year —
            # confirm which format each layout actually uses.
            sample_date = DateTime(
                int(date_items[2]), int(date_items[0]), int(date_items[1]))
        else:
            sample_date = None

        # Create the Sample for this row.
        sample_id = '%s-%s' % (prefix, tmpID())
        sample = _createObjectByType("Sample", client, sample_id)
        sample.unmarkCreationFlag()
        sample.edit(
            SampleID = sample_id,
            ClientReference = aritem.getClientRef(),
            ClientSampleID = aritem.getClientSid(),
            SampleType = aritem.getSampleType(),
            DateSampled = sample_date,
            SamplingDate = sample_date,
            DateReceived = DateTime(),
            Remarks = aritem.getClientRemarks(),
            )
        sample._renameAfterCreation()
        sample.setSamplePoint(self.getSamplePoint())
        sample.setSampleID(sample.getId())
        event.notify(ObjectInitializedEvent(sample))
        sample.at_post_create_script()
        sample_uid = sample.UID()
        samples.append(sample_id)
        aritem.setSample(sample_uid)

        priorities = self.bika_setup_catalog(
            portal_type = 'ARPriority',
            sortable_title = aritem.Priority.lower(),
            )
        if len(priorities) < 1:
            logger.warning(
                'Invalid Priority: validation should have prevented this')
            priority = ''
        else:
            priority = priorities[0].getObject()

        # Create the AnalysisRequest for this row.
        ar_id = tmpID()
        ar = _createObjectByType("AnalysisRequest", client, ar_id)
        report_dry_matter = False
        ar.unmarkCreationFlag()
        ar.edit(
            RequestID = ar_id,
            Contact = self.getContact(),
            CCContact = self.getCCContact(),
            CCEmails = self.getCCEmailsInvoice(),
            ClientOrderNumber = self.getOrderID(),
            ReportDryMatter = report_dry_matter,
            Profile = ar_profile,
            Analyses = analyses,
            Remarks = aritem.getClientRemarks(),
            Priority = priority,
            )
        ar.setSample(sample_uid)
        sample = ar.getSample()
        ar.setSampleType(sampletypeuid)
        ar_uid = ar.UID()
        aritem.setAnalysisRequest(ar_uid)
        ars.append(ar_id)
        ar._renameAfterCreation()
        progress_index = float(row_count)/float(item_count)*100.0
        progress = ProgressState(request, progress_index)
        event.notify(UpdateProgressEvent(progress))
        self._add_services_to_ar(ar, analyses)

    self.setDateApplied(DateTime())
    self.reindexObject()
def _submit_arimport_c(self):
    """ load the classic import layout

    Creates one Sample + AnalysisRequest per ARImportItem, resolving the
    requested analyses by service keyword, while publishing progress-bar
    events for the UI.

    BUGFIX: when no ARPriority matched, the original logged a warning but
    still evaluated ``priorities[0].getObject()``, raising IndexError.
    It now falls back to an empty priority, matching _submit_arimport_p.
    """
    ars = []
    samples = []
    valid_batch = True
    client = self.aq_parent
    contact_obj = None
    cc_contact_obj = None

    # validate contact
    for contact in client.objectValues('Contact'):
        if contact.getUsername() == self.getContactID():
            contact_obj = contact
        if self.getCCContactID() == None:
            # No CC contact requested: stop as soon as the primary is found.
            if contact_obj != None:
                break
        else:
            if contact.getUsername() == self.getCCContactID():
                cc_contact_obj = contact
                if contact_obj != None:
                    break
    if contact_obj == None:
        valid_batch = False

    # get Keyword to ServiceId Map
    # Maps service keyword -> '<uid>:<price>'.
    services = {}
    for service in self.bika_setup_catalog(
            portal_type = 'AnalysisService'):
        obj = service.getObject()
        keyword = obj.getKeyword()
        if keyword:
            services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())

    samplepoints = self.bika_setup_catalog(
        portal_type = 'SamplePoint',
        Title = self.getSamplePoint())
    if not samplepoints:
        valid_batch = False

    aritems = self.objectValues('ARImportItem')

    # Progress bar for the UI.
    request = self.REQUEST
    title = 'Submitting AR Import'
    bar = ProgressBar(
        self, request, title, description='')
    event.notify(InitialiseProgressBar(bar))

    SamplingWorkflowEnabled = \
        self.bika_setup.getSamplingWorkflowEnabled()
    row_count = 0
    item_count = len(aritems)
    prefix = 'Sample'
    for aritem in aritems:
        row_count += 1
        # set up analyses
        analyses = []
        for analysis in aritem.getAnalyses(full_objects=True):
            if services.has_key(analysis):
                analyses.append(services[analysis])
            else:
                valid_batch = False

        sampletypes = self.portal_catalog(
            portal_type = 'SampleType',
            sortable_title = aritem.getSampleType().lower(),
            )
        if not sampletypes:
            valid_batch = False
            return
        sampletypeuid = sampletypes[0].getObject().UID()

        if aritem.getSampleDate():
            # Classic layout uses day/month/year.
            date_items = aritem.getSampleDate().split('/')
            sample_date = DateTime(
                int(date_items[2]), int(date_items[1]), int(date_items[0]))
        else:
            sample_date = None

        # Create the Sample for this row.
        sample_id = '%s-%s' % (prefix, tmpID())
        sample = _createObjectByType("Sample", client, sample_id)
        sample.unmarkCreationFlag()
        sample.edit(
            SampleID = sample_id,
            ClientReference = aritem.getClientRef(),
            ClientSampleID = aritem.getClientSid(),
            SampleType = aritem.getSampleType(),
            DateSampled = sample_date,
            SamplingDate = sample_date,
            DateReceived = DateTime(),
            )
        sample._renameAfterCreation()
        #sp_id = client.invokeFactory('SamplePoint', id=tmpID())
        #sp = client[sp_id]
        #sp.edit(title=self.getSamplePoint())
        sample.setSamplePoint(self.getSamplePoint())
        sample.setSampleID(sample.getId())
        event.notify(ObjectInitializedEvent(sample))
        sample.at_post_create_script()
        sample_uid = sample.UID()
        samples.append(sample_id)
        aritem.setSample(sample_uid)

        priorities = self.bika_setup_catalog(
            portal_type = 'ARPriority',
            sortable_title = aritem.Priority.lower(),
            )
        if len(priorities) < 1:
            logger.warning(
                'Invalid Priority: validation should have prevented this')
            # Fall back instead of indexing an empty result set
            # (consistent with _submit_arimport_p).
            priority = ''
        else:
            priority = priorities[0].getObject()

        #Create AR
        ar_id = tmpID()
        ar = _createObjectByType("AnalysisRequest", client, ar_id)
        if aritem.getReportDryMatter().lower() == 'y':
            report_dry_matter = True
        else:
            report_dry_matter = False
        ar.unmarkCreationFlag()
        ar.edit(
            RequestID = ar_id,
            Contact = self.getContact(),
            CCContact = self.getCCContact(),
            CCEmails = self.getCCEmailsInvoice(),
            ClientOrderNumber = self.getOrderID(),
            ReportDryMatter = report_dry_matter,
            Analyses = analyses,
            Priority = priority,
            )
        ar.setSample(sample_uid)
        sample = ar.getSample()
        ar.setSampleType(sampletypeuid)
        ar_uid = ar.UID()
        aritem.setAnalysisRequest(ar_uid)
        ars.append(ar_id)
        ar._renameAfterCreation()
        self._add_services_to_ar(ar, analyses)
        progress_index = float(row_count)/float(item_count)*100.0
        progress = ProgressState(request, progress_index)
        event.notify(UpdateProgressEvent(progress))

    #TODO REmove for production - just to look pretty
    #time.sleep(1)
    self.setDateApplied(DateTime())
    self.reindexObject()
def create(self, context, request):
    """/@@API/create: Create new object.

    Required parameters:

        - obj_type = portal_type of new object.
        - obj_path = path of new object, from plone site root.
          - Not required for obj_type=AnalysisRequest

    Optionally:

        - obj_id = ID of new object.

    All other parameters in the request are matched against the object's
    Schema.  If a matching field is found in the schema, then the value is
    taken from the request and sent to the field's mutator.

    Reference fields may have their target value(s) specified with a
    delimited string query syntax, containing the portal_catalog search:

        <FieldName>=index1:value1|index2:value2

    eg to set the Client of a batch:

        ...@@API/update?obj_path=<path>...
        ...&Client=title:<client_title>&...

    And, to set a multi-valued reference, these both work:

        ...@@API/update?obj_path=<path>...
        ...&InheritedObjects:list=title:AR1...
        ...&InheritedObjects:list=title:AR2...

        ...@@API/update?obj_path=<path>...
        ...&InheritedObjects[]=title:AR1...
        ...&InheritedObjects[]=title:AR2...

    The Analysis_Specification parameter is special, it mimics the format
    of the python dictionaries, and only service Keyword can be used to
    reference services.  Even if the keyword is not actively required, it
    must be supplied:

        <service_keyword>:min:max:error tolerance

    The function returns a dictionary as a json string:

    {
        runtime: Function running time.
        error: true or string(message) if error. false if no error.
        success: true or string(message) if success. false if no success.
    }

    >>> portal = layer['portal']
    >>> portal_url = portal.absolute_url()
    >>> from plone.app.testing import SITE_OWNER_NAME
    >>> from plone.app.testing import SITE_OWNER_PASSWORD

    Simple AR creation, no obj_path parameter is required:

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create", "&".join([
    ... "obj_type=AnalysisRequest",
    ... "Client=portal_type:Client|id:client-1",
    ... "SampleType=portal_type:SampleType|title:Apple Pulp",
    ... "Contact=portal_type:Contact|getFullname:Rita Mohale",
    ... "Services:list=portal_type:AnalysisService|title:Calcium",
    ... "Services:list=portal_type:AnalysisService|title:Copper",
    ... "Services:list=portal_type:AnalysisService|title:Magnesium",
    ... "SamplingDate=2013-09-29",
    ... "Specification=portal_type:AnalysisSpec|title:Apple Pulp",
    ... ]))
    >>> browser.contents
    '{..."success": true...}'

    If some parameters are specified and are not located as existing fields or properties
    of the created instance, the create should fail:

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create?", "&".join([
    ... "obj_type=Batch",
    ... "obj_path=/batches",
    ... "title=Test",
    ... "Thing=Fish"
    ... ]))
    >>> browser.contents
    '{...The following request fields were not used: ...Thing...}'

    Now we test that the AR create also fails if some fields are spelled wrong

    >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
    >>> browser.open(portal_url+"/@@API/create", "&".join([
    ... "obj_type=AnalysisRequest",
    ... "thing=Fish",
    ... "Client=portal_type:Client|id:client-1",
    ... "SampleType=portal_type:SampleType|title:Apple Pulp",
    ... "Contact=portal_type:Contact|getFullname:Rita Mohale",
    ... "Services:list=portal_type:AnalysisService|title:Calcium",
    ... "Services:list=portal_type:AnalysisService|title:Copper",
    ... "Services:list=portal_type:AnalysisService|title:Magnesium",
    ... "SamplingDate=2013-09-29"
    ... ]))
    >>> browser.contents
    '{...The following request fields were not used: ...thing...}'
    """
    savepoint = transaction.savepoint()
    self.context = context
    self.request = request
    # Track request keys; anything not consumed below triggers BadRequest.
    self.unused = [x for x in self.request.form.keys()]
    self.used("form.submitted")
    self.used("__ac_name")
    self.used("__ac_password")
    # always require obj_type
    self.require("obj_type")
    obj_type = self.request['obj_type']
    self.used("obj_type")
    # AnalysisRequest shortcut: creates Sample, Partition, AR, Analyses.
    if obj_type == "AnalysisRequest":
        try:
            return self._create_ar(context, request)
        except:
            # Roll back partial object creation, then re-raise unchanged.
            savepoint.rollback()
            raise
    # Other object types require explicit path as their parent
    self.require("obj_path")
    obj_path = self.request['obj_path']
    if not obj_path.startswith("/"):
        obj_path = "/" + obj_path
    self.used("obj_path")
    site_path = request['PATH_INFO'].replace("/@@API/create", "")
    parent = context.restrictedTraverse(str(site_path + obj_path))
    # normal permissions still apply for this user
    if not getSecurityManager().checkPermission(AccessJSONAPI, parent):
        msg = "You don't have the '{0}' permission on {1}".format(
            AccessJSONAPI, parent.absolute_url())
        raise Unauthorized(msg)

    obj_id = request.get("obj_id", "")
    _renameAfterCreation = False
    if not obj_id:
        _renameAfterCreation = True
        obj_id = tmpID()
    # BUGFIX: mark the request *key* as consumed.  The original called
    # self.used(obj_id) — the value (or a tmpID) — so a caller-supplied
    # obj_id always tripped the "fields were not used" BadRequest below.
    self.used("obj_id")

    ret = {
        "url": router.url_for("create", force_external=True),
        "success": True,
        "error": False,
    }
    try:
        obj = _createObjectByType(obj_type, parent, obj_id)
        obj.unmarkCreationFlag()
        if _renameAfterCreation:
            renameAfterCreation(obj)
        ret['obj_id'] = obj.getId()
        used_fields = set_fields_from_request(obj, request)
        for field in used_fields:
            self.used(field)
        obj.reindexObject()
        obj.aq_parent.reindexObject()
        event.notify(ObjectInitializedEvent(obj))
        obj.at_post_create_script()
    except:
        # Roll back partial object creation, then re-raise unchanged.
        savepoint.rollback()
        raise

    if self.unused:
        raise BadRequest("The following request fields were not used: %s. Request aborted." % self.unused)

    return ret
def _create_ar(self, context, request):
    """Creates AnalysisRequest object, with supporting Sample, Partition
    and Analysis objects.  The client is retrieved from the obj_path
    key in the request.

    Required request parameters:

        - Contact: One client contact Fullname.  The contact must exist
          in the specified client.  The first Contact with the specified
          value in it's Fullname field will be used.

        - SampleType_<index> - Must be an existing sample type.

    Optional request parameters:

    - CCContacts: A list of contact Fullnames, which will be copied on
      all messages related to this AR and it's sample or results.

    - CCEmails: A list of email addresses to include as above.

    - Sample_id: Create a secondary AR with an existing sample.  If
      unspecified, a new sample is created.

    - Specification: a lookup to set Analysis specs default values
      for all analyses

    - Analysis_Specification: specs (or overrides) per analysis, using
      a special lookup format.

        &Analysis_Specification:list=<Keyword>:min:max:error&...

    """
    wftool = getToolByName(context, 'portal_workflow')
    bc = getToolByName(context, 'bika_catalog')
    bsc = getToolByName(context, 'bika_setup_catalog')
    pc = getToolByName(context, 'portal_catalog')
    ret = {
        "url": router.url_for("create", force_external=True),
        "success": True,
        "error": False,
    }
    SamplingWorkflowEnabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Mandatory request fields; also mark them consumed for the
    # unused-fields check performed by the caller.
    for field in [
        'Client',
        'SampleType',
        'Contact',
        'SamplingDate',
        'Services']:
        self.require(field)
        self.used(field)

    try:
        client = resolve_request_lookup(context, request, 'Client')[0].getObject()
    except IndexError:
        raise Exception("Client not found")

    # Sample_id
    if 'Sample' in request:
        # Secondary AR: attach to an existing sample.
        try:
            sample = resolve_request_lookup(context, request, 'Sample')[0].getObject()
        except IndexError:
            raise Exception("Sample not found")
    else:
        # Primary AR
        sample = _createObjectByType("Sample", client, tmpID())
        sample.unmarkCreationFlag()
        fields = set_fields_from_request(sample, request)
        for field in fields:
            self.used(field)
        sample._renameAfterCreation()
        sample.setSampleID(sample.getId())
        event.notify(ObjectInitializedEvent(sample))
        sample.at_post_create_script()
        if SamplingWorkflowEnabled:
            wftool.doActionFor(sample, 'sampling_workflow')
        else:
            wftool.doActionFor(sample, 'no_sampling_workflow')
    ret['sample_id'] = sample.getId()

    # Single default partition spec: no container, no preservation.
    parts = [{'services': [],
              'container': [],
              'preservation': '',
              'separate': False}]

    specs = self.get_specs_from_request()
    ar = _createObjectByType("AnalysisRequest", client, tmpID())
    ar.unmarkCreationFlag()
    fields = set_fields_from_request(ar, request)
    for field in fields:
        self.used(field)
    ar.setSample(sample.UID())
    ar._renameAfterCreation()
    ret['ar_id'] = ar.getId()
    brains = resolve_request_lookup(context, request, 'Services')
    service_uids = [p.UID for p in brains]
    new_analyses = ar.setAnalyses(service_uids, specs=specs)
    ar.setRequestID(ar.getId())
    ar.reindexObject()
    event.notify(ObjectInitializedEvent(ar))
    ar.at_post_create_script()

    # Create sample partitions
    parts_and_services = {}
    for _i in range(len(parts)):
        p = parts[_i]
        part_prefix = sample.getId() + "-P"
        if '%s%s' % (part_prefix, _i + 1) in sample.objectIds():
            # Partition already exists (e.g. secondary AR): reuse it.
            parts[_i]['object'] = sample['%s%s' % (part_prefix, _i + 1)]
            parts_and_services['%s%s' % (part_prefix, _i + 1)] = p['services']
            part = parts[_i]['object']
        else:
            # Create a fresh partition and transition it per the
            # sampling-workflow setting.
            part = _createObjectByType("SamplePartition", sample, tmpID())
            parts[_i]['object'] = part
            container = None
            preservation = p['preservation']
            parts[_i]['prepreserved'] = False
            part.edit(
                Container=container,
                Preservation=preservation,
            )
            part.processForm()
            if SamplingWorkflowEnabled:
                wftool.doActionFor(part, 'sampling_workflow')
            else:
                wftool.doActionFor(part, 'no_sampling_workflow')
            parts_and_services[part.id] = p['services']

    if SamplingWorkflowEnabled:
        wftool.doActionFor(ar, 'sampling_workflow')
    else:
        wftool.doActionFor(ar, 'no_sampling_workflow')

    # Add analyses to sample partitions
    # XXX jsonapi create AR: right now, all new analyses are linked to the first samplepartition
    if new_analyses:
        analyses = list(part.getAnalyses())
        analyses.extend(new_analyses)
        part.edit(
            Analyses=analyses,
        )
        for analysis in new_analyses:
            analysis.setSamplePartition(part)

    # If Preservation is required for some partitions,
    # and the SamplingWorkflow is disabled, we need
    # to transition to to_be_preserved manually.
    if not SamplingWorkflowEnabled:
        to_be_preserved = []
        sample_due = []
        lowest_state = 'sample_due'
        for p in sample.objectValues('SamplePartition'):
            if p.getPreservation():
                # Any partition needing preservation drags the sample's
                # overall state down to 'to_be_preserved'.
                lowest_state = 'to_be_preserved'
                to_be_preserved.append(p)
            else:
                sample_due.append(p)
        for p in to_be_preserved:
            doActionFor(p, 'to_be_preserved')
        for p in sample_due:
            doActionFor(p, 'sample_due')
        doActionFor(sample, lowest_state)
        for analysis in ar.objectValues('Analysis'):
            doActionFor(analysis, lowest_state)
        doActionFor(ar, lowest_state)

    # receive secondary AR
    # The existing sample may already be past the receivable states, in
    # which case the AR and its analyses are advanced to 'receive' too.
    if request.get('Sample_id', ''):
        doActionFor(ar, 'sampled')
        doActionFor(ar, 'sample_due')
        not_receive = ['to_be_sampled', 'sample_due', 'sampled',
                       'to_be_preserved']
        sample_state = wftool.getInfoFor(sample, 'review_state')
        if sample_state not in not_receive:
            doActionFor(ar, 'receive')
        for analysis in ar.getAnalyses(full_objects=1):
            doActionFor(analysis, 'sampled')
            doActionFor(analysis, 'sample_due')
            if sample_state not in not_receive:
                doActionFor(analysis, 'receive')

    if self.unused:
        raise BadRequest("The following request fields were not used: %s. Request aborted." % self.unused)

    return ret