def test_run_experiment_twice(self):
    """Start the same experiment twice and verify the job state transitions.

    Walks the full lifecycle: QUEUED -> (second start rejected with
    'error') -> COMPLETED after commit, then restarts the job and
    checks it runs to COMPLETED again.  Relies on transaction.commit()
    to let the after-commit task machinery finish the job.
    """
    # create experiment
    form = self.form.get_form()
    form.request.form.update({
        'form.buttons.save': 'Create and start',
    })
    # update form with updated request
    form.update()
    # start experiment
    jt = IJobTracker(self.experiments['my-experiment'])
    self.assertEqual(jt.state, u'QUEUED')
    # starting again while queued must fail with an error state
    state = jt.start_job(form.request)
    self.assertEqual(state[0], 'error')
    # finish current job (after-commit hooks run the queued task)
    transaction.commit()
    self.assertEqual(jt.state, u'COMPLETED')
    # TODO: after commit tasks cause site to disappear and the
    #       following code will fail, because without site we can't find
    #       a catalog without which we can't find the toolkit by uuid
    jt.start_job(form.request)
    # FIXME: why is this running? (would a transaction abort work as
    #        well? to refresh my object?)
    self.assertEqual(jt.state, u'RUNNING')
    transaction.commit()
    self.assertEqual(jt.state, u'COMPLETED')
def __call__(self):
    """Start the job for the current context, report status, redirect back.

    Any (type, message) pair returned by the job tracker is surfaced to
    the user as a status message before redirecting to the context URL.
    """
    # TODO: could also just submit current context (the experiment)
    #       with all infos accessible from it
    tracker = IJobTracker(self.context)
    status, message = tracker.start_job(self.request)
    if status is not None:
        IStatusMessage(self.request).add(message, type=status)
    self.request.response.redirect(self.context.absolute_url())
def pullOccurrenceFromALA(self, lsid, taxon, common=None):
    """Create an occurrence dataset for an ALA species and start its import job.

    :param lsid: ALA life-science identifier of the taxon (stored as taxonID)
    :param taxon: scientific name; used for the dataset title and metadata
    :param common: optional vernacular name, appended to the title in parens
    :returns: ``(status, message)`` tuple as reported by the job tracker
    """
    # TODO: check permissions?
    # 1. create new dataset with taxon, lsid and common name set
    portal = getToolByName(self.context, 'portal_url').getPortalObject()
    dscontainer = portal[defaults.DATASETS_FOLDER_ID][defaults.DATASETS_SPECIES_FOLDER_ID]
    title = [taxon]
    if common:
        title.append(u"({})".format(common))
    # TODO: check whether title will be updated in transmog import?
    #       set title now to "Whatever (import pending)"?
    # TODO: make sure we get a better content id that dataset-x
    ds = createContentInContainer(dscontainer, 'org.bccvl.content.dataset', title=u' '.join(title))
    # TODO: add number of occurences to description
    ds.description = u' '.join(title) + u' imported from ALA'
    md = IBCCVLMetadata(ds)
    # TODO: provenance ... import url?
    # FIXME: verify input parameters before adding to graph
    md['genre'] = 'DataGenreSpeciesOccurrence'
    md['species'] = {
        'scientificName': taxon,
        'taxonID': lsid,
    }
    if common:
        md['species']['vernacularName'] = common
    IStatusMessage(self.request).add('New Dataset created', type='info')
    # 2. create and push alaimport job for dataset
    # TODO: make this named adapter
    jt = IJobTracker(ds)
    status, message = jt.start_job()
    # reindex object to make sure everything is up to date
    ds.reindexObject()
    # Job submission state notifier
    IStatusMessage(self.request).add(message, type=status)
    return (status, message)
def handleAdd(self, action):
    """Validate and create the new content object, then auto-start its job.

    On validation errors the form status is set and nothing is created.
    On success the freshly added object is re-fetched from the container
    (createAndAdd does not return the wrapped instance), the add is
    marked finished, and the object's job is started with any resulting
    status message shown to the user.
    """
    form_data, form_errors = self.extractData()
    self.validateAction(form_data)
    if form_errors:
        self.status = self.formErrorsMessage
        return
    # NOTE: createAndAdd does not hand back the acquisition-wrapped
    #       object (probably a bug in the base form) — re-fetch below.
    new_obj = self.createAndAdd(form_data)
    if new_obj is None:
        # object creation/add failed for some reason
        # TODO: this is probably an error here?
        return
    # look up the wrapped instance of the freshly added object
    new_obj = self.context[new_obj.id]
    # only mark the add as finished once we actually hold the new object
    self._finishedAdd = True
    IStatusMessage(self.request).addStatusMessage(_(u"Item created"), "info")
    # auto start job here
    tracker = IJobTracker(new_obj)
    status, message = tracker.start_job(self.request)
    if status is not None:
        IStatusMessage(self.request).add(message, type=status)