def setAnswerSet(self, answer_set_id):
    """Point this client at an existing answer set.

    Verifies the answer set exists on the server before storing its id;
    on a failed lookup, self.answer_set_id is left unchanged.

    :param answer_set_id: id of an answer set already present on the server
    """
    r = self.session.get(lsrh.getAnswerSetURL(answer_set_id))
    try:
        lsrh.checkStatus(r, (200,), "Answer set doesn't exist")
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not silently swallowed; lookup failure is still best-effort.
        return
    self.answer_set_id = answer_set_id
def addAnswerSet(self, trained_models=None, tags=None):
    """Create a new answer set for the current prompt and store its id.

    :param trained_models: optional list of trained-model ids to attach
        (each is converted to its URL); defaults to no models
    :param tags: optional list of tags for the answer set
    :raises: whatever lsrh.checkStatus raises when the server does not
        return 201 Created
    """
    # Mutable default arguments ([]) are shared across calls; use None
    # sentinels instead and normalize here.
    if trained_models is None:
        trained_models = []
    if tags is None:
        tags = []
    data = {
        "prompt": lsrh.getPromptURL(self.prompt_id),
        # A comprehension over an empty list is already [], so no
        # length check is needed.
        "trained_models": [lsrh.getTrainedModelURL(t) for t in trained_models],
        "tags": tags,
    }
    r = self.session.post(lsrh.getAnswerSetURL(), data=json.dumps(data))
    lsrh.checkStatus(r, (201,), "Failed to create answer set.")
    self.answer_set_id = lsrh.getIdFromResponse(r)
def runPredictions(self):
    """Create and start a prediction task for the current answer set.

    Blocks (via self.waitForTask) until the task completes.

    :returns: list of URLs of prediction results
    :raises: whatever lsrh.checkStatus raises if task creation or
        queueing fails
    """
    data = {"answer_set": lsrh.getAnswerSetURL(self.answer_set_id)}
    r = self.session.post(lsrh.getPredictionTaskURL(), data=json.dumps(data))
    lsrh.checkStatus(r, (201,), "Failed to create prediction task")
    predictionTaskData = r.json()
    # Kick off processing of the task we just created.
    r2 = self.session.post(predictionTaskData["process"])
    # Check the queueing response, consistent with uploadAnswerSet;
    # previously a failed queue call proceeded silently into waitForTask.
    lsrh.checkStatus(r2, (200, 202), "Queueing of prediction task failed.")
    r3 = self.waitForTask(predictionTaskData["url"], "prediction task")
    return r3.json()["prediction_results"]
def uploadAnswerSet(self, fname):
    """Upload a CSV answer-set file via S3 and run the server-side import.

    Creates an answer set first if none is selected, uploads the file to
    S3, then creates and starts an upload task and blocks (via
    self.waitForTask) until it completes.

    :param fname: path of the CSV file to upload
    :returns: the answer-set URL reported by the completed upload task
    :raises: whatever lsrh.checkStatus raises if task creation or
        queueing fails
    """
    # `is None`, not `== None` — identity check is the correct idiom.
    if self.answer_set_id is None:
        self.addAnswerSet()
    r = self.uploadToS3('answerset', fname)
    # The S3 response is XML; pull the object key out of it.
    soup = bsoup(r.content)
    key = soup.find('key').get_text()
    data2 = {
        'prompt': lsrh.getPromptURL(self.prompt_id),
        'answer_set': lsrh.getAnswerSetURL(self.answer_set_id),
        's3_key': key,
        'content_type': 'text/csv',
    }
    # create and start upload task
    r2 = self.session.post(lsrh.getAnswerSetUploadTaskURL(), data=json.dumps(data2))
    # Check creation before parsing JSON; previously a failed creation
    # raised an opaque decode error instead of a clear status error.
    lsrh.checkStatus(r2, (200, 201), "Failed to create answerset upload task.")
    answerUploadTaskData = r2.json()
    r3 = self.session.post(answerUploadTaskData["process"])
    lsrh.checkStatus(r3, (200, 202), "Queueing of answerset upload task failed.")
    r4 = self.waitForTask(answerUploadTaskData["url"], "answer set upload task")
    return r4.json()["answer_set"]