Example 1
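This method runs a mock query job end to end: it marks the job RUNNING, loads canned results for the job's query type from the configured mock data path, saves the result set (and optional model metadata) through the data service, indexes the results in Elasticsearch when an ES host is configured, and finally marks the job SUCCESS.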
    def executeJob(self, job):
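        """Run a mock query job end to end: flag it RUNNING, load canned
        results for its query type, persist the result set (plus optional
        model metadata and an Elasticsearch copy), then flag it SUCCESS."""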

        time.sleep(5)
        job['status'] = 'RUNNING'
        (responseCode, job) = self.service.saveJob(job)
        if responseCode != 200:
            raise Exception("Could not save job status.")

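        # load canned results for this query type from the mock data directory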
        filePath = conf.MOCK_DATA_PATH[job['queryType']]
        with open(filePath, 'r') as handle:
            data = json.loads(handle.read())
        result_set = GeqeAPI.rawResultToResultDocument(job, data)
        (response, result_set) = self.service.saveResultset(result_set)
        if response != 200:
            job['status'] = 'FAILED'
            self.service.saveJob(job)
            print(str(result_set))
            raise Exception("Could not save result set. error: " +
                            str(response))

        if 'modelSavePath' in job and job['modelSavePath'] is not None:
            # save the model metadata
            modelData = {
                "name": job['name'],
                "username": job["username"],
                "queryType": job["queryType"],
                "modelSavePath": job['modelSavePath'],
                "siteListId": job["siteListId"],
                "datasetId": job["datasetId"]
            }

            (response, modelData) = self.service.saveModelData(modelData)
            if response != 200:
                job['status'] = 'FAILED'
                self.service.saveJob(job)
                raise Exception("Could not save model metadata: " +
                                str(response) + " \n" + str(modelData))

        # save the result set into Elasticsearch
        if conf.ES_HOST is not None:
            elasticsearchConnector = GeqeAPI.ElasticSearchHelper(
                conf.ES_HOST, port=conf.ES_PORT)
            (response, es_result) = elasticsearchConnector.addResultSet(
                result_set)
            if response != 201:
                job['status'] = 'FAILED'
                self.service.saveJob(job)
                raise Exception("Could not save result set to es. error: " +
                                str(response) + " \n" + str(result_set))

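        # mark the job complete and link it to the stored result set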
        time.sleep(5)
        job['status'] = 'SUCCESS'
        job['resultsetId'] = result_set['id']
        (response, job) = self.service.saveJob(job)
        if response != 200:
            raise Exception("could not save job status.")

        return True
Example 2
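This command-line entry point parses the data service URL, an optional Elasticsearch host and port, and a job id; it then fetches the job through GeqeAPI.GeqeRestHelper, marks it RUNNING, and saves it back before loading the dataset (the excerpt ends there).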
import argparse

import GeqeAPI  # assumed import path for the project's GeqeAPI helper module

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("serviceUrl", help="loop back data service url")
    parser.add_argument("--elasticsearchHost",
                        help="Host name or ip address for elastic search.")
    parser.add_argument("--elasticsearchPort",
                        type=int,
                        help="Port for elastic search defaults to 9200",
                        default=9200)
    parser.add_argument("jobId", help="The job to execute")
    args = parser.parse_args()

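    # connect to the data service and, if a host was given, to Elasticsearch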
    dataConnector = GeqeAPI.GeqeRestHelper(args.serviceUrl)
    elasticsearchConnector = GeqeAPI.ElasticSearchHelper(
        args.elasticsearchHost,
        port=args.elasticsearchPort) if args.elasticsearchHost else None

    # Job
    (responseCode, job) = dataConnector.getJob(args.jobId)
    if 200 != responseCode:
        raise Exception("Could not read job: " + args.jobId + ' response: ' +
                        str(responseCode))
    job['status'] = 'RUNNING'
    (responseCode, job) = dataConnector.saveJob(job)
    if 200 != responseCode:
        raise Exception("Could not save job: " + args.jobId + ' response: ' +
                        str(responseCode))
    print('JOB: ', job)

    # dataset