def post(self, queryType, jobID):
    """Create a new model from a finished StochOptim job's optimized parameters.

    Reads 'data' from the request (JSON with 'parameters' and 'proposedName'),
    copies the job's source model under the proposed name with the parameter
    values replaced by the optimized ones, and writes a JSON status response.
    """
    job = StochOptimJobWrapper.get_by_id(int(jobID))

    data = json.loads(self.request.get('data'))

    parameters = data["parameters"]
    modelName = job.modelName
    proposedName = data["proposedName"]

    model = ModelManager.getModelByName(self, modelName)

    # Guard BEFORE touching the model: the original code ran
    # del model["id"] first, which raised TypeError when the source
    # model had been deleted, making this branch unreachable.
    if not model:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model '{0}' does not exist anymore. Possibly deleted".format(modelName) }))
        return

    if ModelManager.getModelByName(self, proposedName):
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model name must be unique"}))
        return

    # Drop the datastore id so updateModel creates a new entity.
    del model["id"]
    model["name"] = proposedName

    # Index the model's parameters by name, then overwrite the values
    # with the optimized ones from the job.
    parameterByName = {}
    for parameter in model["parameters"]:
        parameterByName[parameter["name"]] = parameter

    for parameter in parameters:
        parameterByName[parameter]["value"] = str(parameters[parameter])

    if ModelManager.updateModel(self, model):
        self.response.write(json.dumps({"status" : True,
                                        "msg" : "Model created",
                                        "url" : "/modeleditor?model_edited={0}".format(proposedName) }))
        return
    else:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model failed to be created, check logs"}))
        return
def post(self, queryType, jobID):
    """Create a new model from a finished StochOptim job's optimized parameters.

    Reads 'data' from the request (JSON with 'parameters' and 'proposedName'),
    copies the job's source model under the proposed name with the parameter
    values replaced by the optimized ones, and writes a JSON status response.
    """
    job = StochOptimJobWrapper.get_by_id(int(jobID))

    data = json.loads(self.request.get('data'))

    parameters = data["parameters"]
    modelName = job.modelName
    proposedName = data["proposedName"]

    model = ModelManager.getModelByName(self, modelName)

    # Guard BEFORE touching the model: the original code ran
    # del model["id"] first, which raised TypeError when the source
    # model had been deleted, making this branch unreachable.
    if not model:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model '{0}' does not exist anymore. Possibly deleted".format(modelName) }))
        return

    if ModelManager.getModelByName(self, proposedName):
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model name must be unique"}))
        return

    # Drop the datastore id so updateModel creates a new entity.
    del model["id"]
    model["name"] = proposedName

    # Index the model's parameters by name, then overwrite the values
    # with the optimized ones from the job.
    parameterByName = {}
    for parameter in model["parameters"]:
        parameterByName[parameter["name"]] = parameter

    for parameter in parameters:
        parameterByName[parameter]["value"] = str(parameters[parameter])

    if ModelManager.updateModel(self, model):
        self.response.write(json.dumps({"status" : True,
                                        "msg" : "Model created",
                                        "url" : "/modeleditor?model_edited={0}".format(proposedName) }))
        return
    else:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model failed to be created, check logs"}))
        return
def get(self, queryType = None, jobID = None):
    """Render the visualization page, or return job status/output as JSON.

    With no queryType, renders the HTML visualization page. Otherwise
    writes a JSON payload containing the job's stdout (and stderr when
    queryType is 'debug'), the backend-reported status, and job metadata.
    Cloud job output is downloaded on demand before reporting.
    """
    logging.info("JobID: {0}".format(jobID))
    jobID = int(jobID)

    output = { "jobID" : jobID }

    if queryType is None:
        self.render_response('stochoptimvisualization.html', **output)
        return

    optimization = StochOptimJobWrapper.get_by_id(jobID)
    service = backend.backendservice.backendservices(self.user_data)

    # Might need to download the cloud data
    if optimization.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
        if optimization.status == "Finished" and optimization.has_final_cloud_data():
            # Nothing more to do
            pass
        elif optimization.status == "Finished" and not optimization.has_final_cloud_data():
            # Download the final data and mark it finished
            cloud_result = self.__fetch_cloud_output(optimization)
            if cloud_result["status"]:
                optimization.mark_final_cloud_data()
        else:
            # Download current progress
            cloud_result = self.__fetch_cloud_output(optimization)

    result = status.getJobStatus(service, optimization)

    # stdout may not exist yet (e.g. the job just started) -- fall back
    # to an empty string rather than failing the whole request. The
    # original used os.open/os.fdopen with a bare except, which leaked
    # the fd on read failure and swallowed unrelated errors.
    try:
        with open("{0}/stdout".format(optimization.outData)) as f:
            output["stdout"] = f.read().strip()
    except (IOError, OSError):
        output["stdout"] = ""

    if len(output["stdout"]) == 0:
        if optimization.status == 'Running':
            output["stdout"] = "(Job running, no output available yet)"
        else:
            output["stdout"] = "(empty)"

    if queryType.lower() == "debug":
        try:
            with open("{0}/stderr".format(optimization.outData)) as f:
                output["stderr"] = f.read().strip()
        except (IOError, OSError):
            output["stderr"] = ""

        if len(output["stderr"]) == 0:
            output["stderr"] = "(empty)"

    output["nameToIndex"] = json.loads(optimization.nameToIndex)
    output["status"] = result["status"]
    output["jobName"] = optimization.name
    output["modelName"] = optimization.modelName
    output["resource"] = optimization.resource
    output["activate"] = json.loads(optimization.indata)["activate"]

    self.response.content_type = 'application/json'
    self.response.write(json.dumps(output))
    return
def post(self):
    """Dispatch StochOptim job actions selected by the 'reqType' parameter.

    Supported reqTypes: 'newJob' (launch locally or in the cloud),
    'stopJob', 'delJob', and 'getDataLocal' (prepare a zip of the job's
    data and return its URL). All responses are JSON.
    """
    reqType = self.request.get('reqType')
    self.response.content_type = 'application/json'

    if reqType == 'newJob':
        data = json.loads(self.request.get('data'))

        # Enforce per-user job-name uniqueness.
        job = db.GqlQuery("SELECT * FROM StochOptimJobWrapper WHERE user_id = :1 AND name = :2",
                          self.user.user_id(),
                          data["jobName"].strip()).get()

        if job is not None:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "Job name must be unique"}))
            return

        try:
            if data["resource"] == "local":
                # This function takes full responsibility for writing
                # responses out to the world. This is probably a bad
                # design mechanism.
                result = self.runLocal(data)
            else:
                # cloud
                result = self.runCloud(data=data)
            return self.response.write(json.dumps({
                "status": True,
                "msg": "Job launched",
                "id": result.key().id()
            }))
        except Exception as e:
            logging.exception(e)
            result = {'status': False,
                      'msg': 'Error: {0}'.format(e)}
            self.response.write(json.dumps(result))
            return
    elif reqType == 'stopJob':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        if job.user_id == self.user.user_id():
            if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
                success = job.stop(self)
                if not success:
                    return self.response.write(json.dumps({
                        'status': False,
                        'msg': 'Could not stop the job '+job.name +'. Unexpected error.'
                    }))
            else:
                job.stop(self)
        else:
            # NOTE(review): message says "delete" but this is the stop
            # branch -- confirm intended wording before changing it.
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No permissions to delete this job (this should never happen)"}))
            return
    elif reqType == 'delJob':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        if job.user_id == self.user.user_id():
            job.delete(self)
        else:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No permissions to delete this job (this should never happen)"}))
            return
    elif reqType == 'getDataLocal':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        # Build the zip only once; reuse it on subsequent requests.
        if not job.zipFileName:
            szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                         preferredName = job.name + "_")

            job.zipFileName = szip.getFileName()

            szip.addStochOptimJob(job, True)

            szip.close()

            # Save the updated status
            job.put()

        relpath = '/' + os.path.relpath(job.zipFileName,
                                        os.path.abspath(os.path.dirname(__file__) + '/../'))

        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps({ 'status' : True,
                                         'msg' : 'Job prepared',
                                         'url' : relpath }))
        return

    # Fall-through success response for stopJob/delJob paths that did
    # not already write an error.
    self.response.write(json.dumps({ 'status' : True,
                                     'msg' : 'Success'}))
def get(self, queryType = None, jobID = None):
    """Render the visualization page, or return job status/output as JSON.

    With no queryType, renders the HTML visualization page. Otherwise
    writes a JSON payload containing the job's stdout (and stderr when
    queryType is 'debug'), the stored job status, and job metadata.
    Cloud job output is downloaded on demand before reporting.
    """
    logging.info("JobID: {0}".format(jobID))
    jobID = int(jobID)

    output = { "jobID" : jobID }

    if queryType is None:
        self.render_response('stochoptimvisualization.html', **output)
        return

    optimization = StochOptimJobWrapper.get_by_id(jobID)
    service = backend.backendservice.backendservices(self.user_data)

    # Might need to download the cloud data
    if optimization.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
        if optimization.status == "Finished" and optimization.has_final_cloud_data():
            # Nothing more to do
            pass
        elif optimization.status == "Finished" and not optimization.has_final_cloud_data():
            # Download the final data and mark it finished
            cloud_result = self.__fetch_cloud_output(optimization)
            if cloud_result["status"]:
                optimization.mark_final_cloud_data()
        else:
            # Download current progress
            cloud_result = self.__fetch_cloud_output(optimization)

    # stdout may not exist yet (e.g. the job just started) -- fall back
    # to an empty string rather than failing the whole request. The
    # original used os.open/os.fdopen with a bare except, which leaked
    # the fd on read failure and swallowed unrelated errors.
    try:
        with open("{0}/stdout".format(optimization.outData)) as f:
            output["stdout"] = f.read().strip()
    except (IOError, OSError):
        output["stdout"] = ""

    if len(output["stdout"]) == 0:
        if optimization.status == 'Running':
            output["stdout"] = "(Job running, no output available yet)"
        else:
            output["stdout"] = "(empty)"

    if queryType.lower() == "debug":
        try:
            with open("{0}/stderr".format(optimization.outData)) as f:
                output["stderr"] = f.read().strip()
        except (IOError, OSError):
            output["stderr"] = ""

        if len(output["stderr"]) == 0:
            output["stderr"] = "(empty)"

    output["nameToIndex"] = json.loads(optimization.nameToIndex)
    output["status"] = optimization.status
    output["jobName"] = optimization.name
    output["modelName"] = optimization.modelName
    output["resource"] = optimization.resource
    output["activate"] = json.loads(optimization.indata)["activate"]

    self.response.content_type = 'application/json'
    self.response.write(json.dumps(output))
    return
def post(self):
    """Dispatch StochOptim job actions selected by the 'reqType' parameter.

    Supported reqTypes: 'newJob' (launch locally or in the cloud),
    'stopJob', 'delJob', and 'getDataLocal' (prepare a zip of the job's
    data and return its URL). All responses are JSON.
    """
    reqType = self.request.get('reqType')
    self.response.content_type = 'application/json'

    if reqType == 'newJob':
        data = json.loads(self.request.get('data'))

        # Enforce per-user job-name uniqueness.
        job = db.GqlQuery("SELECT * FROM StochOptimJobWrapper WHERE user_id = :1 AND name = :2",
                          self.user.user_id(),
                          data["jobName"].strip()).get()

        if job is not None:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "Job name must be unique"}))
            return

        try:
            if data["resource"] == "local":
                # This function takes full responsibility for writing
                # responses out to the world. This is probably a bad
                # design mechanism.
                result = self.runLocal(data)
            else:
                # cloud
                result = self.runCloud(data=data)
            return self.response.write(json.dumps({
                "status": True,
                "msg": "Job launched",
                "id": result.key().id()
            }))
        except Exception as e:
            logging.exception(e)
            result = {'status': False,
                      'msg': 'Error: {0}'.format(e)}
            self.response.write(json.dumps(result))
            return
    elif reqType == 'stopJob':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        if job.user_id == self.user.user_id():
            if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
                success = job.stop(self)
                if not success:
                    return self.response.write(json.dumps({
                        'status': False,
                        'msg': 'Could not stop the job '+job.name +'. Unexpected error.'
                    }))
            else:
                job.stop(self)
        else:
            # NOTE(review): message says "delete" but this is the stop
            # branch -- confirm intended wording before changing it.
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No permissions to delete this job (this should never happen)"}))
            return
    elif reqType == 'delJob':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        if job.user_id == self.user.user_id():
            job.delete(self)
        else:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No permissions to delete this job (this should never happen)"}))
            return
    elif reqType == 'getDataLocal':
        jobID = int(json.loads(self.request.get('id')))

        job = StochOptimJobWrapper.get_by_id(jobID)

        # Build the zip only once; reuse it on subsequent requests.
        if not job.zipFileName:
            szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                         preferredName = job.name + "_")

            job.zipFileName = szip.getFileName()

            szip.addStochOptimJob(job, True)

            szip.close()

            # Save the updated status
            job.put()

        relpath = '/' + os.path.relpath(job.zipFileName,
                                        os.path.abspath(os.path.dirname(__file__) + '/../'))

        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps({ 'status' : True,
                                         'msg' : 'Job prepared',
                                         'url' : relpath }))
        return

    # Fall-through success response for stopJob/delJob paths that did
    # not already write an error.
    self.response.write(json.dumps({ 'status' : True,
                                     'msg' : 'Success'}))