def runMolns(self, data):
    """Launch a parameter-sweep job on a MOLNs cloud controller.

    Renders the modelType-specific template into a standalone program,
    writes it into a fresh output directory, and starts it through
    molns.MOLNSExec on the 'EC2_controller'.

    Args:
        data: request payload dict; must contain "modelID", "jobName",
            "modelType" and the sweep-range keys copied into the template.

    Returns:
        The saved ParameterSweepJobWrapper for the started job.

    Raises:
        Exception: on any failure; the half-created job record is deleted
            before the exception propagates.
    """
    self.user_data.set_selected(2)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    # execute cloud task
    try:
        template_filename = 'parametersweep_template_{0}.py'.format(data['modelType'])
        logging.error("parametersweep.runMolns() template_filename={0}".format(template_filename))
        logging.error("*" * 80)

        with open(os.path.join(path, template_filename), 'r') as f:
            template = f.read()

        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "speciesSelect": data['speciesSelect'],
            "maxTime": data['maxTime'],
            "increment": data['increment'],
            "trajectories": data['trajectories'],
            "seed": data['seed'],
            "parameterA": data['parameterA'],
            "minValueA": data['minValueA'],
            "maxValueA": data['maxValueA'],
            "stepsA": data['stepsA'],
            "logA": data['logA'],
            "parameterB": data['parameterB'],
            "minValueB": data['minValueB'],
            "maxValueB": data['maxValueB'],
            "stepsB": data['stepsB'],
            "logB": data['logB'],
            "variableCount": data['variableCount'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": False
        }

        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                # Log the underlying failure before converting it to a
                # user-facing error (consistent with runQsub).
                logging.exception(e)
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")

            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        program = os.path.join(dataDir, 'program.py')
        with open(program, 'w') as f:
            jsonString = json.dumps(templateData, indent=4, sort_keys=True)
            # We've got to double escape the strings here cause of how we're
            # substituting the JSON data in a source file
            jsonString = jsonString.replace('\\', '\\\\')
            f.write(template.replace('___JSON_STRING___', jsonString))

        molnsConfigDb = db.GqlQuery(
            "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
            self.user.user_id()).get()
        if not molnsConfigDb:
            raise Exception("Molns not initialized")

        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        result = molns.MOLNSExec.start_job(
            ['EC2_controller', "python {0}".format(program)], config)

        job.resource = "molns"
        job.molnsPID = result['id']
        job.put()
    except Exception:
        # Roll back the datastore record so failed jobs don't linger.
        job.status = 'Failed'
        job.delete(self)
        raise
    return job
def runQsub(self, data, cluster_info):
    """Create and submit a parameter-sweep job to a qsub-managed cluster.

    The sweep description is handed to the matching parametersweep_qsub
    entry point and the returned handle is pickled into job.qsubHandle.
    On any error the half-created job record is removed and the
    exception propagated.
    """
    logging.error("*" * 80)
    logging.error("parametersweep.runQsub() modelType={0}".format(data['modelType']))
    logging.error("*" * 80)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    try:
        # Model-derived entries first; the sweep parameters are copied
        # verbatim from the request payload just below.
        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "isSpatial": modelDb.isSpatial,
            "isLocal": True,
        }
        for key in ('speciesSelect', 'maxTime', 'increment', 'trajectories',
                    'seed', 'parameterA', 'minValueA', 'maxValueA', 'stepsA',
                    'logA', 'parameterB', 'minValueB', 'maxValueB', 'stepsB',
                    'logB', 'variableCount'):
            templateData[key] = data[key]

        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                logging.exception(e)
                logging.error(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")

            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        # Dispatch on modelType; anything unknown is an error.
        handlers = {
            "stochastic": parametersweep_qsub.stochastic,
            "deterministic": parametersweep_qsub.deterministic,
            "spatial": parametersweep_qsub.spatial,
        }
        modelType = data['modelType']
        if modelType not in handlers:
            raise Exception(
                "Trying to runQsub on unsupported modelType {0}".format(modelType))
        job.qsubHandle = pickle.dumps(handlers[modelType](templateData, cluster_info))

        job.resource = "qsub"
        job.put()
    except Exception as e:
        logging.exception(e)
        job.status = 'Failed'
        job.delete(self)
        raise
    return job
def runQsub(self, data, cluster_info):
    """Submit a parameter-sweep job to a qsub cluster.

    NOTE(review): runQsub appears to be defined more than once in this
    file; at class-creation time the last definition wins — confirm
    which copy is intended to be live.
    """
    logging.error("*" * 80)
    logging.error("parametersweep.runQsub() modelType={0}".format(data['modelType']))
    logging.error("*" * 80)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    scriptDir = os.path.abspath(os.path.dirname(__file__))
    rootDir = scriptDir + '/../'
    outputDir = tempfile.mkdtemp(dir=rootDir + 'output')

    # Record the pending job before attempting submission so failures
    # can be rolled back below.
    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = outputDir
    job.status = "Pending"
    job.output_stored = False

    try:
        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "speciesSelect": data['speciesSelect'],
            "maxTime": data['maxTime'],
            "increment": data['increment'],
            "trajectories": data['trajectories'],
            "seed": data['seed'],
            "parameterA": data['parameterA'],
            "minValueA": data['minValueA'],
            "maxValueA": data['maxValueA'],
            "stepsA": data['stepsA'],
            "logA": data['logA'],
            "parameterB": data['parameterB'],
            "minValueB": data['minValueB'],
            "maxValueB": data['maxValueB'],
            "stepsB": data['stepsB'],
            "logB": data['logB'],
            "variableCount": data['variableCount'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": True,
        }

        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception as err:
                logging.exception(err)
                logging.error(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")

            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as err:
                logging.exception(err)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        modelType = data['modelType']
        if modelType == "stochastic":
            sweepHandle = parametersweep_qsub.stochastic(templateData, cluster_info)
        elif modelType == "deterministic":
            sweepHandle = parametersweep_qsub.deterministic(templateData, cluster_info)
        elif modelType == "spatial":
            sweepHandle = parametersweep_qsub.spatial(templateData, cluster_info)
        else:
            raise Exception(
                "Trying to runQsub on unsupported modelType {0}".format(modelType))
        job.qsubHandle = pickle.dumps(sweepHandle)

        job.resource = "qsub"
        job.put()
    except Exception as err:
        logging.exception(err)
        job.status = 'Failed'
        job.delete(self)
        raise
    return job
def runMolns(self, data):
    """Start a parameter sweep on a MOLNs cloud controller.

    NOTE(review): runMolns appears to be defined more than once in this
    file; the other copy renders a modelType-specific template while
    this one always renders 'parametersweep_template.py'.  The last
    definition wins — confirm which copy is intended.
    """
    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    scriptDir = os.path.abspath(os.path.dirname(__file__))
    rootDir = scriptDir + '/../'
    outputDir = tempfile.mkdtemp(dir=rootDir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = outputDir
    job.status = "Pending"
    job.output_stored = False

    # execute cloud task
    try:
        with open(os.path.join(scriptDir, 'parametersweep_template.py'), 'r') as handle:
            template = handle.read()

        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "speciesSelect": data['speciesSelect'],
            "maxTime": data['maxTime'],
            "increment": data['increment'],
            "trajectories": data['trajectories'],
            "seed": data['seed'],
            "parameterA": data['parameterA'],
            "minValueA": data['minValueA'],
            "maxValueA": data['maxValueA'],
            "stepsA": data['stepsA'],
            "logA": data['logA'],
            "parameterB": data['parameterB'],
            "minValueB": data['minValueB'],
            "maxValueB": data['maxValueB'],
            "stepsB": data['stepsB'],
            "logB": data['logB'],
            "variableCount": data['variableCount'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": False,
        }

        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception:
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")

            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError:
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        programPath = os.path.join(outputDir, 'program.py')
        jsonString = json.dumps(templateData, indent=4, sort_keys=True)
        # We've got to double escape the strings here cause of how we're
        # substituting the JSON data in a source file
        jsonString = jsonString.replace('\\', '\\\\')
        with open(programPath, 'w') as handle:
            handle.write(template.replace('___JSON_STRING___', jsonString))

        molnsConfigDb = db.GqlQuery(
            "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
            self.user.user_id()).get()
        if not molnsConfigDb:
            raise Exception("Molns not initialized")

        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        result = molns.MOLNSExec.start_job(
            ['EC2_controller', "python {0}".format(programPath)], config)

        job.resource = "molns"
        job.molnsPID = result['id']
        job.put()
    except Exception:
        job.status = 'Failed'
        job.delete(self)
        raise
    return job
def runLocal(self, data):
    """Run a parameter sweep as a local subprocess.

    Renders the modelType-specific template into a program in a fresh
    output directory, then launches it through backend/wrapper.py, which
    captures stdout/stderr and the return code alongside the job output.

    Args:
        data: request payload dict; must contain "modelID", "jobName",
            "modelType" and the sweep-range keys copied into the template.

    Returns:
        The saved ParameterSweepJobWrapper with job.pid set to the
        wrapper process id.

    Raises:
        Exception: on any failure; the half-created job record is deleted
            before the exception propagates.
    """
    self.user_data.set_selected(0)

    logging.error("*" * 80)
    logging.error("parametersweep.runLocal() modelType={0}".format(data['modelType']))
    logging.error("*" * 80)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    # execute local task
    try:
        template_filename = 'parametersweep_template_{0}.py'.format(data['modelType'])
        logging.error("parametersweep.runLocal() template_filename={0}".format(template_filename))
        logging.error("*" * 80)

        with open(os.path.join(path, template_filename), 'r') as f:
            template = f.read()

        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "speciesSelect": data['speciesSelect'],
            "maxTime": data['maxTime'],
            "increment": data['increment'],
            "trajectories": data['trajectories'],
            "seed": data['seed'],
            "parameterA": data['parameterA'],
            "minValueA": data['minValueA'],
            "maxValueA": data['maxValueA'],
            "stepsA": data['stepsA'],
            "logA": data['logA'],
            "parameterB": data['parameterB'],
            "minValueB": data['minValueB'],
            "maxValueB": data['maxValueB'],
            "stepsB": data['stepsB'],
            "logB": data['logB'],
            "variableCount": data['variableCount'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": True
        }

        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                logging.exception(e)
                logging.error(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor")

            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        program = os.path.join(dataDir, 'stochss_parametersweep_program.py')
        with open(program, 'w') as f:
            jsonString = json.dumps(templateData, indent=4, sort_keys=True)
            # We've got to double escape the strings here cause of how we're
            # substituting the JSON data in a source file
            jsonString = jsonString.replace('\\', '\\\\')
            f.write(template.replace('___JSON_STRING___', jsonString))

        cmd = "python {0}".format(program)
        logging.info('parametersweep.runLocal(): cmd={0}'.format(cmd))
        logging.info('*' * 80)

        # wrapper.py redirects stdout/stderr and records the return code
        # into files under the job's output directory.
        exstring = '{0}/backend/wrapper.py {1}/stdout {1}/stderr {1}/return_code {2}'.format(
            basedir, dataDir, cmd)
        logging.info('parametersweep.runLocal(): exstring={0}'.format(exstring))
        logging.info('*' * 80)

        # setsid puts the child in its own process group so it can be
        # signalled as a unit later.
        handle = subprocess.Popen(exstring.split(), preexec_fn=os.setsid)
        job.pid = handle.pid
        logging.info("parametersweep.runLocal() job started pid={0}".format(job.pid))
        logging.info('*' * 80)

        job.resource = "local"
        job.put()
    except Exception as e:
        logging.exception(e)
        job.status = 'Failed'
        job.delete(self)
        raise
    return job