def runCloud(self, data):
    """Submit a StochOptim (mcem2.r) parameter-estimation job to the cloud backend.

    Builds the R command line from ``data``, persists a StochOptimJobWrapper in
    the datastore, and hands the work to backendservices. If anything fails
    after the job record exists, the record is deleted and the exception
    re-raised.
    """
    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    berniemodel = StochOptimModel()
    # Translate the StochKit model into StochOptim form; msgs carries the
    # reason(s) on failure.
    success, msgs = berniemodel.fromStochKitModel(modelDb.createStochKitModel())
    if not success:
        raise Exception(msgs)
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    # Per-job scratch directory under the app's output/ folder.
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')
    job = StochOptimJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.indata = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    # The extra quoting is deliberate: the value is substituted verbatim into
    # the command string below as --command 'bash'.
    data["exec"] = "'bash'"
    # Encode the selected pipeline stages as a compact flag string, e.g. "CEU".
    data["steps"] = ("C" if data["crossEntropyStep"] else "") + ("E" if data["emStep"] else "") + ("U" if data["uncertaintyStep"] else "")
    # data["cores"] = 4
    data["options"] = ""
    cmd = "exec/mcem2.r --steps {steps} --seed {seed} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(**data)
    # cmd = "exec/mcem2.r --K.ce 1000 --K.em 100 --rho .01 --pcutoff .05"
    stringModel, nameToIndex = berniemodel.serialize(data["activate"], True)
    job.nameToIndex = json.dumps(nameToIndex)
    # Fetch observed-trajectory and initial-condition files before persisting
    # the job, so a missing file aborts before any datastore write.
    jFileData = fileserver.FileManager.getFile(self, data["trajectoriesID"], noFile = False)
    iFileData = fileserver.FileManager.getFile(self, data["initialDataID"], noFile = False)
    # First put() assigns the datastore id used as job_id below.
    job.put()
    cloud_params = {
        "job_id" : job.key().id(),
        "job_type": "mcem2",
        # "cores": data["cores"],
        "paramstring": cmd,
        "model_file": stringModel,
        "model_data": {
            "content": self.addWeightColumnIfNecessary(iFileData["data"]),
            "extension": "txt"
        },
        "final_data": {
            "content": self.addWeightColumnIfNecessary(jFileData["data"]),
            "extension": "txt"
        },
        "key_prefix": self.user.user_id(),
        "credentials": self.user_data.getCredentials(),
        "bucketname": self.user_data.getBucketName()
    }
    # # execute cloud task
    try:
        service = backend.backendservice.backendservices(self.user_data)
        cloud_result = service.submit_cloud_task(params=cloud_params)
        if not cloud_result["success"]:
            raise Exception(cloud_result["reason"])
        # Record the handles needed later to poll/stop the cloud task.
        job.cloudDatabaseID = cloud_result["db_id"]
        job.resource = cloud_result['resource']
        job.celeryPID = cloud_result["celery_pid"]
        # job.pollProcessPID = int(cloud_result["poll_process_pid"])
        # job.pid = handle.pid
        job.put()
    except Exception as e:
        # Submission failed: remove the half-created job record, then re-raise.
        job.status='Failed'
        job.delete(self)
        raise
    return job
def runMolns(self, data):
    """Submit a parameter-sweep job to a MOLNS-managed cloud controller.

    Renders a parametersweep_template_*.py program with the model/sweep
    configuration substituted in as JSON, then starts it on the
    'EC2_controller' via molns.MOLNSExec. On failure the datastore job
    record is deleted and the exception re-raised.
    """
    # NOTE(review): set_selected(2) presumably switches user_data to the
    # molns/cloud resource — confirm against UserData.
    self.user_data.set_selected(2)
    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')
    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False
    # # execute cloud task
    try:
        # One template per model type (stochastic/deterministic/spatial).
        template_filename = 'parametersweep_template_{0}.py'.format(
            data['modelType'])
        logging.error(
            "parametersweep.runMolns() template_filename={0}".format(
                template_filename))
        logging.error("*" * 80)
        with open(os.path.join(path, template_filename), 'r') as f:
            template = f.read()
        # Everything the generated sweep program needs, serialized below.
        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            "speciesSelect": data['speciesSelect'],
            "maxTime": data['maxTime'],
            "increment": data['increment'],
            "trajectories": data['trajectories'],
            "seed": data['seed'],
            "parameterA": data['parameterA'],
            "minValueA": data['minValueA'],
            "maxValueA": data['maxValueA'],
            "stepsA": data['stepsA'],
            "logA": data['logA'],
            "parameterB": data['parameterB'],
            "minValueB": data['minValueB'],
            "maxValueB": data['maxValueB'],
            "stepsB": data['stepsB'],
            "logB": data['logB'],
            "variableCount": data['variableCount'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": False
        }
        if modelDb.isSpatial:
            # Spatial models additionally need a mesh and subdomain data.
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                    modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                raise Exception(
                    "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                )
            try:
                meshFileObj = fileserver.FileManager.getFile(
                    self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                raise Exception("Mesh file inaccessible. Try another mesh")
            templateData['reaction_subdomain_assignments'] = modelDb.spatial[
                "reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial[
                "species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial[
                "species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial[
                "initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains
        program = os.path.join(dataDir, 'program.py')
        with open(program, 'w') as f:
            jsonString = json.dumps(templateData, indent=4, sort_keys=True)
            # We've got to double escape the strings here cause of how we're substituting the JSON data in a source file
            jsonString = jsonString.replace('\\', '\\\\')
            f.write(template.replace('___JSON_STRING___', jsonString))
        molnsConfigDb = db.GqlQuery(
            "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
            self.user.user_id()).get()
        if not molnsConfigDb:
            raise Exception("Molns not initialized")
        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        # TODO(review): controller name is hard-coded here, same as in the
        # StochKit runMolns below — presumably should be looked up.
        result = molns.MOLNSExec.start_job(
            ['EC2_controller', "python {0}".format(program)], config)
        job.resource = "molns"
        job.molnsPID = result['id']
        job.put()
    except Exception as e:
        job.status = 'Failed'
        job.delete(self)
        raise
    return job
def runLocal(self, data):
    """Run a StochOptim (mcem2.r) parameter-estimation job as a local subprocess.

    Serializes the model and the trajectory/initial-data files into a fresh
    scratch directory, assembles the Rscript command line from ``data``, and
    launches it detached (via backend/wrapper.py) with ``os.setsid``. Returns
    the persisted StochOptimJobWrapper with ``pid`` set to the wrapper process.

    Fixes over the previous revision (behavior otherwise unchanged):
    - tempfile.mktemp() (deprecated, name-only, race-prone) replaced with
      NamedTemporaryFile(delete=False), which atomically creates the file;
      the files still live in dataDir with the same prefixes/suffixes.
    - manual open()/close() replaced with ``with`` blocks so handles are
      closed even if a write fails.
    - the bare ``except:`` around the cpu_count() probe narrowed to the
      exceptions it can actually raise.
    """
    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    berniemodel = StochOptimModel()
    success, msgs = berniemodel.fromStochKitModel(modelDb.createStochKitModel())
    if not success:
        raise Exception(msgs)
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    # Per-job scratch directory under the app's output/ folder.
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')
    job = StochOptimJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.indata = json.dumps(data)
    job.outData = dataDir
    job.modelName = modelDb.name
    job.resource = "local"
    job.status = "Running"
    # Convert model and write to file.
    stringModel, nameToIndex = berniemodel.serialize(data["activate"], True)
    job.nameToIndex = json.dumps(nameToIndex)
    with tempfile.NamedTemporaryFile(prefix='modelFile', suffix='.R',
                                     dir=dataDir, delete=False) as mff:
        mff.write(stringModel)
        data["model_file_file"] = mff.name
    # Observed trajectories (passed to mcem2.r as --finalData).
    jFileData = fileserver.FileManager.getFile(self, data["trajectoriesID"],
                                               noFile=False)
    with tempfile.NamedTemporaryFile(prefix='dataFile', suffix='.txt',
                                     dir=dataDir, delete=False) as mdf:
        mdf.write(self.addWeightColumnIfNecessary(jFileData["data"]))
        data["model_data_file"] = mdf.name
    # Initial data (passed to mcem2.r as --data).
    iFileData = fileserver.FileManager.getFile(self, data["initialDataID"],
                                               noFile=False)
    with tempfile.NamedTemporaryFile(prefix='dataFile', suffix='.txt',
                                     dir=dataDir, delete=False) as midf:
        midf.write(self.addWeightColumnIfNecessary(iFileData["data"]))
        data["model_initial_data_file"] = midf.name
    # Quoting is deliberate: substituted verbatim as --command "bash&".
    data["exec"] = "\"bash&\""
    # Encode the selected pipeline stages as a compact flag string, e.g. "CEU".
    data["steps"] = ("C" if data["crossEntropyStep"] else "") + ("E" if data["emStep"] else "") + ("U" if data["uncertaintyStep"] else "")
    try:
        import multiprocessing
        data["cores"] = multiprocessing.cpu_count()
    except (ImportError, NotImplementedError):
        # cpu_count() can raise NotImplementedError on exotic platforms;
        # fall back to a single core.
        data["cores"] = 1
    data["options"] = ""
    data["path"] = path
    cmd = "Rscript --vanilla {path}/../../stochoptim/exec/mcem2.r --model {model_file_file} --data {model_initial_data_file} --finalData {model_data_file} --steps {steps} --seed {seed} --cores {cores} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(**data)
    # wrapper.py tees stdout/stderr/return_code into dataDir for later polling.
    exstring = '{0}/backend/wrapper.py {1}/stdout {1}/stderr {1}/return_code {2}'.format(basedir, dataDir, cmd)
    # setsid puts the job in its own session so it can be killed as a group.
    handle = subprocess.Popen(exstring, shell=True, preexec_fn=os.setsid)
    job.pid = handle.pid
    job.put()
    return job
def runStochKitLocal(self, params):
    """ Submit a local StochKit job """
    modelDb = StochKitModelWrapper.get_by_id(params["id"])
    if not modelDb:
        return {'status':False, 'msg':'Failed to retrive the model to simulate.'}
    model = modelDb.createStochKitModel()
    # Execute as concentration or population?
    execType = params['execType'].lower()
    if execType not in ["deterministic", "stochastic", "sensitivity"]:
        raise Exception('exec_type must be deterministic, sensitivity, or stochastic. Found "{0}"'.format(execType))
    if model.units.lower() == 'concentration' and execType.lower() == 'stochastic':
        raise Exception('Concentration models cannot be executed stochastically')
    # Assemble the argument list
    args = ''
    args += ' -t {0} '.format(params['time'])
    num_output_points = int(float(params['time'])/float(params['increment']))
    args += ' -i {0} '.format(num_output_points)
    path = os.path.abspath(os.path.dirname(__file__))
    # Algorithm, SSA or Tau-leaping?
    if params['execType'] != 'deterministic':
        executable = "{0}/../../StochKit/{1}".format(path, params['algorithm'])
        args += ' --realizations {0} '.format(params['realizations'])
        args += ' --keep-trajectories '
        # A negative seed means "pick one for me"; the chosen seed is written
        # back into params so it is recorded in job.indata below.
        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)
        args += '--seed {0} '.format(params['seed'])
    else:
        executable = "{0}/../../ode/stochkit_ode.py".format(path)
        # Columns need to be labeled for visulatization page to work.
        args += ' --label'
    cmd = executable + ' ' + args
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')
    # Wow, what a hack
    # Round-trip through StochML so the 2A->B mass-action rate can be halved,
    # matching the deterministic interpretation of population models.
    if params['execType'] == 'deterministic' and model.units.lower() == 'population':
        document = model.serialize()
        model = StochMLDocument.fromString(document).toModel(model.name)
        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(reaction.marate.expression + ' / 2')
    modelFileName = '{0}/{1}.xml'.format(dataDir, model.name)
    fmodelHandle = open(modelFileName, 'w')
    fmodelHandle.write(model.serialize())
    fmodelHandle.close()
    cmd += ' -m {0} --out-dir {1}/result'.format(modelFileName, dataDir)
    logging.info("cmd =\n{}".format(cmd))
    logging.debug('simulation.runLocal(): cmd={0}'.format(cmd))
    logging.debug('*'*80)
    logging.debug('*'*80)
    #ode = "{0}/../../ode/stochkit_ode.py {1}".format(path, args)
    # wrapper.py tees stdout/stderr/return_code into dataDir for later polling.
    exstring = '{0}/backend/wrapper.py {1}/stdout {1}/stderr {1}/return_code {2}'.format(basedir, dataDir, cmd)
    logging.debug('simulation.runLocal(): exstring={0}'.format(exstring))
    logging.debug('*'*80)
    logging.debug('*'*80)
    # NOTE(review): exstring.split() breaks if any path contains whitespace.
    handle = subprocess.Popen(exstring.split(), preexec_fn=os.setsid)
    # Create a wrapper to store the Job description in the datastore
    job = StochKitJobWrapper()
    job.resource = 'Local'
    # stochkit_job.uuid = res['uuid']
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    job.pid = handle.pid
    # Create a StochKitJob instance
    job.indata = json.dumps(
        {
            "type" : 'StochKit2 Ensemble',
            "final_time" : params['time'],
            "realizations" : params['realizations'],
            "increment" : params['increment'],
            "seed" : params['seed'],
            "exec_type" : params['execType'],
            "units" : model.units.lower(),
            "epsilon" : params['epsilon'],
            "threshold" : params['threshold']
        }
    )
    job.outData = dataDir
    job.stdout = '{0}/stdout'.format(dataDir)
    job.stderr = '{0}/stderr'.format(dataDir)
    job.status = 'Running'
    job.put()
    return job
def construct_pyurdme_model(self, data):
    """Assemble and return a pyurdme.URDMEModel from a stored spatial model.

    Pulls the StochKit model, mesh, subdomain assignments, diffusion
    coefficients and initial conditions referenced by data["id"] and wires
    them into a new URDMEModel. Raises with a user-facing message when the
    mesh or a diffusion coefficient is missing, and wraps any assembly
    failure in a single "Error while assembling the model" exception.
    """
    json_model_refs = ModelManager.getModel(self, data["id"]) # data["id"] is the model id of the selected model I think
    stochkit_model_obj = StochKitModelWrapper.get_by_id(data["id"]).createStochKitModel()
    #print 'json_model_refs["spatial"]["mesh_wrapper_id"]:', json_model_refs["spatial"]["mesh_wrapper_id"]
    try:
        meshWrapperDb = mesheditor.MeshWrapper.get_by_id(json_model_refs["spatial"]["mesh_wrapper_id"])
    except Exception as e:
        raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
    try:
        meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId)
        mesh_filename = meshFileObj["storePath"]
    except IOError as e:
        #blowup here, need a mesh
        #self.response.write(json.dumps({"status" : False,
        #                                "msg" : "No Mesh file given"}))
        #return
        raise Exception("Mesh file inaccessible. Try another mesh")
    #TODO: if we get advanced options, we don't need a mesh
    reaction_subdomain_assigments = json_model_refs["spatial"]["reactions_subdomain_assignments"] #e.g. {'R1':[1,2,3]}
    species_subdomain_assigments = json_model_refs["spatial"]["species_subdomain_assignments"] #e.g. {'S1':[1,2,3]}
    species_diffusion_coefficients = json_model_refs["spatial"]["species_diffusion_coefficients"] #e.g. {'S1':0.5}
    initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
    # Spatial simulation requires a diffusion coefficient for every species;
    # fail early with a message naming the offender.
    for species in stochkit_model_obj.listOfSpecies:
        if species not in species_diffusion_coefficients:
            raise Exception("Species '{0}' does not have a diffusion coefficient set. Please do that in the Species tab of the Model Editor".format(species))
    simulation_end_time = data['time']
    simulation_time_increment = data['increment']
    simulation_algorithm = data['algorithm'] # Don't trust this! I haven't implemented the algorithm selection for this yet
    simulation_exec_type = data['execType'] # This should contain 'spatial' -- Not that you really need it, only spatial requests will be routed here
    simulation_realizations = data['realizations']
    simulation_seed = data['seed'] # If this is set to -1, it means choose a seed at random! (Whatever that means)
    #### Construct the PyURDME object from the Stockkit model and mesh and other inputs
    try:
        # model
        pymodel = pyurdme.URDMEModel(name=stochkit_model_obj.name)
        # mesh
        pymodel.mesh = pyurdme.URDMEMesh.read_dolfin_mesh(str(mesh_filename))
        # timespan
        pymodel.timespan(numpy.arange(0,simulation_end_time+simulation_time_increment, simulation_time_increment))
        # subdomains
        if len(meshWrapperDb.subdomains) > 0:
            pymodel.set_subdomain_vector(numpy.array(meshWrapperDb.subdomains))
        # species
        for s in stochkit_model_obj.listOfSpecies:
            pymodel.add_species(pyurdme.Species(name=s, diffusion_constant=float(species_diffusion_coefficients[s])))
        # species subdomain restriction
        for s, sd_list in species_subdomain_assigments.iteritems():
            spec = pymodel.listOfSpecies[s]
            pymodel.restrict(spec, sd_list)
        # parameters
        for p_name, p in stochkit_model_obj.listOfParameters.iteritems():
            pymodel.add_parameter(pyurdme.Parameter(name=p_name, expression=p.expression))
        # reactions
        for r_name, r in stochkit_model_obj.listOfReactions.iteritems():
            if r.massaction:
                pymodel.add_reaction(pyurdme.Reaction(name=r_name, reactants=r.reactants, products=r.products, rate=r.marate, massaction=True))
            else:
                pymodel.add_reaction(pyurdme.Reaction(name=r_name, reactants=r.reactants, products=r.products, propensity_function=r.propensity_function))
        # reaction subdomain restrictions
        for r in reaction_subdomain_assigments:
            pymodel.listOfReactions[r].restrict_to = reaction_subdomain_assigments[r]
        # Initial Conditions
        # initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
        for ic in initial_conditions:
            spec = pymodel.listOfSpecies[ic['species']]
            if ic['type'] == "place":
                pymodel.set_initial_condition_place_near({spec:int(ic['count'])}, point=[float(ic['x']),float(ic['y']),float(ic['z'])])
            elif ic['type'] == "scatter":
                pymodel.set_initial_condition_scatter({spec:int(ic['count'])},subdomains=[int(ic['subdomain'])])
            elif ic['type'] == "distribute":
                pymodel.set_initial_condition_distribute_uniformly({spec:int(ic['count'])},subdomains=[int(ic['subdomain'])])
            else:
                #self.response.write(json.dumps({"status" : False,
                #                                "msg" : "Unknown initial condition type {0}".format(ic['type'])}))
                #return
                raise Exception("Unknown initial condition type {0}".format(ic['type']))
    except Exception as e:
        raise Exception("Error while assembling the model: {0}".format(e))
    return pymodel
def construct_pyurdme_model(self, data):
    """Assemble and return a pyurdme.URDMEModel from a stored spatial model.

    NOTE(review): this is a re-definition of construct_pyurdme_model; since
    it appears later in the file, it shadows the earlier one. The only code
    difference is that data["id"] is coerced with int() here. Consider
    deleting the duplicate.
    """
    json_model_refs = ModelManager.getModel(self, int(data["id"])) # data["id"] is the model id of the selected model I think
    stochkit_model_obj = StochKitModelWrapper.get_by_id(int(data["id"])).createStochKitModel()
    #print 'json_model_refs["spatial"]["mesh_wrapper_id"]:', json_model_refs["spatial"]["mesh_wrapper_id"]
    try:
        meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
            json_model_refs["spatial"]["mesh_wrapper_id"])
    except Exception as e:
        raise Exception(
            "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
        )
    try:
        meshFileObj = fileserver.FileManager.getFile(
            self, meshWrapperDb.meshFileId)
        mesh_filename = meshFileObj["storePath"]
    except IOError as e:
        #blowup here, need a mesh
        #self.response.write(json.dumps({"status" : False,
        #                                "msg" : "No Mesh file given"}))
        #return
        raise Exception("Mesh file inaccessible. Try another mesh")
    #TODO: if we get advanced options, we don't need a mesh
    reaction_subdomain_assigments = json_model_refs["spatial"][
        "reactions_subdomain_assignments"]  #e.g. {'R1':[1,2,3]}
    species_subdomain_assigments = json_model_refs["spatial"][
        "species_subdomain_assignments"]  #e.g. {'S1':[1,2,3]}
    species_diffusion_coefficients = json_model_refs["spatial"][
        "species_diffusion_coefficients"]  #e.g. {'S1':0.5}
    initial_conditions = json_model_refs["spatial"][
        "initial_conditions"]  #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
    # Spatial simulation requires a diffusion coefficient for every species;
    # fail early with a message naming the offender.
    for species in stochkit_model_obj.listOfSpecies:
        if species not in species_diffusion_coefficients:
            raise Exception(
                "Species '{0}' does not have a diffusion coefficient set. Please do that in the Species tab of the Model Editor"
                .format(species))
    simulation_end_time = data['time']
    simulation_time_increment = data['increment']
    simulation_algorithm = data['algorithm']  # Don't trust this! I haven't implemented the algorithm selection for this yet
    simulation_exec_type = data['execType']  # This should contain 'spatial' -- Not that you really need it, only spatial requests will be routed here
    simulation_realizations = data['realizations']
    simulation_seed = data['seed']  # If this is set to -1, it means choose a seed at random! (Whatever that means)
    #### Construct the PyURDME object from the Stockkit model and mesh and other inputs
    try:
        # model
        pymodel = pyurdme.URDMEModel(name=stochkit_model_obj.name)
        # mesh
        pymodel.mesh = pyurdme.URDMEMesh.read_dolfin_mesh(
            str(mesh_filename))
        # timespan
        pymodel.timespan(
            numpy.arange(0,
                         simulation_end_time + simulation_time_increment,
                         simulation_time_increment))
        # subdomains
        if len(meshWrapperDb.subdomains) > 0:
            pymodel.set_subdomain_vector(
                numpy.array(meshWrapperDb.subdomains))
        # species
        for s in stochkit_model_obj.listOfSpecies:
            pymodel.add_species(
                pyurdme.Species(name=s,
                                diffusion_constant=float(
                                    species_diffusion_coefficients[s])))
        # species subdomain restriction
        for s, sd_list in species_subdomain_assigments.iteritems():
            spec = pymodel.listOfSpecies[s]
            pymodel.restrict(spec, sd_list)
        # parameters
        for p_name, p in stochkit_model_obj.listOfParameters.iteritems():
            pymodel.add_parameter(
                pyurdme.Parameter(name=p_name, expression=p.expression))
        # reactions
        for r_name, r in stochkit_model_obj.listOfReactions.iteritems():
            if r.massaction:
                pymodel.add_reaction(
                    pyurdme.Reaction(name=r_name,
                                     reactants=r.reactants,
                                     products=r.products,
                                     rate=r.marate,
                                     massaction=True))
            else:
                pymodel.add_reaction(
                    pyurdme.Reaction(
                        name=r_name,
                        reactants=r.reactants,
                        products=r.products,
                        propensity_function=r.propensity_function))
        # reaction subdomain restrictions
        for r in reaction_subdomain_assigments:
            pymodel.listOfReactions[
                r].restrict_to = reaction_subdomain_assigments[r]
        # Initial Conditions
        # initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
        for ic in initial_conditions:
            spec = pymodel.listOfSpecies[ic['species']]
            if ic['type'] == "place":
                pymodel.set_initial_condition_place_near(
                    {spec: int(ic['count'])},
                    point=[float(ic['x']),
                           float(ic['y']),
                           float(ic['z'])])
            elif ic['type'] == "scatter":
                pymodel.set_initial_condition_scatter(
                    {spec: int(ic['count'])},
                    subdomains=[int(ic['subdomain'])])
            elif ic['type'] == "distribute":
                pymodel.set_initial_condition_distribute_uniformly(
                    {spec: int(ic['count'])},
                    subdomains=[int(ic['subdomain'])])
            else:
                #self.response.write(json.dumps({"status" : False,
                #                                "msg" : "Unknown initial condition type {0}".format(ic['type'])}))
                #return
                raise Exception(
                    "Unknown initial condition type {0}".format(
                        ic['type']))
    except Exception as e:
        raise Exception("Error while assembling the model: {0}".format(e))
    return pymodel
def runMolns(self, params):
    """ Submit a remote molns StochKit job """
    modelDb = StochKitModelWrapper.get_by_id(params["id"])
    sys.stderr.write("*"*80 + "\n")
    sys.stderr.write("*"*80 + "\n")
    sys.stderr.write("runMolns\n")
    logging.info('runMolns')
    sys.stderr.write("*"*80 + "\n")
    sys.stderr.write("*"*80 + "\n")
    if not modelDb:
        return {'status':False, 'msg':'Failed to retrive the model to simulate.'}
    model = modelDb.createStochKitModel()
    # Execute as concentration or population?
    execType = params['execType'].lower()
    if execType not in ["deterministic", "stochastic", "sensitivity"]:
        raise Exception('exec_type must be deterministic, sensitivity, or stochastic. Found "{0}"'.format(execType))
    if model.units.lower() == 'concentration' and execType.lower() == 'stochastic':
        raise Exception('Concentration models cannot be executed stochastically')
    # Assemble the argument list
    args = ''
    args += ' -t {0} '.format(params['time'])
    num_output_points = int(float(params['time'])/float(params['increment']))
    args += ' -i {0} '.format(num_output_points)
    path = os.path.abspath(os.path.dirname(__file__))
    # Algorithm, SSA or Tau-leaping?
    # Executables are at fixed paths on the remote molns worker image.
    if params['execType'] != 'deterministic':
        executable = "/usr/local/StochKit/{0}".format(params['algorithm'])
        args += ' --realizations {0} '.format(params['realizations'])
        args += ' --keep-trajectories '
        # A negative seed means "pick one for me"; the chosen seed is written
        # back into params so it is recorded in job.indata below.
        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)
        args += '--seed {0} '.format(params['seed'])
    else:
        executable = "/usr/local/ode-1.0.2/stochkit_ode.py"
        # Columns need to be labeled for visulatization page to work.
        args += ' --label'
    cmd = executable + ' ' + args
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')
    # Wow, what a hack
    # Round-trip through StochML so the 2A->B mass-action rate can be halved,
    # matching the deterministic interpretation of population models.
    if params['execType'] == 'deterministic' and model.units.lower() == 'population':
        document = model.serialize()
        model = StochMLDocument.fromString(document).toModel(model.name)
        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(reaction.marate.expression + ' / 2')
    modelFileName = '{0}/{1}.xml'.format(dataDir, model.name)
    with open(modelFileName, 'w') as fmodelHandle:
        fmodelHandle.write(model.serialize())
    # Remote job runs in its own working directory, hence relative paths here.
    cmd += ' -m {0} --out-dir ./result'.format(os.path.basename(modelFileName))
    sys.stderr.write('*'*80+"\n")
    logging.error("cmd =\n{}".format(cmd))
    sys.stderr.write('simulation.runMolns(): cmd={0}\n'.format(cmd))
    # The script must stay inside the `with` so exec_file.name still exists
    # when MOLNSExec picks it up.
    with tempfile.NamedTemporaryFile() as exec_file:
        exec_file.write(cmd+"\n")
        exec_file.write("tar -czf result.tar.gz result")
        exec_file.flush()
        controllerName = 'EC2_controller' #TODO: look this up
        exec_str = "bash {0} {1}".format(exec_file.name, modelFileName)
        sys.stderr.write("result = molns.MOLNSExec.start_job(['{0}', '{1}])".format(controllerName, exec_str))
        sys.stderr.write('*'*80+"\n")
        molnsConfigDb = db.GqlQuery("SELECT * FROM MolnsConfigWrapper WHERE user_id = :1", self.user.user_id()).get()
        if not molnsConfigDb:
            raise Exception("Molns not initialized")
        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        result = molns.MOLNSExec.start_job([controllerName, exec_str], config)
        sys.stderr.write('result = {0}'.format(result))
        sys.stderr.write('*'*80+"\n")
    # Create a wrapper to store the Job description in the datastore
    # Create a StochKitJob instance
    job = StochKitJobWrapper()
    job.resource = 'Molns'
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    #job.pid = None
    # The molns job id doubles as the pid field for polling.
    job.pid = result['id']
    job.indata = json.dumps(
        {
            "type" : 'StochKit2 Ensemble',
            "final_time" : params['time'],
            "realizations" : params['realizations'],
            "increment" : params['increment'],
            "seed" : params['seed'],
            "exec_type" : params['execType'],
            "units" : model.units.lower(),
            "epsilon" : params['epsilon'],
            "threshold" : params['threshold']
        }
    )
    job.outData = dataDir
    job.status = 'Running'
    job.put()
    return job
def runMolns(self, params):
    """ Submit a remote molns StochKit job

    NOTE(review): this is a re-definition of runMolns; appearing later in
    the file, it shadows the earlier one. The code is identical apart from
    formatting — consider deleting the duplicate.
    """
    modelDb = StochKitModelWrapper.get_by_id(params["id"])
    sys.stderr.write("*" * 80 + "\n")
    sys.stderr.write("*" * 80 + "\n")
    sys.stderr.write("runMolns\n")
    logging.info('runMolns')
    sys.stderr.write("*" * 80 + "\n")
    sys.stderr.write("*" * 80 + "\n")
    if not modelDb:
        return {
            'status': False,
            'msg': 'Failed to retrive the model to simulate.'
        }
    model = modelDb.createStochKitModel()
    # Execute as concentration or population?
    execType = params['execType'].lower()
    if execType not in ["deterministic", "stochastic", "sensitivity"]:
        raise Exception(
            'exec_type must be deterministic, sensitivity, or stochastic. Found "{0}"'
            .format(execType))
    if model.units.lower() == 'concentration' and execType.lower(
    ) == 'stochastic':
        raise Exception(
            'Concentration models cannot be executed stochastically')
    # Assemble the argument list
    args = ''
    args += ' -t {0} '.format(params['time'])
    num_output_points = int(
        float(params['time']) / float(params['increment']))
    args += ' -i {0} '.format(num_output_points)
    path = os.path.abspath(os.path.dirname(__file__))
    # Algorithm, SSA or Tau-leaping?
    # Executables are at fixed paths on the remote molns worker image.
    if params['execType'] != 'deterministic':
        executable = "/usr/local/StochKit/{0}".format(params['algorithm'])
        args += ' --realizations {0} '.format(params['realizations'])
        args += ' --keep-trajectories '
        # A negative seed means "pick one for me"; the chosen seed is
        # written back into params so it lands in job.indata below.
        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)
        args += '--seed {0} '.format(params['seed'])
    else:
        executable = "/usr/local/ode-1.0.2/stochkit_ode.py"
        # Columns need to be labeled for visulatization page to work.
        args += ' --label'
    cmd = executable + ' ' + args
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')
    # Wow, what a hack
    # Round-trip through StochML so the 2A->B mass-action rate can be
    # halved for the deterministic interpretation of population models.
    if params['execType'] == 'deterministic' and model.units.lower(
    ) == 'population':
        document = model.serialize()
        model = StochMLDocument.fromString(document).toModel(model.name)
        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants
                       ) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(
                        reaction.marate.expression + ' / 2')
    modelFileName = '{0}/{1}.xml'.format(dataDir, model.name)
    with open(modelFileName, 'w') as fmodelHandle:
        fmodelHandle.write(model.serialize())
    # Remote job runs in its own working dir, hence relative paths here.
    cmd += ' -m {0} --out-dir ./result'.format(
        os.path.basename(modelFileName))
    sys.stderr.write('*' * 80 + "\n")
    logging.error("cmd =\n{}".format(cmd))
    sys.stderr.write('simulation.runMolns(): cmd={0}\n'.format(cmd))
    # The script must stay inside the `with` so exec_file.name still
    # exists when MOLNSExec picks it up.
    with tempfile.NamedTemporaryFile() as exec_file:
        exec_file.write(cmd + "\n")
        exec_file.write("tar -czf result.tar.gz result")
        exec_file.flush()
        controllerName = 'EC2_controller' #TODO: look this up
        exec_str = "bash {0} {1}".format(exec_file.name, modelFileName)
        sys.stderr.write(
            "result = molns.MOLNSExec.start_job(['{0}', '{1}])".format(
                controllerName, exec_str))
        sys.stderr.write('*' * 80 + "\n")
        molnsConfigDb = db.GqlQuery(
            "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
            self.user.user_id()).get()
        if not molnsConfigDb:
            raise Exception("Molns not initialized")
        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        result = molns.MOLNSExec.start_job([controllerName, exec_str],
                                           config)
        sys.stderr.write('result = {0}'.format(result))
        sys.stderr.write('*' * 80 + "\n")
    # Create a wrapper to store the Job description in the datastore
    # Create a StochKitJob instance
    job = StochKitJobWrapper()
    job.resource = 'Molns'
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    #job.pid = None
    # The molns job id doubles as the pid field for polling.
    job.pid = result['id']
    job.indata = json.dumps({
        "type": 'StochKit2 Ensemble',
        "final_time": params['time'],
        "realizations": params['realizations'],
        "increment": params['increment'],
        "seed": params['seed'],
        "exec_type": params['execType'],
        "units": model.units.lower(),
        "epsilon": params['epsilon'],
        "threshold": params['threshold']
    })
    job.outData = dataDir
    job.status = 'Running'
    job.put()
    return job
def runCloud(self, params):
    """Submit a StochKit ensemble (or ODE) simulation to the cloud backend.

    Serializes the model into params['document'], assembles the StochKit /
    stochkit_ode.py command line, and submits via backendservices. Returns
    the persisted StochKitJobWrapper holding the celery/db task handles.
    """
    model = StochKitModelWrapper.get_by_id(
        params["id"]).createStochKitModel()
    if not model:
        raise Exception(
            'Failed to retrive the model \'{0}\' to simulate'.format(
                params["id"]))
    #the parameter dictionary to be passed to the backend
    param = {}
    # Execute as concentration or population?
    exec_type = params['execType'].lower()
    if exec_type not in ["deterministic", "stochastic"]:
        raise Exception(
            'exec_type must be concentration or population. Found \'{0}\''.
            format(exec_type))
    if model.units.lower() == 'concentration' and exec_type.lower(
    ) == 'stochastic':
        raise Exception(
            'Concentration models cannot be executed Stochastically')
    executable = exec_type.lower()
    document = model.serialize()
    # Wow, what a hack
    # Round-trip through StochML so the 2A->B mass-action rate can be
    # halved for the deterministic interpretation of population models;
    # document is re-serialized afterwards so the change is submitted.
    if executable == 'deterministic' and model.units.lower(
    ) == 'population':
        model = StochMLDocument.fromString(document).toModel(model.name)
        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants
                       ) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(
                        reaction.marate.expression + ' / 2')
        document = model.serialize()
    params['document'] = str(document)
    filepath = ""
    params['file'] = filepath
    ensemblename = params['jobName']
    stime = params['time']
    realizations = params['realizations']
    increment = params['increment']
    # A negative seed means "pick one for me"; the chosen seed is written
    # back into params so it is recorded in job.indata below.
    if int(params['seed']) < 0:
        random.seed()
        params['seed'] = random.randint(0, 2147483647)
    seed = params['seed']
    # Assemble the argument list
    args = ''
    args += ' -t '
    args += str(stime)
    num_output_points = str(int(float(stime) / float(increment)))
    args += ' -i ' + str(num_output_points)
    path = os.path.dirname(__file__)
    # Algorithm, SSA or Tau-leaping?
    if executable != 'deterministic':
        params['job_type'] = 'stochkit'
        executable = params['algorithm']
        args += ' --realizations '
        args += str(realizations)
        # We keep all the trajectories by default. The user can select to only store means and variance
        # through the advanced options.
        if not "only-moments" in params:
            args += ' --keep-trajectories'
        if "keep-histograms" in params:
            args += ' --keep-histograms'
        args += ' --seed '
        args += str(seed)
    else:
        params['job_type'] = 'stochkit_ode'
        executable = "stochkit_ode.py"
        # Columns need to be labeled for visulatization page to work.
        args += ' --label'
    cmd = executable + ' ' + args
    params['paramstring'] = cmd
    bucketname = self.user_data.getBucketName()
    params['bucketname'] = bucketname
    params['user_id'] = self.user.user_id()
    # Call backendservices and execute StochKit
    service = backendservices(self.user_data)
    cloud_result = service.submit_cloud_task(params)
    if not cloud_result["success"]:
        e = cloud_result["exception"]
        raise Exception('Cloud execution failed: {0}'.format(e))
    celery_task_id = cloud_result["celery_pid"]
    taskid = cloud_result["db_id"]
    # Create a StochKitJob instance
    job = StochKitJobWrapper()
    job.resource = cloud_result['resource']
    # stochkit_job.uuid = res['uuid']
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    #job.pid = taskid
    job.celeryPID = celery_task_id
    job.cloudDatabaseID = taskid
    # Create a StochKitJob instance
    job.indata = json.dumps({
        "type": 'StochKit2 Ensemble',
        "final_time": params['time'],
        "realizations": params['realizations'],
        "increment": params['increment'],
        "seed": params['seed'],
        "exec_type": params['execType'],
        "units": model.units.lower(),
        "epsilon": params['epsilon'],
        "rTol": params['rTol'],
        "aTol": params['aTol'],
        "mxSteps": params['mxSteps'],
        "threshold": params['threshold']
    })
    # NOTE(review): stored as the string 'True', not the bool True — other
    # methods here assign a bool to output_stored; verify which the readers
    # of this field expect.
    job.output_stored = 'True'
    # No local output directory for cloud jobs.
    job.outData = None
    #job.stdout = '{0}/stdout'.format(dataDir)
    #job.stderr = '{0}/stderr'.format(dataDir)
    job.status = 'Running'
    job.put()
    return job
def runQsub(self, data, cluster_info):
    """
    Submit a parameter-sweep job to a qsub-style cluster.

    Builds a templateData dict from the model and request, dispatches to
    parametersweep_qsub.{stochastic,deterministic,spatial} based on
    data['modelType'], and pickles the returned handle onto the job record.

    data         -- request dict: 'modelID', 'jobName', sweep ranges, etc.
    cluster_info -- passed through to parametersweep_qsub
                    (cluster connection details, presumably -- confirm there)
    returns      -- persisted ParameterSweepJobWrapper
    raises       -- re-raises any failure after deleting the partial job
    """
    logging.error("*"*80)
    logging.error("parametersweep.runQsub() modelType={0}".format(data['modelType']))
    logging.error("*"*80)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    # Job output directory lives under the app's output/ folder.
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    try:
        templateData = {
            "name" : modelDb.name,
            "modelType" : modelDb.type,
            "species" : modelDb.species,
            "parameters" : modelDb.parameters,
            "reactions" : modelDb.reactions,
            "speciesSelect" : data['speciesSelect'],
            "maxTime" : data['maxTime'],
            "increment" : data['increment'],
            "trajectories" : data['trajectories'],
            "seed" : data['seed'],
            "parameterA" : data['parameterA'],
            "minValueA" : data['minValueA'],
            "maxValueA" : data['maxValueA'],
            "stepsA" : data['stepsA'],
            "logA" : data['logA'],
            "parameterB" : data['parameterB'],
            "minValueB" : data['minValueB'],
            "maxValueB" : data['maxValueB'],
            "stepsB" : data['stepsB'],
            "logB" : data['logB'],
            "variableCount" : data['variableCount'],
            "isSpatial" : modelDb.isSpatial,
            "isLocal" : True
        }

        # Spatial models additionally need mesh data and subdomain mappings.
        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                logging.exception(e)
                logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
            try:
                meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile = False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        # Dispatch on modelType; the cluster handle is pickled so the job can
        # be polled/cancelled later.
        if data['modelType'] == "stochastic":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.stochastic(templateData, cluster_info))
        elif data['modelType'] == "deterministic":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.deterministic(templateData, cluster_info))
        elif data['modelType'] == "spatial":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.spatial(templateData, cluster_info))
        else:
            raise Exception("Trying to runQsub on unsupported modelType {0}".format(data['modelType']))

        job.resource = "qsub"
        job.put()
    except Exception as e:
        logging.exception(e)
        job.status='Failed'
        # Remove the partially-created job record before propagating.
        job.delete(self)
        raise

    return job
def runMolns(self, data):
    """
    Submit a parameter-sweep job to cloud workers via MOLNs.

    Renders parametersweep_template.py with the model/request data substituted
    for ___JSON_STRING___, then starts it on the user's MOLNs EC2 controller.

    data    -- request dict: 'modelID', 'jobName', sweep ranges, etc.
    returns -- persisted ParameterSweepJobWrapper (molnsPID set to the
               MOLNs job id)
    raises  -- Exception if MOLNs is not configured, or re-raises any other
               failure after deleting the partial job
    """
    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])

    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    # # execute cloud task
    try:
        with open(os.path.join(path, 'parametersweep_template.py'), 'r') as f:
            template = f.read()

        templateData = {
            "name" : modelDb.name,
            "modelType" : modelDb.type,
            "species" : modelDb.species,
            "parameters" : modelDb.parameters,
            "reactions" : modelDb.reactions,
            "speciesSelect" : data['speciesSelect'],
            "maxTime" : data['maxTime'],
            "increment" : data['increment'],
            "trajectories" : data['trajectories'],
            "seed" : data['seed'],
            "parameterA" : data['parameterA'],
            "minValueA" : data['minValueA'],
            "maxValueA" : data['maxValueA'],
            "stepsA" : data['stepsA'],
            "logA" : data['logA'],
            "parameterB" : data['parameterB'],
            "minValueB" : data['minValueB'],
            "maxValueB" : data['maxValueB'],
            "stepsB" : data['stepsB'],
            "logB" : data['logB'],
            "variableCount" : data['variableCount'],
            "isSpatial" : modelDb.isSpatial,
            "isLocal" : False
        }

        # Spatial models additionally need mesh data and subdomain mappings.
        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
            try:
                meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile = False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        program = os.path.join(dataDir, 'program.py')

        with open(program, 'w') as f:
            jsonString = json.dumps(templateData, indent = 4, sort_keys = True)

            # We've got to double escape the strings here cause of how we're substituting the JSON data in a source file
            jsonString = jsonString.replace('\\', '\\\\')

            f.write(template.replace('___JSON_STRING___', jsonString))

        molnsConfigDb = db.GqlQuery("SELECT * FROM MolnsConfigWrapper WHERE user_id = :1", self.user.user_id()).get()

        if not molnsConfigDb:
            raise Exception("Molns not initialized")

        config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        result = molns.MOLNSExec.start_job(['EC2_controller', "python {0}".format(program)], config)

        job.resource = "molns"
        job.molnsPID = result['id']
        job.put()
    except Exception as e:
        job.status='Failed'
        # Remove the partially-created job record before propagating.
        job.delete(self)
        raise

    return job
def runLocal(self, data):
    """
    Run a parameter sweep locally as a background subprocess.

    Renders parametersweep_template_<modelType>.py with the model/request data
    substituted for ___JSON_STRING___, then launches it through
    backend/wrapper.py, which captures stdout/stderr/return_code under the
    job's output directory.

    data    -- request dict: 'modelID', 'modelType', 'jobName', sweep ranges
    returns -- persisted ParameterSweepJobWrapper (pid set to the wrapper's)
    raises  -- re-raises any failure after deleting the partial job
    """
    self.user_data.set_selected(0)

    logging.error("*"*80)
    logging.error("parametersweep.runLocal() modelType={0}".format(data['modelType']))
    logging.error("*"*80)

    modelDb = StochKitModelWrapper.get_by_id(data["modelID"])
    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')

    job = ParameterSweepJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.inData = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = False

    # # execute local task
    try:
        template_filename = 'parametersweep_template_{0}.py'.format(data['modelType'])
        logging.error("parametersweep.runLocal() template_filename={0}".format(template_filename))
        logging.error("*"*80)

        with open(os.path.join(path,template_filename ), 'r') as f:
            template = f.read()

        templateData = {
            "name" : modelDb.name,
            "modelType" : modelDb.type,
            "species" : modelDb.species,
            "parameters" : modelDb.parameters,
            "reactions" : modelDb.reactions,
            "speciesSelect" : data['speciesSelect'],
            "maxTime" : data['maxTime'],
            "increment" : data['increment'],
            "trajectories" : data['trajectories'],
            "seed" : data['seed'],
            "parameterA" : data['parameterA'],
            "minValueA" : data['minValueA'],
            "maxValueA" : data['maxValueA'],
            "stepsA" : data['stepsA'],
            "logA" : data['logA'],
            "parameterB" : data['parameterB'],
            "minValueB" : data['minValueB'],
            "maxValueB" : data['maxValueB'],
            "stepsB" : data['stepsB'],
            "logB" : data['logB'],
            "variableCount" : data['variableCount'],
            "isSpatial" : modelDb.isSpatial,
            "isLocal" : True
        }

        # Spatial models additionally need mesh data and subdomain mappings.
        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                logging.exception(e)
                logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
            try:
                meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile = False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        program = os.path.join(dataDir, 'stochss_parametersweep_program.py')

        with open(program, 'w') as f:
            jsonString = json.dumps(templateData, indent = 4, sort_keys = True)

            # We've got to double escape the strings here cause of how we're substituting the JSON data in a source file
            jsonString = jsonString.replace('\\', '\\\\')

            f.write(template.replace('___JSON_STRING___', jsonString))

        #? molnsConfigDb = db.GqlQuery("SELECT * FROM MolnsConfigWrapper WHERE user_id = :1", self.user.user_id()).get()
        #? if not molnsConfigDb:
        #?     raise Exception("Molns not initialized")
        #?
        #? config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)
        #? result = molns.MOLNSExec.start_job(['EC2_controller', "python {0}".format(program)], config)

        cmd = "python {0}".format(program)
        logging.info('parametersweep.runLocal(): cmd={0}'.format(cmd))
        logging.info('*'*80)

        # wrapper.py redirects the child's stdout/stderr/return_code into the
        # job's output directory so the UI can display them later.
        exstring = '{0}/backend/wrapper.py {1}/stdout {1}/stderr {1}/return_code {2}'.format(basedir, dataDir, cmd)
        logging.info('parametersweep.runLocal(): exstring={0}'.format(exstring))
        logging.info('*'*80)
        # os.setsid puts the child in its own session so it can be signalled
        # as a group.
        handle = subprocess.Popen(exstring.split(), preexec_fn=os.setsid)

        job.pid = handle.pid
        logging.info("parametersweep.runLocal() job started pid={0}".format(job.pid))
        logging.info('*'*80)

        job.resource = "local"
        job.put()
    except Exception as e:
        logging.exception(e)
        job.status='Failed'
        # Remove the partially-created job record before propagating.
        job.delete(self)
        raise

    return job
def runStochKitLocal(self, params): """ Submit a local StochKit job """ try: model = StochKitModelWrapper.get_by_id(params["id"]) if not model: return {"status": False, "msg": "Failed to retrive the model to simulate."} model = model.model # Execute as concentration or population? execType = params["execType"] if not (execType == "deterministic" or execType == "stochastic" or execType == "sensitivity"): result = { "status": False, "msg": "exec_type must be deterministic, sensitivity, or stochastic. Try refreshing page, or e-mail developers", } return result if model.units.lower() == "concentration" and execType.lower() == "stochastic": result = { "status": False, "msg": "GUI Error: Concentration models cannot be executed Stochastically. Try leaving and returning to this page", } return result executable = execType.lower() # Assemble the argument list args = "" args += " -t {0} ".format(params["time"]) num_output_points = int(float(params["time"]) / float(params["increment"])) args += " -i {0} ".format(num_output_points) path = os.path.abspath(os.path.dirname(__file__)) # Algorithm, SSA or Tau-leaping? if params["execType"] != "deterministic": executable = "{0}/../../StochKit/{1}".format(path, params["algorithm"]) args += " --realizations {0} ".format(params["realizations"]) args += " --keep-trajectories --seed {0} ".format(params["seed"]) else: executable = "{0}/../../ode/stochkit_ode.py".format(path) # Columns need to be labeled for visulatization page to work. 
args += " --label" cmd = executable + " " + args basedir = path + "/../" dataDir = tempfile.mkdtemp(dir=basedir + "output") # Wow, what a hack if executable == "deterministic" and model.units.lower() == "population": document = model.serialize() model = StochMLDocument.fromString(document).toModel(model.name) for reactionN in model.getAllReactions(): reaction = model.getAllReactions()[reactionN] if reaction.massaction: if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2: reaction.marate.setExpression(reaction.marate.expression + " / 2") modelFileName = "{0}/{1}.xml".format(dataDir, model.name) fmodelHandle = open(modelFileName, "w") fmodelHandle.write(model.serialize()) fmodelHandle.close() cmd += " -m {0} --out-dir {1}/result".format(modelFileName, dataDir) print cmd # ode = "{0}/../../ode/stochkit_ode.py {1}".format(path, args) exstring = "{0}/backend/wrapper.sh {1}/stdout {1}/stderr {2}".format(basedir, dataDir, cmd) handle = subprocess.Popen(exstring.split()) # Create a StochKitJob instance stochkit_job = StochKitJob( name=params["jobName"], final_time=params["time"], realizations=params["realizations"], increment=params["increment"], seed=params["seed"], exec_type=params["execType"], units=model.units.lower(), ) stochkit_job.resource = "Local" stochkit_job.type = "StochKit2 Ensemble" stochkit_job.pid = handle.pid stochkit_job.output_location = dataDir # stochkit_job.uuid = res['uuid'] stochkit_job.status = "Running" stochkit_job.stdout = "{0}/stdout".format(dataDir) stochkit_job.stderr = "{0}/stderr".format(dataDir) # Create a wrapper to store the Job description in the datastore stochkit_job_db = StochKitJobWrapper() stochkit_job_db.user_id = self.user.user_id() stochkit_job_db.startDate = time.strftime("%Y-%m-%d-%H-%M-%S") stochkit_job_db.name = stochkit_job.name stochkit_job_db.stochkit_job = stochkit_job stochkit_job_db.stdout = stochkit_job.stdout stochkit_job_db.stderr = stochkit_job.stderr stochkit_job_db.put() result = {"status": 
True, "msg": "Job submitted sucessfully"} except None: # Exception,e: raise e # result = {'status':False,'msg':'Local execution failed: '+str(e)} return result
def runCloud(self, params): try: model = StochKitModelWrapper.get_by_id(params["id"]).model if not model: return {"status": False, "msg": "Failed to retrive the model to simulate."} db_credentials = self.user_data.getCredentials() # Set the environmental variables os.environ["AWS_ACCESS_KEY_ID"] = db_credentials["EC2_ACCESS_KEY"] os.environ["AWS_SECRET_ACCESS_KEY"] = db_credentials["EC2_SECRET_KEY"] if os.environ["AWS_ACCESS_KEY_ID"] == "": result = {"status": False, "msg": "Access Key not set. Check : Settings > Cloud Computing"} return result if os.environ["AWS_SECRET_ACCESS_KEY"] == "": result = {"status": False, "msg": "Secret Key not set. Check : Settings > Cloud Computing"} return result # the parameter dictionary to be passed to the backend param = {} # Execute as concentration or population? exec_type = params["execType"] if not (exec_type == "deterministic" or exec_type == "stochastic"): result = { "status": False, "msg": "exec_type must be concentration or population. Try refreshing page, or e-mail developers", } return result if model.units.lower() == "concentration" and exec_type.lower() == "stochastic": result = { "status": False, "msg": "GUI Error: Concentration models cannot be executed Stochastically. 
Try leaving and returning to this page", } return result executable = exec_type.lower() document = model.serialize() if executable == "deterministic" and model.units.lower() == "population": model = StochMLDocument.fromString(document).toModel(model.name) for reactionN in model.getAllReactions(): reaction = model.getAllReactions()[reactionN] if reaction.massaction: if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2: reaction.marate.setExpression(reaction.marate.expression + " / 2") document = model.serialize() params["document"] = str(document) filepath = "" params["file"] = filepath ensemblename = params["jobName"] stime = params["time"] realizations = params["realizations"] increment = params["increment"] seed = params["seed"] # Assemble the argument list args = "" args += " -t " args += str(stime) num_output_points = str(int(float(stime) / float(increment))) args += " -i " + str(num_output_points) path = os.path.dirname(__file__) # Algorithm, SSA or Tau-leaping? if executable != "deterministic": params["job_type"] = "stochkit" executable = params["algorithm"] args += " --realizations " args += str(realizations) # We keep all the trajectories by default. The user can select to only store means and variance # through the advanced options. if not "only-moments" in params: args += " --keep-trajectories" if "keep-histograms" in params: args += " --keep-histograms" args += " --seed " args += str(seed) else: params["job_type"] = "stochkit_ode" executable = "stochkit_ode.py" print executable # Columns need to be labeled for visulatization page to work. args += " --label" cmd = executable + " " + args params["paramstring"] = cmd bucketname = self.user_data.getBucketName() params["bucketname"] = bucketname # Call backendservices and execute StochKit service = backendservices() celery_task_id, taskid = service.executeTask(params) if celery_task_id == None: result = {"status": False, "msg": "Cloud execution failed. 
"} return result # Create a StochKitJob instance stochkit_job = StochKitJob( name=ensemblename, final_time=stime, realizations=realizations, increment=increment, seed=seed, exec_type=exec_type, units=model.units.lower(), ) stochkit_job.resource = "Cloud" stochkit_job.type = "StochKit2 Ensemble" # The jobs pid is the DB/S3 ID. stochkit_job.pid = taskid # The celery_pid is the Celery Task ID. stochkit_job.celery_pid = celery_task_id stochkit_job.status = "Running" stochkit_job.output_location = None # stochkit_job.output_location = 'output/%s' % taskid # stochkit_job.stdout = stochkit_job.output_location + '/stdout.log' # stochkit_job.stderr = stochkit_job.output_location + '/stderr.log' # Create a wrapper to store the Job description in the datastore stochkit_job_db = StochKitJobWrapper() stochkit_job_db.startDate = time.strftime("%Y-%m-%d-%H-%M-%S") stochkit_job_db.user_id = self.user.user_id() stochkit_job_db.name = stochkit_job.name stochkit_job_db.stochkit_job = stochkit_job stochkit_job_db.put() result = {"status": True, "msg": "Job submitted sucessfully."} except Exception, e: result = {"status": False, "msg": "Cloud execution failed: " + str(e)}
def runStochKitLocal(self, params):
    """ Submit a local StochKit job

    Validates the request, assembles the solver command line, launches it in
    the background via backend/wrapper.py, and persists a StochKitJobWrapper
    describing the run.

    params  -- request dict: 'id', 'jobName', 'execType', 'time', 'increment',
               'realizations', 'seed', 'algorithm', 'rTol', 'aTol', 'mxSteps',
               'epsilon', 'threshold'
    returns -- the persisted StochKitJobWrapper, or a status dict when the
               model cannot be loaded
    raises  -- Exception on validation failure
    """
    modelDb = StochKitModelWrapper.get_by_id(params["id"])

    if not modelDb:
        return {
            'status': False,
            'msg': 'Failed to retrive the model to simulate.'
        }

    model = modelDb.createStochKitModel()

    # Execute as concentration or population?
    execType = params['execType'].lower()

    if execType not in ["deterministic", "stochastic", "sensitivity"]:
        raise Exception('exec_type must be deterministic, sensitivity, or stochastic. Found "{0}"'.format(execType))

    if model.units.lower() == 'concentration' and execType.lower() == 'stochastic':
        raise Exception('Concentration models cannot be executed stochastically')

    # Assemble the argument list
    args = ''
    args += ' -t {0} '.format(params['time'])
    num_output_points = int(float(params['time']) / float(params['increment']))
    args += ' -i {0} '.format(num_output_points)
    path = os.path.abspath(os.path.dirname(__file__))

    # Algorithm, SSA or Tau-leaping?
    if params['execType'] != 'deterministic':
        executable = "{0}/../../StochKit/{1}".format(path, params['algorithm'])
        args += ' --realizations {0} '.format(params['realizations'])
        args += ' --keep-trajectories '

        # A negative seed means "pick a random one for me".
        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)

        args += '--seed {0} '.format(params['seed'])
    else:
        executable = "{0}/../../ode/stochkit_ode.py".format(path)

    # Columns need to be labeled for visulatization page to work.
    args += ' --label'

    cmd = executable + ' ' + args

    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')

    # Wow, what a hack
    # For deterministic runs of population models, halve mass-action rates of
    # reactions with a single reactant of stoichiometry 2 -- presumably to
    # reconcile population vs. concentration rate conventions (TODO confirm).
    if params['execType'] == 'deterministic' and model.units.lower() == 'population':
        document = model.serialize()
        model = StochMLDocument.fromString(document).toModel(model.name)

        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(reaction.marate.expression + ' / 2')

    # Write the (possibly rewritten) model next to the job output.
    modelFileName = '{0}/{1}.xml'.format(dataDir, model.name)
    fmodelHandle = open(modelFileName, 'w')
    fmodelHandle.write(model.serialize())
    fmodelHandle.close()

    cmd += ' -m {0} --out-dir {1}/result'.format(modelFileName, dataDir)

    if params['execType'] == 'deterministic':
        cmd += ' -r {0} -a {1} --mxsteps {2}'.format(params['rTol'], params['aTol'], params['mxSteps'])

    logging.info("cmd =\n{}".format(cmd))
    logging.debug('simulation.runLocal(): cmd={0}'.format(cmd))
    logging.debug('*' * 80)
    logging.debug('*' * 80)

    #ode = "{0}/../../ode/stochkit_ode.py {1}".format(path, args)
    # wrapper.py captures stdout/stderr/return_code under the job dir.
    exstring = '{0}/backend/wrapper.py {1}/stdout {1}/stderr {1}/return_code {2}'.format(basedir, dataDir, cmd)
    logging.debug('simulation.runLocal(): exstring={0}'.format(exstring))
    logging.debug('*' * 80)
    logging.debug('*' * 80)
    # os.setsid puts the child in its own session so it can be signalled as a
    # group.
    handle = subprocess.Popen(exstring.split(), preexec_fn=os.setsid)

    # Create a wrapper to store the Job description in the datastore
    job = StochKitJobWrapper()
    job.resource = 'local'

    # stochkit_job.uuid = res['uuid']

    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    job.pid = handle.pid

    # Record the submission settings alongside the job (read back by the UI).
    job.indata = json.dumps({
        "type": 'StochKit2 Ensemble',
        "final_time": params['time'],
        "realizations": params['realizations'],
        "increment": params['increment'],
        "seed": params['seed'],
        "exec_type": params['execType'],
        "units": model.units.lower(),
        "epsilon": params['epsilon'],
        "rTol": params['rTol'],
        "aTol": params['aTol'],
        "mxSteps": params['mxSteps'],
        "threshold": params['threshold']
    })

    job.outData = dataDir
    job.stdout = '{0}/stdout'.format(dataDir)
    job.stderr = '{0}/stderr'.format(dataDir)
    job.status = 'Running'
    job.put()

    return job
def runCloud(self, params):
    """
    Submit a StochKit ensemble (or ODE) run to the cloud backend.

    Builds the solver command line from ``params``, serializes the model
    document into ``params``, submits via ``backendservices.submit_cloud_task``
    and records the submission as a StochKitJobWrapper datastore entity.

    params  -- request dict: 'id', 'jobName', 'execType', 'time', 'increment',
               'realizations', 'seed', solver options
    returns -- the persisted StochKitJobWrapper
    raises  -- Exception on validation or backend-submission failure
    """
    model = StochKitModelWrapper.get_by_id(params["id"]).createStochKitModel()

    if not model:
        raise Exception('Failed to retrive the model \'{0}\' to simulate'.format(params["id"]))

    #the parameter dictionary to be passed to the backend
    # NOTE(review): 'param' is never used below -- looks like dead code.
    param = {}

    # Execute as concentration or population?
    exec_type = params['execType'].lower()

    if exec_type not in ["deterministic", "stochastic"]:
        raise Exception('exec_type must be concentration or population. Found \'{0}\''.format(exec_type))

    if model.units.lower() == 'concentration' and exec_type.lower() == 'stochastic':
        raise Exception('Concentration models cannot be executed Stochastically' )

    executable = exec_type.lower()
    document = model.serialize()

    # Wow, what a hack
    # For deterministic runs of population models, halve mass-action rates of
    # reactions with a single reactant of stoichiometry 2 -- presumably to
    # reconcile population vs. concentration rate conventions (TODO confirm).
    if executable == 'deterministic' and model.units.lower() == 'population':
        model = StochMLDocument.fromString(document).toModel(model.name)

        for reactionN in model.getAllReactions():
            reaction = model.getAllReactions()[reactionN]
            if reaction.massaction:
                if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                    reaction.marate.setExpression(reaction.marate.expression + ' / 2')

    document = model.serialize()

    params['document']=str(document)
    filepath = ""
    params['file'] = filepath
    ensemblename = params['jobName']
    stime = params['time']
    realizations = params['realizations']
    increment = params['increment']

    # A negative seed means "pick a random one for me".
    if int(params['seed']) < 0:
        random.seed()
        params['seed'] = random.randint(0, 2147483647)

    seed = params['seed']

    # Assemble the argument list
    args = ''
    args+=' -t '
    args+=str(stime)
    num_output_points = str(int(float(stime)/float(increment)))
    args+=' -i ' + str(num_output_points)
    path = os.path.dirname(__file__)

    # Algorithm, SSA or Tau-leaping?
    if executable != 'deterministic':
        params['job_type'] = 'stochkit'
        executable = params['algorithm']
        args+=' --realizations '
        args+=str(realizations)

        # We keep all the trajectories by default. The user can select to only
        # store means and variance through the advanced options.
        if not "only-moments" in params:
            args+=' --keep-trajectories'

        if "keep-histograms" in params:
            args+=' --keep-histograms'

        args+=' --seed '
        args+=str(seed)
    else:
        params['job_type'] = 'stochkit_ode'
        executable = "stochkit_ode.py"

    # Columns need to be labeled for visulatization page to work.
    args += ' --label'

    cmd = executable+' '+args

    params['paramstring'] = cmd

    bucketname = self.user_data.getBucketName()
    params['bucketname'] = bucketname
    params['user_id'] = self.user.user_id()

    # Call backendservices and execute StochKit
    service = backendservices(self.user_data)
    cloud_result = service.submit_cloud_task(params)

    if not cloud_result["success"]:
        e = cloud_result["exception"]
        raise Exception('Cloud execution failed: {0}'.format(e))

    celery_task_id = cloud_result["celery_pid"]
    taskid = cloud_result["db_id"]

    # Create a StochKitJob instance
    job = StochKitJobWrapper()
    job.resource = cloud_result['resource']

    # stochkit_job.uuid = res['uuid']

    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = params['jobName']
    job.modelName = model.name
    #job.pid = taskid
    job.celeryPID = celery_task_id
    job.cloudDatabaseID = taskid

    # Record the submission settings alongside the job (read back by the UI).
    # NOTE(review): unlike the other runCloud copy in this file, this indata
    # omits rTol/aTol/mxSteps -- confirm whether that is intentional.
    job.indata = json.dumps({
        "type" : 'StochKit2 Ensemble',
        "final_time" : params['time'],
        "realizations" : params['realizations'],
        "increment" : params['increment'],
        "seed" : params['seed'],
        "exec_type" : params['execType'],
        "units" : model.units.lower(),
        "epsilon" : params['epsilon'],
        "threshold" : params['threshold']
    })

    job.output_stored = 'True'
    job.outData = None
    #job.stdout = '{0}/stdout'.format(dataDir)
    #job.stderr = '{0}/stderr'.format(dataDir)
    job.status = 'Running'
    job.put()

    return job
def runQsub(self, data, cluster_info):
    """
    Submit a spatial simulation job to a qsub-style cluster.

    Builds a templateData dict from the model and request, dispatches to
    parametersweep_qsub.{stochastic,deterministic,spatial} (single-run mode,
    not_full_parameter_sweep=True) based on data['execType'], and pickles the
    returned handle onto the SpatialJobWrapper.

    data         -- request dict: 'id', 'jobName', 'execType', 'time',
                    'increment', 'realizations', 'seed', 'selections'
    cluster_info -- passed through to parametersweep_qsub
    returns      -- persisted SpatialJobWrapper
    raises       -- re-raises any failure after marking the job Failed
    """
    logging.error("*" * 80)
    logging.error("simulate.runQsub() modelType={0}".format(data['execType']))
    logging.error("*" * 80)

    modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))

    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    dataDir = tempfile.mkdtemp(dir=basedir + 'output')

    job = SpatialJobWrapper()
    job.user_id = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.name = data["jobName"]
    job.indata = json.dumps(data)
    job.modelName = modelDb.name
    job.outData = dataDir
    job.status = "Pending"
    job.output_stored = "False"
    job.is_spatial = True

    try:
        # This handler receives simulation-style keys; map them onto the
        # parameter-sweep template's names.
        templateData = {
            "name": modelDb.name,
            "modelType": modelDb.type,
            "species": modelDb.species,
            "parameters": modelDb.parameters,
            "reactions": modelDb.reactions,
            # "speciesSelect": data['speciesSelect'],
            "speciesSelect": data['selections'],
            # "maxTime": data['maxTime'],
            "maxTime": data['time'],
            "increment": data['increment'],
            # "trajectories": data['trajectories'],
            "trajectories": data['realizations'],
            "seed": data['seed'],
            "isSpatial": modelDb.isSpatial,
            "isLocal": True
        }

        # Spatial models additionally need mesh data and subdomain mappings.
        if modelDb.isSpatial:
            try:
                meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
            except Exception as e:
                logging.exception(e)
                logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
            try:
                meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile=False)
                templateData["mesh"] = meshFileObj["data"]
            except IOError as e:
                logging.exception(e)
                logging.error("Mesh file inaccessible. Try another mesh")
                raise Exception("Mesh file inaccessible. Try another mesh")

            templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
            templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
            templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
            templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
            templateData['subdomains'] = meshWrapperDb.subdomains

        if data['execType'] == "stochastic":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.stochastic(templateData, cluster_info, not_full_parameter_sweep=True))
        elif data['execType'] == "deterministic":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.deterministic(templateData, cluster_info, not_full_parameter_sweep=True))
        elif data['execType'] == "spatial":
            job.qsubHandle = pickle.dumps(parametersweep_qsub.spatial(templateData, cluster_info, not_full_parameter_sweep=True))
        else:
            # BUG FIX: the `else:` had been commented out, which made this
            # raise unconditional (every submission failed after dispatch).
            # Also the message formatted data['modelType'], a key this handler
            # never receives (it dispatches on 'execType'), which would have
            # raised KeyError instead of the intended Exception.
            raise Exception("Trying to runQsub on unsupported modelType {0}".format(data['execType']))

        job.resource = "qsub"
        job.put()
    except Exception as e:
        logging.exception(e)
        job.status = 'Failed'
        #job.delete(self)
        raise

    return job
def runQsub(self, data, cluster_info): from db_models.parameter_sweep_job import ParameterSweepJobWrapper from modeleditor import StochKitModelWrapper import parametersweep_qsub logging.error("*" * 80) logging.error("simulate.runQsub() modelType={0}".format(data['execType'])) logging.error("*" * 80) modelDb = StochKitModelWrapper.get_by_id(int(data["id"])) # TODO: Ben needs to fix the following code to work directly with StochKitModelWrappers # model = StochKitModelWrapper.get_by_id(params["id"]).createStochKitModel() # # if not model: # raise Exception('Failed to retrive the model \'{0}\' to simulate'.format(params["id"])) # # # Execute as concentration or population? # exec_type = params['execType'].lower() # # if exec_type not in ["deterministic", "stochastic"]: # raise Exception('exec_type must be concentration or population. Found \'{0}\''.format(exec_type)) # # if model.units.lower() == 'concentration' and exec_type.lower() == 'stochastic': # raise Exception('Concentration models cannot be executed Stochastically' ) # # document = model.serialize() # # # Wow, what a hack # # if executable == 'deterministic' and model.units.lower() == 'population': # model = StochMLDocument.fromString(document).toModel(model.name) # # for reactionN in model.getAllReactions(): # reaction = model.getAllReactions()[reactionN] # if reaction.massaction: # if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2: # reaction.marate.setExpression(reaction.marate.expression + ' / 2') path = os.path.abspath(os.path.dirname(__file__)) basedir = path + '/../' dataDir = tempfile.mkdtemp(dir=basedir + 'output') job = StochKitJobWrapper() job.user_id = self.user.user_id() job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S") job.name = data["jobName"] #job.inData = json.dumps(data) job.indata = json.dumps({ "type" : 'StochKit2 Ensemble', "final_time" : data['time'], "realizations" : data['realizations'], "increment" : data['increment'], "seed" : data['seed'], "exec_type" : 
data['execType'], "units" : modelDb.units.lower(), "epsilon" : data['epsilon'], "rTol" : data['rTol'], "aTol" : data['aTol'], "mxSteps" : data['mxSteps'], "threshold" : data['threshold'] }) job.modelName = modelDb.name job.outData = dataDir job.status = "Pending" job.output_stored = "False" job.is_simulation = True job.resource = "qsub" try: templateData = { "name": modelDb.name, "modelType": modelDb.type, "species": modelDb.species, "parameters": modelDb.parameters, "reactions": modelDb.reactions, # "speciesSelect": data['speciesSelect'], "speciesSelect": data['selections'], # "maxTime": data['maxTime'], "maxTime": data['time'], "increment": data['increment'], # "trajectories": data['trajectories'], "trajectories": data['realizations'], "seed": data['seed'], "isSpatial": modelDb.isSpatial, "isLocal": True } if modelDb.isSpatial: try: meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"]) except Exception as e: logging.exception(e) logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor") raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor") try: meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile=False) templateData["mesh"] = meshFileObj["data"] except IOError as e: logging.exception(e) logging.error("Mesh file inaccessible. Try another mesh") raise Exception("Mesh file inaccessible. 
Try another mesh") templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"] templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"] templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"] templateData['initial_conditions'] = modelDb.spatial["initial_conditions"] templateData['subdomains'] = meshWrapperDb.subdomains if data['execType'] == "stochastic": job.qsubHandle = pickle.dumps(parametersweep_qsub.stochastic(templateData, cluster_info, not_full_parameter_sweep=True)) elif data['execType'] == "deterministic": job.qsubHandle = pickle.dumps(parametersweep_qsub.deterministic(templateData, cluster_info, not_full_parameter_sweep=True)) elif data['execType'] == "spatial": job.qsubHandle = pickle.dumps(parametersweep_qsub.spatial(templateData, cluster_info, not_full_parameter_sweep=True)) else: raise Exception("Trying to runQsub on unsupported modelType {0}".format(data['modelType'])) job.put() except Exception as e: exc_info = sys.exc_info() logging.exception(e) job.status = 'Failed' try: job.delete(self) except Exception as e: pass raise exc_info[1], None, exc_info[2] return job