def post(self, queryType, jobID):
    """Create a new model from a StochOptim job's estimated parameter values.

    Loads the job's source model, applies the parameter values posted by the
    client under the requested unique name, and writes a JSON status object
    to the response.

    Args:
        queryType: unused here; part of the route signature.
        jobID: datastore id of the StochOptimJobWrapper whose model is copied.
    """
    job = StochOptimJobWrapper.get_by_id(int(jobID))

    data = json.loads(self.request.get('data'))

    parameters = data["parameters"]
    modelName = job.modelName
    proposedName = data["proposedName"]

    model = ModelManager.getModelByName(self, modelName)

    # The new name must not collide with an existing model.
    if ModelManager.getModelByName(self, proposedName):
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model name must be unique"}))
        return

    # BUGFIX: check existence BEFORE touching the model dict. The original
    # code ran `del model["id"]` first, which raised TypeError on None when
    # the source model had been deleted, making this branch unreachable.
    if not model:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model '{0}' does not exist anymore. Possibly deleted".format(modelName) }))
        return

    # Drop the old datastore id so updateModel stores this as a new model.
    del model["id"]

    model["name"] = proposedName

    # Index the model's parameters by name, then overwrite the values with
    # the job's estimates (stringified, as the model editor expects).
    parameterByName = {}
    for parameter in model["parameters"]:
        parameterByName[parameter["name"]] = parameter

    for parameter in parameters:
        parameterByName[parameter]["value"] = str(parameters[parameter])

    if ModelManager.updateModel(self, model):
        self.response.write(json.dumps({"status" : True,
                                        "msg" : "Model created",
                                        "url" : "/modeleditor?model_edited={0}".format(proposedName) }))
        return
    else:
        self.response.write(json.dumps({"status" : False,
                                        "msg" : "Model failed to be created, check logs"}))
        return
def __get_context(self):
    """Build the template context: compute resources, current selection,
    and the user's models serialized as JSON."""
    context = {}
    result = {}

    resources = []
    # Important for UI, do not change key_file_id.
    resources.append(dict(json="{'uuid':0, 'key_file_id':0}",
                          uuid=0,
                          name="Default (local resources)"))
    for node in self.user_data.get_cluster_node_info():
        node['json'] = json.dumps(node)
        node['name'] = 'Cluster: ' + node['username'] + '@' + node['ip']
        resources.append(node)
    context['resources'] = resources

    context['selected'] = self.user_data.get_selected()
    logging.info("context['selected'] = {0}".format(context['selected']))

    context['initialData'] = json.dumps(ModelManager.getModels(self))

    # Merge (result is currently empty; context keys win on collision).
    context = dict(result, **context)
    return context
def __get_context(self):
    """Assemble the page context for rendering: available resources,
    the user's selected resource, and their models as JSON."""
    ctx = {}
    base = {}

    # Important for UI, do not change key_file_id.
    default_resource = dict(json="{'uuid':0, 'key_file_id':0}",
                            uuid=0,
                            name="Default (local resources)")
    ctx['resources'] = [default_resource]

    for resource in self.user_data.get_cluster_node_info():
        resource['json'] = json.dumps(resource)
        resource['name'] = 'Cluster: ' + resource['username'] + '@' + resource['ip']
        ctx['resources'].append(resource)

    ctx['selected'] = self.user_data.get_selected()
    logging.info("context['selected'] = {0}".format(ctx['selected']))

    ctx['initialData'] = json.dumps(ModelManager.getModels(self))

    # Overlay onto the (empty) base dict; ctx entries take precedence.
    ctx = dict(base, **ctx)
    return ctx
def construct_pyurdme_model(self, data):
    '''
    Build and return a pyurdme.URDMEModel assembled from the stored StochKit
    model identified by data["id"], its mesh, and the spatial settings
    (subdomain assignments, diffusion coefficients, initial conditions)
    saved with the model.  Raises Exception with a user-facing message when
    a required piece (mesh, diffusion coefficient, ...) is missing.
    '''
    json_model_refs = ModelManager.getModel(
        self, int(data["id"])
    )  # data["id"] is the model id of the selected model I think

    stochkit_model_obj = StochKitModelWrapper.get_by_id(int(
        data["id"])).createStochKitModel()
    #print 'json_model_refs["spatial"]["mesh_wrapper_id"]:', json_model_refs["spatial"]["mesh_wrapper_id"]

    try:
        meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
            json_model_refs["spatial"]["mesh_wrapper_id"])
    except Exception as e:
        raise Exception(
            "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
        )
    try:
        meshFileObj = fileserver.FileManager.getFile(
            self, meshWrapperDb.meshFileId)
        mesh_filename = meshFileObj["storePath"]
    except IOError as e:
        #blowup here, need a mesh
        #self.response.write(json.dumps({"status" : False,
        #                                "msg" : "No Mesh file given"}))
        #return
        raise Exception("Mesh file inaccessible. Try another mesh")
    #TODO: if we get advanced options, we don't need a mesh

    reaction_subdomain_assigments = json_model_refs["spatial"][
        "reactions_subdomain_assignments"]  #e.g. {'R1':[1,2,3]}
    species_subdomain_assigments = json_model_refs["spatial"][
        "species_subdomain_assignments"]  #e.g. {'S1':[1,2,3]}
    species_diffusion_coefficients = json_model_refs["spatial"][
        "species_diffusion_coefficients"]  #e.g. {'S1':0.5}
    initial_conditions = json_model_refs["spatial"][
        "initial_conditions"]  #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }

    # Every species must have a diffusion coefficient before the spatial
    # model can be assembled.
    for species in stochkit_model_obj.listOfSpecies:
        if species not in species_diffusion_coefficients:
            raise Exception(
                "Species '{0}' does not have a diffusion coefficient set. Please do that in the Species tab of the Model Editor"
                .format(species))

    simulation_end_time = data['time']
    simulation_time_increment = data['increment']
    simulation_algorithm = data[
        'algorithm']  # Don't trust this! I haven't implemented the algorithm selection for this yet
    simulation_exec_type = data[
        'execType']  # This should contain 'spatial' -- Not that you really need it, only spatial requests will be routed here
    simulation_realizations = data['realizations']
    simulation_seed = data[
        'seed']  # If this is set to -1, it means choose a seed at random! (Whatever that means)

    #### Construct the PyURDME object from the Stockkit model and mesh and other inputs
    try:
        # model
        pymodel = pyurdme.URDMEModel(name=stochkit_model_obj.name)
        # mesh
        pymodel.mesh = pyurdme.URDMEMesh.read_dolfin_mesh(
            str(mesh_filename))
        # timespan: 0 .. end (inclusive, hence the extra increment) in steps
        pymodel.timespan(
            numpy.arange(0,
                         simulation_end_time + simulation_time_increment,
                         simulation_time_increment))
        # subdomains
        if len(meshWrapperDb.subdomains) > 0:
            pymodel.set_subdomain_vector(
                numpy.array(meshWrapperDb.subdomains))
        # species
        for s in stochkit_model_obj.listOfSpecies:
            pymodel.add_species(
                pyurdme.Species(name=s,
                                diffusion_constant=float(
                                    species_diffusion_coefficients[s])))
        # species subdomain restriction
        for s, sd_list in species_subdomain_assigments.iteritems():
            spec = pymodel.listOfSpecies[s]
            pymodel.restrict(spec, sd_list)
        # parameters
        for p_name, p in stochkit_model_obj.listOfParameters.iteritems():
            pymodel.add_parameter(
                pyurdme.Parameter(name=p_name, expression=p.expression))
        # reactions (mass-action reactions carry a rate; custom ones a
        # propensity function)
        for r_name, r in stochkit_model_obj.listOfReactions.iteritems():
            if r.massaction:
                pymodel.add_reaction(
                    pyurdme.Reaction(name=r_name,
                                     reactants=r.reactants,
                                     products=r.products,
                                     rate=r.marate,
                                     massaction=True))
            else:
                pymodel.add_reaction(
                    pyurdme.Reaction(
                        name=r_name,
                        reactants=r.reactants,
                        products=r.products,
                        propensity_function=r.propensity_function))
        # reaction subdomain restrictions
        for r in reaction_subdomain_assigments:
            pymodel.listOfReactions[
                r].restrict_to = reaction_subdomain_assigments[r]
        # Initial Conditions
        # initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
        for ic in initial_conditions:
            spec = pymodel.listOfSpecies[ic['species']]
            if ic['type'] == "place":
                pymodel.set_initial_condition_place_near(
                    {spec: int(ic['count'])},
                    point=[float(ic['x']),
                           float(ic['y']),
                           float(ic['z'])])
            elif ic['type'] == "scatter":
                pymodel.set_initial_condition_scatter(
                    {spec: int(ic['count'])},
                    subdomains=[int(ic['subdomain'])])
            elif ic['type'] == "distribute":
                pymodel.set_initial_condition_distribute_uniformly(
                    {spec: int(ic['count'])},
                    subdomains=[int(ic['subdomain'])])
            else:
                #self.response.write(json.dumps({"status" : False,
                #                                "msg" : "Unknown initial condition type {0}".format(ic['type'])}))
                #return
                raise Exception(
                    "Unknown initial condition type {0}".format(
                        ic['type']))
    except Exception as e:
        # Wrap any assembly failure with context for the caller/UI.
        raise Exception("Error while assembling the model: {0}".format(e))

    return pymodel
def construct_pyurdme_model(self, data):
    '''
    Build and return a pyurdme.URDMEModel from the stored StochKit model
    referenced by data["id"], its mesh, and the model's spatial settings
    (subdomain assignments, diffusion coefficients, initial conditions).
    Raises Exception with a user-facing message when a required piece
    (mesh, diffusion coefficient, ...) is missing.
    '''
    json_model_refs = ModelManager.getModel(self, data["id"]) # data["id"] is the model id of the selected model I think

    stochkit_model_obj = StochKitModelWrapper.get_by_id(data["id"]).createStochKitModel()
    #print 'json_model_refs["spatial"]["mesh_wrapper_id"]:', json_model_refs["spatial"]["mesh_wrapper_id"]

    try:
        meshWrapperDb = mesheditor.MeshWrapper.get_by_id(json_model_refs["spatial"]["mesh_wrapper_id"])
    except Exception as e:
        raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
    try:
        meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId)
        mesh_filename = meshFileObj["storePath"]
    except IOError as e:
        #blowup here, need a mesh
        #self.response.write(json.dumps({"status" : False,
        #                                "msg" : "No Mesh file given"}))
        #return
        raise Exception("Mesh file inaccessible. Try another mesh")
    #TODO: if we get advanced options, we don't need a mesh

    reaction_subdomain_assigments = json_model_refs["spatial"]["reactions_subdomain_assignments"] #e.g. {'R1':[1,2,3]}
    species_subdomain_assigments = json_model_refs["spatial"]["species_subdomain_assignments"] #e.g. {'S1':[1,2,3]}
    species_diffusion_coefficients = json_model_refs["spatial"]["species_diffusion_coefficients"] #e.g. {'S1':0.5}
    initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }

    # Every species needs a diffusion coefficient before assembly can start.
    for species in stochkit_model_obj.listOfSpecies:
        if species not in species_diffusion_coefficients:
            raise Exception("Species '{0}' does not have a diffusion coefficient set. Please do that in the Species tab of the Model Editor".format(species))

    simulation_end_time = data['time']
    simulation_time_increment = data['increment']
    simulation_algorithm = data['algorithm'] # Don't trust this! I haven't implemented the algorithm selection for this yet
    simulation_exec_type = data['execType'] # This should contain 'spatial' -- Not that you really need it, only spatial requests will be routed here
    simulation_realizations = data['realizations']
    simulation_seed = data['seed'] # If this is set to -1, it means choose a seed at random! (Whatever that means)

    #### Construct the PyURDME object from the Stockkit model and mesh and other inputs
    try:
        # model
        pymodel = pyurdme.URDMEModel(name=stochkit_model_obj.name)
        # mesh
        pymodel.mesh = pyurdme.URDMEMesh.read_dolfin_mesh(str(mesh_filename))
        # timespan: 0 .. end inclusive (hence the extra increment)
        pymodel.timespan(numpy.arange(0,simulation_end_time+simulation_time_increment, simulation_time_increment))
        # subdomains
        if len(meshWrapperDb.subdomains) > 0:
            pymodel.set_subdomain_vector(numpy.array(meshWrapperDb.subdomains))
        # species
        for s in stochkit_model_obj.listOfSpecies:
            pymodel.add_species(pyurdme.Species(name=s, diffusion_constant=float(species_diffusion_coefficients[s])))
        # species subdomain restriction
        for s, sd_list in species_subdomain_assigments.iteritems():
            spec = pymodel.listOfSpecies[s]
            pymodel.restrict(spec, sd_list)
        # parameters
        for p_name, p in stochkit_model_obj.listOfParameters.iteritems():
            pymodel.add_parameter(pyurdme.Parameter(name=p_name, expression=p.expression))
        # reactions (mass-action reactions carry a rate; custom ones a
        # propensity function)
        for r_name, r in stochkit_model_obj.listOfReactions.iteritems():
            if r.massaction:
                pymodel.add_reaction(pyurdme.Reaction(name=r_name, reactants=r.reactants, products=r.products, rate=r.marate, massaction=True))
            else:
                pymodel.add_reaction(pyurdme.Reaction(name=r_name, reactants=r.reactants, products=r.products, propensity_function=r.propensity_function))
        # reaction subdomain restrictions
        for r in reaction_subdomain_assigments:
            pymodel.listOfReactions[r].restrict_to = reaction_subdomain_assigments[r]
        # Initial Conditions
        # initial_conditions = json_model_refs["spatial"]["initial_conditions"] #e.g. { ic0 : { type : "place", species : "S0", x : 5.0, y : 10.0, z : 1.0, count : 5000 }, ic1 : { type : "scatter",species : "S0", subdomain : 1, count : 100 }, ic2 : { type : "distribute",species : "S0", subdomain : 2, count : 100 } }
        for ic in initial_conditions:
            spec = pymodel.listOfSpecies[ic['species']]
            if ic['type'] == "place":
                pymodel.set_initial_condition_place_near({spec:int(ic['count'])}, point=[float(ic['x']),float(ic['y']),float(ic['z'])])
            elif ic['type'] == "scatter":
                pymodel.set_initial_condition_scatter({spec:int(ic['count'])},subdomains=[int(ic['subdomain'])])
            elif ic['type'] == "distribute":
                pymodel.set_initial_condition_distribute_uniformly({spec:int(ic['count'])},subdomains=[int(ic['subdomain'])])
            else:
                #self.response.write(json.dumps({"status" : False,
                #                                "msg" : "Unknown initial condition type {0}".format(ic['type'])}))
                #return
                raise Exception("Unknown initial condition type {0}".format(ic['type']))
    except Exception as e:
        # Wrap any assembly failure with context for the caller/UI.
        raise Exception("Error while assembling the model: {0}".format(e))

    return pymodel
def runCloud(self, data):
    '''
    Submit a StochOptim MCEM2 job to the cloud backend.

    Converts the selected model via StochOptimModel, records a
    StochOptimJobWrapper (status "Pending", resource "cloud"), hands the
    serialized model plus trajectory/initial-data files to the backend
    service, and returns a result dict: on success {"success": True,
    "job": <wrapper>, "id": <datastore id>}, on failure {"success": False,
    "msg": ...} (plus exception/traceback when the backend supplies them).
    '''
    model = ModelManager.getModel(self, data["modelID"], modelAsString = False)
    berniemodel = StochOptimModel()

    success, msgs = berniemodel.fromStochKitModel(model["model"])
    result = {
        "success": success
    }
    if not success:
        # Conversion failed; report all messages joined by newlines.
        result["msg"] = os.linesep.join(msgs)
        return result

    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    # Per-job scratch directory under the app's output folder.
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')

    job = StochOptimJobWrapper()
    job.userId = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.jobName = data["jobName"]
    job.indata = json.dumps(data)
    job.modelName = model["name"]
    job.outData = dataDir
    job.status = "Pending"
    job.resource = "cloud"

    data["exec"] = "'bash'"

    # Build the step string from the selected pipeline stages:
    # C = cross-entropy, E = EM, U = uncertainty.
    data["steps"] = ("C" if data["crossEntropyStep"] else "") + ("E" if data["emStep"] else "") + ("U" if data["uncertaintyStep"] else "")

    # data["cores"] = 4
    data["options"] = ""

    cmd = "exec/mcem2.r --steps {steps} --seed {seed} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(**data)
    # cmd = "exec/mcem2.r --K.ce 1000 --K.em 100 --rho .01 --pcutoff .05"

    stringModel, nameToIndex = berniemodel.serialize(data["activate"], True)
    job.nameToIndex = json.dumps(nameToIndex)

    jFileData = fileserver.FileManager.getFile(self, data["trajectoriesID"], noFile = False)
    iFileData = fileserver.FileManager.getFile(self, data["initialDataID"], noFile = False)

    cloud_params = {
        "job_type": "mcem2",
        # "cores": data["cores"],
        "paramstring": cmd,
        "model_file": stringModel,
        "model_data": {
            "content": iFileData["data"],
            "extension": "txt"
        },
        "final_data": {
            "content": jFileData["data"],
            "extension": "txt"
        },
        "key_prefix": self.user.user_id(),
        "credentials": self.user_data.getCredentials(),
        "bucketname": self.user_data.getBucketName()
    }

    # Set the environmental variables
    # (the backend service reads AWS credentials from the process env)
    os.environ["AWS_ACCESS_KEY_ID"] = self.user_data.getCredentials()['EC2_ACCESS_KEY']
    os.environ["AWS_SECRET_ACCESS_KEY"] = self.user_data.getCredentials()['EC2_SECRET_KEY']

    service = backend.backendservice.backendservices()
    cloud_result = service.executeTask(cloud_params)

    if not cloud_result["success"]:
        result = {
            "success": False,
            "msg": cloud_result["reason"]
        }
        # exception/traceback are optional keys in the backend's reply.
        try:
            result["exception"] = cloud_result["exception"]
            result["traceback"] = cloud_result["traceback"]
        except KeyError:
            pass
        return result

    job.cloudDatabaseID = cloud_result["db_id"]
    job.celeryPID = cloud_result["celery_pid"]
    # job.pid = handle.pid
    job.put()

    result["job"] = job
    result["id"] = job.key().id()
    return result
def runLocal(self, data):
    '''
    Launch a StochOptim MCEM2 job as a local subprocess.

    Converts the selected model, writes the model/R-data files into a fresh
    job directory, records a StochOptimJobWrapper (status "Running",
    resource "local"), spawns the Rscript via the backend wrapper script,
    and writes a JSON status (with the new job id) to the response.
    '''
    model = ModelManager.getModel(self, data["modelID"], modelAsString = False)
    berniemodel = StochOptimModel()

    success, msgs = berniemodel.fromStochKitModel(model["model"])

    if not success:
        self.response.content_type = 'application/json'
        self.response.write(json.dumps({"status" : False,
                                        "msg" : msgs }))
        return

    path = os.path.abspath(os.path.dirname(__file__))
    basedir = path + '/../'
    # Per-job scratch directory under the app's output folder.
    dataDir = tempfile.mkdtemp(dir = basedir + 'output')

    job = StochOptimJobWrapper()
    job.userId = self.user.user_id()
    job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
    job.jobName = data["jobName"]
    job.indata = json.dumps(data)
    job.outData = dataDir
    job.modelName = model["name"]
    job.resource = "local"
    job.status = "Running"

    # Convert model and write to file.
    # NOTE: tempfile.mktemp is race-prone and deprecated; it is kept because
    # the R script needs a plain path inside dataDir, and the file is opened
    # immediately afterwards.
    model_file_file = tempfile.mktemp(prefix = 'modelFile', suffix = '.R', dir = dataDir)
    stringModel, nameToIndex = berniemodel.serialize(data["activate"], True)
    job.nameToIndex = json.dumps(nameToIndex)
    # `with` guarantees the file is flushed/closed even if write() raises.
    with open(model_file_file, 'w') as mff:
        mff.write(stringModel)
    data["model_file_file"] = model_file_file

    model_data_file = tempfile.mktemp(prefix = 'dataFile', suffix = '.txt', dir = dataDir)
    jFileData = fileserver.FileManager.getFile(self, data["trajectoriesID"], noFile = False)
    with open(model_data_file, 'w') as mdf:
        mdf.write(jFileData["data"])
    data["model_data_file"] = model_data_file

    model_initial_data_file = tempfile.mktemp(prefix = 'dataFile', suffix = '.txt', dir = dataDir)
    iFileData = fileserver.FileManager.getFile(self, data["initialDataID"], noFile = False)
    with open(model_initial_data_file, 'w') as midf:
        midf.write(iFileData["data"])
    data["model_initial_data_file"] = model_initial_data_file

    data["exec"] = "\"bash&\""

    # Build the step string from the selected pipeline stages:
    # C = cross-entropy, E = EM, U = uncertainty.
    data["steps"] = ("C" if data["crossEntropyStep"] else "") + ("E" if data["emStep"] else "") + ("U" if data["uncertaintyStep"] else "")

    # Use all available cores; fall back to 1 where the count is unavailable.
    try:
        import multiprocessing
        data["cores"] = multiprocessing.cpu_count()
    except (ImportError, NotImplementedError):
        data["cores"] = 1

    data["options"] = ""
    data["path"] = path

    cmd = "Rscript --vanilla {path}/../../stochoptim/exec/mcem2.r --model {model_file_file} --data {model_initial_data_file} --finalData {model_data_file} --steps {steps} --seed {seed} --cores {cores} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(**data)

    print(cmd)

    # wrapper.sh redirects stdout/stderr into the job directory.
    exstring = '{0}/backend/wrapper.sh {1}/stdout {1}/stderr {2}'.format(basedir, dataDir, cmd)

    # setsid puts the job in its own process group so it can be signalled
    # independently of the server process.
    handle = subprocess.Popen(exstring, shell=True, preexec_fn=os.setsid)

    job.pid = handle.pid
    job.put()

    self.response.write(json.dumps({"status" : True,
                                    "msg" : "Job launched",
                                    "id" : job.key().id()}))
def get(self):
    """Render the parameter sweep page with the user's models preloaded."""
    initial_data = json.dumps(ModelManager.getModels(self))
    self.render_response('parameter_sweep.html', initialData=initial_data)
def get(self):
    """Serve the parameter sweep page, seeding it with the user's models."""
    context = {'initialData': json.dumps(ModelManager.getModels(self))}
    self.render_response('parameter_sweep.html', **context)