Example #1
0
    def runCloud(self, data):
        """Launch a spatial (PyURDME) ensemble simulation on the cloud backend.

        Builds a PyURDME model from *data*, packages it into a backend task,
        submits it, and records the submission as a SpatialJobWrapper entity.

        Args:
            data: dict of job settings; reads 'seed', 'algorithm',
                'realizations', and 'jobName'. Mutated in place when the seed
                is re-rolled.

        Returns:
            The persisted SpatialJobWrapper for the running job.

        Raises:
            Exception: if no compute nodes are up, or the backend reports a
                submission failure.
        """
        self.user_data.set_selected(1)
        backend = backendservices(self.user_data)
        if not backend.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        # A negative seed is a request to pick a non-negative seed at random.
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        model = self.construct_pyurdme_model(data)

        params = {
            "job_type": "spatial",
            "simulation_algorithm": data['algorithm'],
            "simulation_realizations": data['realizations'],
            "simulation_seed": data['seed'],
            "paramstring": '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(params)))

        # The serialized model travels inside the task payload.
        params['document'] = pickle.dumps(model)

        # Hand the task off to the backend service.
        result = backend.submit_cloud_task(params=params)

        if not result["success"]:
            raise Exception(
                "Cloud execution failed: {0}".format(result["exception"]))

        # Persist a wrapper entity so the UI can track this job.
        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        # Local output location; populated once results are fetched from the cloud.
        job.outData = None
        job.modelName = model.name
        job.resource = result['resource']
        job.cloudDatabaseID = result["db_id"]
        job.celeryPID = result["celery_pid"]
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
Example #2
0
File: spatial.py  Project: StochSS/stochss
    def runCloud(self, data):
        """Submit a PyURDME spatial ensemble job to the cloud.

        Verifies compute capacity, normalizes the RNG seed, constructs the
        model, submits it via the backend service, and stores a
        SpatialJobWrapper record describing the running job.

        Args:
            data: job-description dict ('seed', 'algorithm', 'realizations',
                'jobName'); its 'seed' entry may be rewritten.

        Returns:
            SpatialJobWrapper: the saved job record.

        Raises:
            Exception: when no compute nodes are running or submission fails.
        """
        self.user_data.set_selected(1)
        svc = backendservices(self.user_data)
        if not svc.isOneOrMoreComputeNodesRunning():
            raise Exception('No cloud computing resources found. (Have they been started?)')

        # Negative seed means: draw a fresh seed in [0, 2**31 - 1].
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)

        task_params = {
            "job_type": "spatial",
            "simulation_algorithm": data['algorithm'],
            "simulation_realizations": data['realizations'],
            "simulation_seed": data['seed'],
            "paramstring": '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(task_params)))

        # Attach the pickled model as the task document.
        task_params['document'] = pickle.dumps(pymodel)

        # Dispatch the task to the backend.
        outcome = svc.submit_cloud_task(params=task_params)

        if not outcome["success"]:
            err = outcome["exception"]
            raise Exception("Cloud execution failed: {0}".format(err))

        # Record the submission so it can be monitored later.
        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        # Filled in with the local data path once cloud results are retrieved.
        job.outData = None
        job.modelName = pymodel.name
        job.resource = outcome['resource']
        job.cloudDatabaseID = outcome["db_id"]
        job.celeryPID = outcome["celery_pid"]
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
Example #3
0
    def runQsub(self, data, cluster_info):
        """Submit a simulation job to a remote cluster via qsub.

        Loads the model referenced by ``data['id']``, renders the template
        parameters (including mesh/subdomain data for spatial models), and
        dispatches to the parametersweep_qsub runner matching
        ``data['execType']``.

        Args:
            data: job-description dict; reads 'execType', 'id', 'jobName',
                'selections', 'time', 'increment', 'realizations', 'seed'.
            cluster_info: connection details forwarded to parametersweep_qsub.

        Returns:
            SpatialJobWrapper: the saved job record holding the pickled
            qsub handle.

        Raises:
            Exception: on missing/inaccessible mesh or unsupported execType;
                any failure is logged and re-raised after marking the job
                Failed.
        """
        # NOTE(review): banner is logged at ERROR level — presumably left over
        # from debugging; confirm before demoting to DEBUG.
        logging.error("*" * 80)
        logging.error("simulate.runQsub() modelType={0}".format(
            data['execType']))
        logging.error("*" * 80)

        modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))
        path = os.path.abspath(os.path.dirname(__file__))
        basedir = path + '/../'
        # Fresh scratch directory under the app's output folder.
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')
        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"
        job.output_stored = "False"
        job.is_spatial = True

        try:
            templateData = {
                "name": modelDb.name,
                "modelType": modelDb.type,
                "species": modelDb.species,
                "parameters": modelDb.parameters,
                "reactions": modelDb.reactions,
                "speciesSelect": data['selections'],
                "maxTime": data['time'],
                "increment": data['increment'],
                "trajectories": data['realizations'],
                "seed": data['seed'],
                "isSpatial": modelDb.isSpatial,
                "isLocal": True
            }

            if modelDb.isSpatial:
                try:
                    meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                        modelDb.spatial["mesh_wrapper_id"])
                except Exception as e:
                    logging.exception(e)
                    logging.error(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                    raise Exception(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                try:
                    meshFileObj = fileserver.FileManager.getFile(
                        self, meshWrapperDb.meshFileId, noFile=False)
                    templateData["mesh"] = meshFileObj["data"]
                except IOError as e:
                    logging.exception(e)
                    logging.error("Mesh file inaccessible. Try another mesh")
                    raise Exception("Mesh file inaccessible. Try another mesh")

                templateData[
                    'reaction_subdomain_assignments'] = modelDb.spatial[
                        "reactions_subdomain_assignments"]
                templateData[
                    'species_subdomain_assignments'] = modelDb.spatial[
                        "species_subdomain_assignments"]
                templateData[
                    'species_diffusion_coefficients'] = modelDb.spatial[
                        "species_diffusion_coefficients"]
                templateData['initial_conditions'] = modelDb.spatial[
                    "initial_conditions"]
                templateData['subdomains'] = meshWrapperDb.subdomains

            # Dispatch on execType; each runner returns an opaque qsub handle
            # that we pickle into the job record.
            runners = {
                "stochastic": parametersweep_qsub.stochastic,
                "deterministic": parametersweep_qsub.deterministic,
                "spatial": parametersweep_qsub.spatial,
            }
            runner = runners.get(data['execType'])
            if runner is None:
                # BUG FIX: the message previously formatted data['modelType'],
                # which is not the key being dispatched on (and may be absent,
                # masking this error with a KeyError).
                raise Exception(
                    "Trying to runQsub on unsupported modelType {0}".format(
                        data['execType']))
            job.qsubHandle = pickle.dumps(
                runner(
                    templateData,
                    cluster_info,
                    not_full_parameter_sweep=True))

            job.resource = "qsub"
            job.put()
        except Exception as e:
            logging.exception(e)
            # Job was never persisted on this path, so the status change is
            # in-memory only; caller sees the re-raised exception.
            job.status = 'Failed'
            raise

        return job
Example #4
0
File: spatial.py  Project: StochSS/stochss
    def runQsub(self, data, cluster_info):
        """Submit a simulation job to a remote cluster via qsub.

        Loads the model referenced by ``data['id']``, assembles the template
        parameters (mesh and subdomain data included for spatial models), and
        calls the parametersweep_qsub runner selected by ``data['execType']``.

        Args:
            data: job-description dict; reads 'execType', 'id', 'jobName',
                'selections', 'time', 'increment', 'realizations', 'seed'.
            cluster_info: connection details forwarded to parametersweep_qsub.

        Returns:
            SpatialJobWrapper: the saved job record holding the pickled
            qsub handle.

        Raises:
            Exception: on missing/inaccessible mesh or unsupported execType;
                failures are logged and re-raised after marking the job Failed.
        """
        # NOTE(review): banner logged at ERROR level — presumably debugging
        # residue; confirm before demoting to DEBUG.
        logging.error("*" * 80)
        logging.error("simulate.runQsub() modelType={0}".format(data['execType']))
        logging.error("*" * 80)

        modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))
        path = os.path.abspath(os.path.dirname(__file__))
        basedir = path + '/../'
        # Fresh scratch directory under the app's output folder.
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')
        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"
        job.output_stored = "False"
        job.is_spatial = True

        try:
            templateData = {
                "name": modelDb.name,
                "modelType": modelDb.type,
                "species": modelDb.species,
                "parameters": modelDb.parameters,
                "reactions": modelDb.reactions,
                "speciesSelect": data['selections'],
                "maxTime": data['time'],
                "increment": data['increment'],
                "trajectories": data['realizations'],
                "seed": data['seed'],
                "isSpatial": modelDb.isSpatial,
                "isLocal": True
            }

            if modelDb.isSpatial:
                try:
                    meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
                except Exception as e:
                    logging.exception(e)
                    logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                    raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                try:
                    meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile=False)
                    templateData["mesh"] = meshFileObj["data"]
                except IOError as e:
                    logging.exception(e)
                    logging.error("Mesh file inaccessible. Try another mesh")
                    raise Exception("Mesh file inaccessible. Try another mesh")

                templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
                templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
                templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
                templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
                templateData['subdomains'] = meshWrapperDb.subdomains

            # Dispatch on execType; the runner's return value is an opaque
            # qsub handle, pickled into the job record.
            runners = {
                "stochastic": parametersweep_qsub.stochastic,
                "deterministic": parametersweep_qsub.deterministic,
                "spatial": parametersweep_qsub.spatial,
            }
            runner = runners.get(data['execType'])
            if runner is None:
                # BUG FIX: previously formatted data['modelType'] — not the key
                # dispatched on, and possibly absent (masking this error with
                # a KeyError).
                raise Exception("Trying to runQsub on unsupported modelType {0}".format(data['execType']))
            job.qsubHandle = pickle.dumps(runner(templateData, cluster_info,
                                                 not_full_parameter_sweep=True))

            job.resource = "qsub"
            job.put()
        except Exception as e:
            logging.exception(e)
            # Job was never persisted on this path; status change is in-memory
            # only and the exception propagates to the caller.
            job.status = 'Failed'
            raise

        return job