Example #1
    def runCloud(self, data):
        ''' Run a PyURDME ensemble on cloud compute resources and return the job wrapper. '''
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        # A negative seed means: choose a non-negative seed at random
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)
        cloud_params = {
            "job_type": "spatial",
            "simulation_algorithm": data['algorithm'],
            "simulation_realizations": data['realizations'],
            "simulation_seed": data['seed'],
            # "bucketname": self.user_data.getBucketName(),  # implies EC2; should live in backendservices
            "paramstring": '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(cloud_params)))

        cloud_params['document'] = pickle.dumps(pymodel)

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params=cloud_params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception("Cloud execution failed: {0}".format(e))

        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = None  # Local output location; populated once results are fetched from the cloud
        job.modelName = pymodel.name
        job.resource = cloud_result['resource']
        job.cloudDatabaseID = taskid
        job.celeryPID = celery_task_id
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
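
Both runCloud and runLocal treat a negative seed as a request to draw one at random. A minimal standalone sketch of that convention (the function name resolve_seed is illustrative, not part of the source):

    import random

    def resolve_seed(seed):
        # A negative seed means "pick one for me": reseed from system entropy
        # and draw a non-negative 31-bit integer, the same range used above.
        if int(seed) < 0:
            random.seed()
            return random.randint(0, 2147483647)
        return int(seed)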
Example #2
    def runLocal(self, data):
        ''' Run a PyURDME simulation using local compute resources. '''
        self.user_data.set_selected(0)
        pymodel = self.construct_pyurdme_model(data)
        # Don't trust this! Algorithm selection hasn't been implemented here yet.
        simulation_algorithm = data['algorithm']
        simulation_realizations = data['realizations']

        # A negative seed means: choose a non-negative seed at random
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        simulation_seed = data['seed']

        path = os.path.abspath(os.path.dirname(__file__))

        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')

        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = dataDir
        job.modelName = pymodel.name
        job.resource = "local"

        job.status = "Running"

        model_file_pkl = "{0}/model_file.pkl".format(dataDir)
        result_dir = "{0}/results/".format(dataDir)
        os.makedirs(result_dir)

        # Serialize the model and write it to a file in the data directory
        # (binary mode, since pickle output is not text).
        with open(model_file_pkl, 'wb') as fd:
            pickle.dump(pymodel, fd)

        cmd = "{0}/../../pyurdme/pyurdme_wrapper.py {1} {2} {3} {4} {5}".format(
            path, model_file_pkl, result_dir, simulation_algorithm,
            simulation_realizations, simulation_seed)
        logging.info("cmd =\n{}".format(cmd))
        exstring = '{0}/backend/wrapper.py {1}/stdout.log {1}/stderr.log {1}/return_code {2}'.format(
            basedir, dataDir, cmd)
        handle = subprocess.Popen(exstring, shell=True, preexec_fn=os.setsid)

        job.pid = int(handle.pid)
        job.put()
        return job
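
runLocal launches the simulation through a shell wrapper with preexec_fn=os.setsid, which places the child in its own session: the job outlives the request handler, and its pid doubles as a process-group id that can be signalled as a whole. A minimal POSIX-only sketch of that pattern (the sleep command is a stand-in for the wrapper invocation):

    import os
    import signal
    import subprocess

    # Start the command in its own session; the child's pid is also its
    # process-group id.
    handle = subprocess.Popen("sleep 60", shell=True, preexec_fn=os.setsid)

    # Later, stop the shell and everything it spawned in one call.
    os.killpg(os.getpgid(handle.pid), signal.SIGTERM)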
Example #3
    def runQsub(self, data, cluster_info):
        ''' Submit a job to a qsub-managed cluster and return the job wrapper. '''
        logging.error("*" * 80)
        logging.error("simulate.runQsub() execType={0}".format(
            data['execType']))
        logging.error("*" * 80)

        modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))
        path = os.path.abspath(os.path.dirname(__file__))
        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')
        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"
        job.output_stored = "False"
        job.is_spatial = True

        try:
            templateData = {
                "name": modelDb.name,
                "modelType": modelDb.type,
                "species": modelDb.species,
                "parameters": modelDb.parameters,
                "reactions": modelDb.reactions,
                "speciesSelect": data['selections'],
                "maxTime": data['time'],
                "increment": data['increment'],
                "trajectories": data['realizations'],
                "seed": data['seed'],
                "isSpatial": modelDb.isSpatial,
                "isLocal": True
            }

            if modelDb.isSpatial:
                try:
                    meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                        modelDb.spatial["mesh_wrapper_id"])
                except Exception as e:
                    logging.exception(e)
                    logging.error(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                    raise Exception(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                try:
                    meshFileObj = fileserver.FileManager.getFile(
                        self, meshWrapperDb.meshFileId, noFile=False)
                    templateData["mesh"] = meshFileObj["data"]
                except IOError as e:
                    logging.exception(e)
                    logging.error("Mesh file inaccessible. Try another mesh")
                    raise Exception("Mesh file inaccessible. Try another mesh")

                templateData['reaction_subdomain_assignments'] = \
                    modelDb.spatial["reactions_subdomain_assignments"]
                templateData['species_subdomain_assignments'] = \
                    modelDb.spatial["species_subdomain_assignments"]
                templateData['species_diffusion_coefficients'] = \
                    modelDb.spatial["species_diffusion_coefficients"]
                templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
                templateData['subdomains'] = meshWrapperDb.subdomains

            if data['execType'] == "stochastic":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.stochastic(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            elif data['execType'] == "deterministic":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.deterministic(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            elif data['execType'] == "spatial":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.spatial(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            else:
                raise Exception(
                    "Trying to runQsub on unsupported execType {0}".format(
                        data['execType']))

            job.resource = "qsub"
            job.put()
        except Exception as e:
            logging.exception(e)
            job.status = 'Failed'
            #job.delete(self)
            raise

        return job
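
The execType branch at the end of runQsub repeats the same pickle-and-submit shape three times. A dispatch table expresses that mapping once; this is an illustrative sketch (QSUB_BACKENDS and submit_qsub_job are hypothetical names), assuming the three parametersweep_qsub entry points share the signature used above:

    import pickle

    import parametersweep_qsub  # module used by the handler above

    QSUB_BACKENDS = {
        "stochastic": parametersweep_qsub.stochastic,
        "deterministic": parametersweep_qsub.deterministic,
        "spatial": parametersweep_qsub.spatial,
    }

    def submit_qsub_job(exec_type, template_data, cluster_info):
        try:
            backend = QSUB_BACKENDS[exec_type]
        except KeyError:
            raise Exception(
                "Trying to runQsub on unsupported execType {0}".format(exec_type))
        return pickle.dumps(
            backend(template_data, cluster_info, not_full_parameter_sweep=True))

An unknown execType surfaces the same error as the original else branch, and adding a new backend becomes a one-line change to the table.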