Example #1
    def runQsub(self, data, cluster_info):
        logging.error("*" * 80)
        logging.error("simulate.runQsub() modelType={0}".format(
            data['execType']))
        logging.error("*" * 80)

        modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))
        path = os.path.abspath(os.path.dirname(__file__))
        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')
        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"
        job.output_stored = "False"
        job.is_spatial = True

        try:
            templateData = {
                "name": modelDb.name,
                "modelType": modelDb.type,
                "species": modelDb.species,
                "parameters": modelDb.parameters,
                "reactions": modelDb.reactions,
                # "speciesSelect": data['speciesSelect'],
                "speciesSelect": data['selections'],
                # "maxTime": data['maxTime'],
                "maxTime": data['time'],
                "increment": data['increment'],
                # "trajectories": data['trajectories'],
                "trajectories": data['realizations'],
                "seed": data['seed'],
                "isSpatial": modelDb.isSpatial,
                "isLocal": True
            }

            if modelDb.isSpatial:
                try:
                    meshWrapperDb = mesheditor.MeshWrapper.get_by_id(
                        modelDb.spatial["mesh_wrapper_id"])
                except Exception as e:
                    logging.exception(e)
                    logging.error(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                    raise Exception(
                        "No Mesh file set. Choose one in the Mesh tab of the Model Editor"
                    )
                try:
                    meshFileObj = fileserver.FileManager.getFile(
                        self, meshWrapperDb.meshFileId, noFile=False)
                    templateData["mesh"] = meshFileObj["data"]
                except IOError as e:
                    logging.exception(e)
                    logging.error("Mesh file inaccessible. Try another mesh")
                    raise Exception("Mesh file inaccessible. Try another mesh")

                templateData[
                    'reaction_subdomain_assignments'] = modelDb.spatial[
                        "reactions_subdomain_assignments"]
                templateData[
                    'species_subdomain_assignments'] = modelDb.spatial[
                        "species_subdomain_assignments"]
                templateData[
                    'species_diffusion_coefficients'] = modelDb.spatial[
                        "species_diffusion_coefficients"]
                templateData['initial_conditions'] = modelDb.spatial[
                    "initial_conditions"]
                templateData['subdomains'] = meshWrapperDb.subdomains

            if data['execType'] == "stochastic":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.stochastic(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            elif data['execType'] == "deterministic":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.deterministic(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            elif data['execType'] == "spatial":
                job.qsubHandle = pickle.dumps(
                    parametersweep_qsub.spatial(
                        templateData,
                        cluster_info,
                        not_full_parameter_sweep=True))
            else:
                raise Exception(
                    "Trying to runQsub on unsupported execType {0}".format(
                        data['execType']))

            job.resource = "qsub"
            job.put()
        except Exception as e:
            logging.exception(e)
            job.status = 'Failed'
            #job.delete(self)
            raise

        return job
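
For reference, here is a hypothetical data payload showing the fields runQsub() reads. The key names come straight from the handler above; the values are invented for illustration.

# Hypothetical payload for runQsub(); key names are taken from the handler
# above, values are made up for illustration.
example_data = {
    "id": 42,                   # datastore id of the StochKitModelWrapper
    "jobName": "spatial_demo",
    "execType": "spatial",      # "stochastic", "deterministic", or "spatial"
    "selections": ["A", "B"],   # becomes templateData["speciesSelect"]
    "time": 100,                # becomes templateData["maxTime"]
    "increment": 0.1,
    "realizations": 10,         # becomes templateData["trajectories"]
    "seed": 1234,               # passed through to the solver
}
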
Example #2
    def runLocal(self, data):
        ''' Run a PyURDME job using local compute resources. '''
        self.user_data.set_selected(0)
        #####
        pymodel = self.construct_pyurdme_model(data)
        #####
        # Note: algorithm selection is not implemented for this yet, so this
        # value cannot be trusted.
        simulation_algorithm = data['algorithm']
        simulation_realizations = data['realizations']

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        simulation_seed = data['seed']
        #####

        path = os.path.abspath(os.path.dirname(__file__))

        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')

        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = dataDir
        job.modelName = pymodel.name
        job.resource = "local"

        job.status = "Running"

        model_file_pkl = "{0}/model_file.pkl".format(dataDir)
        result_dir = "{0}/results/".format(dataDir)
        os.makedirs(result_dir)

        # Serialize the model and write it to a file in the data directory
        with open(model_file_pkl, 'w') as fd:
            pickle.dump(pymodel, fd)

        cmd = "{0}/../../pyurdme/pyurdme_wrapper.py {1} {2} {3} {4} {5}".format(
            path, model_file_pkl, result_dir, simulation_algorithm,
            simulation_realizations, simulation_seed)
        logging.info("cmd =\n{}".format(cmd))
        exstring = '{0}/backend/wrapper.py {1}/stdout.log {1}/stderr.log {1}/return_code {2}'.format(
            basedir, dataDir, cmd)
        handle = subprocess.Popen(exstring, shell=True, preexec_fn=os.setsid)

        job.pid = int(handle.pid)
        job.put()
        return job
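
The negative-seed convention used above (and in runCloud below) is easy to factor out and test in isolation. A minimal sketch; note that 2147483647 is 2**31 - 1, the largest 32-bit signed integer:

import random

def resolve_seed(seed):
    # Mirrors the handlers' convention: a negative seed means
    # "pick a random seed in [0, 2**31 - 1]".
    if int(seed) < 0:
        random.seed()  # reseed from system entropy
        return random.randint(0, 2147483647)
    return int(seed)

assert resolve_seed(1234) == 1234
assert 0 <= resolve_seed(-1) <= 2147483647
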
Example #3
    def runCloud(self, data):
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)
        #logging.info('DATA: {0}'.format(data))
        #####
        cloud_params = {
            "job_type": "spatial",
            "simulation_algorithm": data['algorithm'],
            "simulation_realizations": data['realizations'],
            "simulation_seed": data['seed'],
            #            "bucketname" : self.user_data.getBucketName(),  #implys EC2, should be in backendservices
            "paramstring": '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(cloud_params)))

        cloud_params['document'] = pickle.dumps(pymodel)
        #logging.debug('PYURDME: {0}'.format(cloud_params['document']))

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params=cloud_params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception("Cloud execution failed: {0}".format(e))

        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = None  # Local path for the output data; populated once results are fetched from the cloud
        job.modelName = pymodel.name
        job.resource = cloud_result['resource']
        job.cloudDatabaseID = taskid
        job.celeryPID = celery_task_id
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
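
The handler above implicitly defines the result-dict contract that service.submit_cloud_task() must satisfy: it reads "success", "exception", "celery_pid", "db_id", and "resource". A hedged stub honoring that contract, useful for testing; the values are placeholders, not what the real backend returns:

def fake_submit_cloud_task(params):
    # Stub matching the keys the handler reads; all values are invented.
    return {
        "success": True,         # checked first; False raises an exception
        "exception": None,       # only read when success is False
        "celery_pid": "a1b2c3",  # stored as job.celeryPID
        "db_id": "task-0001",    # stored as job.cloudDatabaseID
        "resource": "cloud",     # stored as job.resource
    }
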
Example #4
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery(
                "SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                elif data["resource"] == "qsub":
                    result = self.runQsubWrapper(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(
                    os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': 'Job downloaded'
                    }))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                # Check whether the notebook already exists; if not, create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ +
                    '/../../../jupyter_notebook_templates') + "/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # request.get() returns '' (not None) when the parameter is
                # missing, so test truthiness rather than 'is not None'
                host = self.request.get('hostname') or 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': True,
                            'msg': 'Notebook ready',
                            'url': notebook_url
                        }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': False,
                            'msg': 'error:{0}'.format(e)
                        }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'getVtkLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(
                                specie,
                                os.path.join(
                                    tmpDir,
                                    "trajectory_{0}".format(trajectory),
                                    "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.vtkFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'getCsvLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(
                            os.path.join(
                                tmpDir,
                                "trajectory_{0}".format(trajectory)).encode(
                                    'ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name

                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.csvFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        self.response.write(
            json.dumps({
                'status': False,
                'msg': 'Unknown error processing request: no handler called'
            }))
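
The zipdir() helper defined inside getVtkLocal and getCsvLocal is self-contained and can be exercised on its own. A minimal standalone usage sketch:

import os
import shutil
import tempfile
import zipfile

def zipdir(path, ziph, prefix):
    # ziph is a zipfile handle; store every file under path at
    # prefix/<path relative to the walked directory> inside the archive
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file),
                       os.path.join(prefix,
                                    os.path.relpath(os.path.join(root, file),
                                                    path)))

tmpDir = tempfile.mkdtemp()
try:
    with open(os.path.join(tmpDir, 'result0.csv'), 'w') as fd:
        fd.write('t,A\n0,100\n')
    zipf = zipfile.ZipFile('job.zip', 'w')
    zipdir(tmpDir, zipf, 'job')  # archive entries land under 'job/'
    zipf.close()
    print(zipfile.ZipFile('job.zip').namelist())  # ['job/result0.csv']
finally:
    shutil.rmtree(tmpDir)
    os.remove('job.zip')
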
Example #5
    def get(self):
        logging.info('GET self.request.body = {}'.format(self.request.body))
        reqType = self.request.get('reqType')

        if reqType == 'getJobInfo':
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Not the right user"
                    }))
                return

            result = {}
            stdout = ''
            stderr = ''
            complete = ''
            if job.outData is None:
                complete = 'yes'
            else:
                try:
                    fstdoutHandle = open(str(job.outData + '/stdout.log'), 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()
                    fstderrHandle = open(str(job.outData + '/stderr.log'), 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()
                    if os.path.exists("{0}/results/complete".format(
                            job.outData)):
                        complete = 'yes'
                except IOError as e:
                    traceback.print_exc()
                    result['status'] = False
                    result['msg'] = 'Error running the simulation: stdout/stderr outputs missing.'

            result.update({
                "id": int(self.request.get('id')),
                "jobStatus": job.status,
                "complete": complete,
                "resource": job.resource,
                "modelName": job.modelName,
                "outData": job.outData,
                "name": job.name,
                "uuid": job.cloudDatabaseID,
                "output_stored": job.output_stored,
                "stdout": stdout,
                "stderr": stderr,
                "indata": json.loads(job.indata)
            })

            logging.debug("result =\n\n{}".format(pprint.pformat(result)))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
            return
        elif reqType == 'getMeshData':
            try:
                job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

                data = json.loads(self.request.get('data'))

                logging.debug("data = {}".format(data))

                trajectory = data["trajectory"]
                timeIdx = data["timeIdx"]
                resultJS = {}

                #if not job.preprocessed or not os.path.exists(job.preprocessedDir):
                job.preprocess(trajectory)

                indir = job.preprocessedDir

                with open(os.path.join(indir, 'mesh.json'), 'r') as meshfile:
                    mesh = json.load(meshfile)

                with open(os.path.join(indir, 'voxelTuples.json'),
                          'r') as voxelTuplesFile:
                    voxelTuples = json.load(voxelTuplesFile)

                f = os.path.join(indir, 'result{0}'.format(trajectory))

                with h5py.File(f, 'r') as dataFile:
                    species = dataFile.keys()

                self.response.content_type = 'application/json'
                self.response.write(
                    json.dumps({
                        "mesh": mesh,
                        "voxelTuples": voxelTuples,
                        "species": species
                    }))

            except Exception as e:
                traceback.print_exc()
                result = {}
                result['status'] = False
                result['msg'] = 'Error: error fetching results {0}'.format(e)
                self.response.headers['Content-Type'] = 'application/json'

                self.response.write(json.dumps(result))
            return
        elif reqType == 'getTimeSeriesData':
            try:
                job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
                data = json.loads(self.request.get('data'))
                logging.debug(
                    'Spatial.get(getTimeSeriesData): data={0}'.format(data))
                trajectory = data["trajectory"]
                sTime = data["timeStart"]
                eTime = data["timeEnd"]

                #TODO: what is the right value here?
                if eTime is None:
                    eTime = 0
                dataType = "population" if "showPopulation" in data and data[
                    "showPopulation"] else "concentration"

                resultJS = {}

                if job.preprocessed is None or trajectory not in job.preprocessed or not os.path.exists(
                        job.preprocessedDir):
                    job.preprocess(trajectory)

                f = os.path.join(job.preprocessedDir,
                                 'result{0}'.format(trajectory))

                limits = {}

                logging.debug(
                    'Spatial.get(getTimeSeriesData): sTime={0} eTime={1}'.format(
                        sTime, eTime))

                with h5py.File(f, 'r') as dataFile:
                    dataTmp = {}
                    colorTmp = {}

                    for specie in dataFile.keys():
                        data2 = dataFile[specie][dataType][sTime:eTime + 1]

                        dataTmp[specie] = data2

                        limits[specie] = {
                            'min': dataFile[specie][dataType].attrs['min'],
                            'max': dataFile[specie][dataType].attrs['max']
                        }

                        cm.set_clim(dataFile[specie][dataType].attrs['min'],
                                    dataFile[specie][dataType].attrs['max'])
                        rgbas = cm.to_rgba(data2, bytes=True).astype('uint32')

                        rgbas = numpy.left_shift(
                            rgbas[:, :, 0], 16) + numpy.left_shift(
                                rgbas[:, :, 1], 8) + rgbas[:, :, 2]

                        #rgbaInts = numpy.zeros((rgbas.shape[0], rgbas.shape[1]))

                        #for i in range(rgbas.shape[0]):
                        #    for j in range(rgbas.shape[1]):
                        #        rgbaInts[i, j] = int('0x%02x%02x%02x' % tuple(rgbas[i, j][0:3]), 0)

                        colorTmp[specie] = []
                        for i in range(rgbas.shape[0]):
                            colorTmp[specie].append(
                                list(rgbas[i].astype('int')))

                    colors = {}
                    data = {}
                    for i in range(abs(eTime - sTime + 1)):
                        colors[sTime + i] = {}
                        data[sTime + i] = {}
                        for specie in dataFile.keys():
                            colors[sTime + i][specie] = colorTmp[specie][i]
                            data[sTime + i][specie] = list(dataTmp[specie][i])

                self.response.content_type = 'application/json'
                self.response.write(
                    json.dumps({
                        "colors": colors,
                        "raw": data,
                        "limits": limits
                    }))

            except Exception as e:
                traceback.print_exc()
                result = {}
                result['status'] = False
                result['msg'] = 'Error: error fetching results {0}'.format(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(result))

            return

        self.render_response('spatial.html')
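
The color handling in getTimeSeriesData packs each RGBA triple into a single 24-bit 0xRRGGBB integer with bit shifts. A small numpy sketch of the same trick on made-up data:

import numpy

# One timestep, two voxels, RGBA bytes such as cm.to_rgba(..., bytes=True)
# would produce; cast to uint32 so the shifts do not overflow.
rgbas = numpy.array([[[255, 0, 0, 255],       # pure red
                      [0, 128, 255, 255]]],   # sky blue
                    dtype='uint32')

# Same packing as the handler: 0xRRGGBB = (R << 16) + (G << 8) + B
packed = (numpy.left_shift(rgbas[:, :, 0], 16) +
          numpy.left_shift(rgbas[:, :, 1], 8) +
          rgbas[:, :, 2])

print([hex(v) for v in packed[0]])  # ['0xff0000', '0x80ff']
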
Example #6
    def runCloud(self, data):
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception('No cloud computing resources found')

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)
        #logging.info('DATA: {0}'.format(data))
        #####
        cloud_params = {
            "job_type": "spatial",
            "simulation_algorithm" : data['algorithm'],
            "simulation_realizations" : data['realizations'],
            "simulation_seed" : data['seed'],
#            "bucketname" : self.user_data.getBucketName(),  #implys EC2, should be in backendservices
            "paramstring" : '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(cloud_params)))

        cloud_params['document'] = pickle.dumps(pymodel)
        #logging.debug('PYURDME: {0}'.format(cloud_params['document']))

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params=cloud_params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception("Cloud execution failed: {0}".format(e))
        
        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = None  # Local path for the output data; populated once results are fetched from the cloud
        job.modelName = pymodel.name
        job.resource = cloud_result['resource']
        job.cloudDatabaseID = taskid
        job.celeryPID = celery_task_id
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
Example #7
    def get(self):
        logging.info('GET self.request.body = {}'.format(self.request.body))
        reqType = self.request.get('reqType')

        if reqType == 'getJobInfo':
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write({ "status" : False, "msg" : "Not the right user" })

            result = {}
            stdout = ''
            stderr = ''
            complete = ''
            if job.outData is None:
                complete = 'yes'
            else:
                try:
                    fstdoutHandle = open(str(job.outData + '/stdout.log'), 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()
                    fstderrHandle = open(str(job.outData + '/stderr.log'), 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()
                    if os.path.exists("{0}/results/complete".format(job.outData)):
                        complete = 'yes'
                except IOError as e:
                    traceback.print_exc()
                    result['status'] = False
                    result['msg'] = 'Error running the simulation: stdout/stderr outputs missing.'

            result.update({"id" : int(self.request.get('id')),
                           "jobStatus" : job.status,
                           "complete" : complete,
                           "resource" : job.resource,
                           "modelName" : job.modelName,
                           "outData" : job.outData,
                           "name" : job.name,
                           "uuid": job.cloudDatabaseID,
                           "output_stored": job.output_stored,
                           "stdout" : stdout,
                           "stderr" : stderr,
                           "indata" : json.loads(job.indata) })

            logging.debug("result =\n\n{}".format(pprint.pformat(result)))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
            return
        elif reqType == 'getMeshData':
            try:
                job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

                data = json.loads(self.request.get('data'))

                logging.debug("data = {}".format(data))

                trajectory = data["trajectory"]
                timeIdx = data["timeIdx"]                
                resultJS = {}

                #if not job.preprocessed or not os.path.exists(job.preprocessedDir):
                job.preprocess(trajectory)

                indir = job.preprocessedDir
                    
                with open(os.path.join(indir, 'mesh.json'), 'r') as meshfile:
                    mesh = json.load(meshfile)

                with open(os.path.join(indir, 'voxelTuples.json'), 'r') as voxelTuplesFile:
                    voxelTuples = json.load(voxelTuplesFile)

                f = os.path.join(indir, 'result{0}'.format(trajectory))
                
                with h5py.File(f, 'r') as dataFile:
                    species = dataFile.keys()

                self.response.content_type = 'application/json'
                self.response.write(json.dumps({ "mesh" : mesh, "voxelTuples" : voxelTuples, "species" : species }))
            
            except Exception as e:
                traceback.print_exc()
                result = {}
                result['status'] = False
                result['msg'] = 'Error: error fetching results {0}'.format(e)
                self.response.headers['Content-Type'] = 'application/json'

                self.response.write(json.dumps(result))
            return
        elif reqType == 'getTimeSeriesData':
            try:
                job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
                data = json.loads(self.request.get('data'))
                logging.debug('Spatial.get(getTimeSeriesData): data={0}'.format(data))
                trajectory = data["trajectory"]
                sTime= data["timeStart"]
                eTime = data["timeEnd"]

                #TODO: what is the right value here?
                if eTime is None:
                    eTime = 0
                dataType = "population" if "showPopulation" in data and data["showPopulation"] else "concentration"

                resultJS = {}

                if job.preprocessed is None or trajectory not in job.preprocessed or not os.path.exists(job.preprocessedDir):
                    job.preprocess(trajectory)

                f = os.path.join(job.preprocessedDir, 'result{0}'.format(trajectory))

                limits = {}

                logging.debug('Spatial.get(getTimeSeriesData): sTime={0} eTime={1}'.format(sTime, eTime))

                with h5py.File(f, 'r') as dataFile:
                    dataTmp = {}
                    colorTmp = {}

                    for specie in dataFile.keys():
                        data2 = dataFile[specie][dataType][sTime:eTime + 1]

                        dataTmp[specie] = data2
                        
                        limits[specie] = { 'min' : dataFile[specie][dataType].attrs['min'],
                                           'max' : dataFile[specie][dataType].attrs['max'] }

                        cm.set_clim(dataFile[specie][dataType].attrs['min'], dataFile[specie][dataType].attrs['max'])
                        rgbas = cm.to_rgba(data2, bytes = True).astype('uint32')

                        rgbas = numpy.left_shift(rgbas[:, :, 0], 16) + numpy.left_shift(rgbas[:, :, 1], 8) + rgbas[:, :, 2]
                        
                        #rgbaInts = numpy.zeros((rgbas.shape[0], rgbas.shape[1]))

                        #for i in range(rgbas.shape[0]):
                        #    for j in range(rgbas.shape[1]):
                        #        rgbaInts[i, j] = int('0x%02x%02x%02x' % tuple(rgbas[i, j][0:3]), 0)

                        colorTmp[specie] = []
                        for i in range(rgbas.shape[0]):
                            colorTmp[specie].append(list(rgbas[i].astype('int')))

                    colors = {}
                    data = {}
                    for i in range(abs(eTime - sTime + 1)):
                        colors[sTime + i] = {}
                        data[sTime + i] = {}
                        for specie in dataFile.keys():
                            colors[sTime + i][specie] = colorTmp[specie][i] 
                            data[sTime + i][specie] = list(dataTmp[specie][i])

                self.response.content_type = 'application/json'
                self.response.write(json.dumps( { "colors" : colors, "raw" : data, "limits" : limits } ))

            except Exception as e:
                traceback.print_exc()
                result = {}
                result['status'] = False
                result['msg'] = 'Error: error fetching results {0}'.format(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(result))

            return
        
        self.render_response('spatial.html')
Example #8
    def runLocal(self, data):
        ''' Run a PyURDME job using local compute resources. '''
        #####
        pymodel = self.construct_pyurdme_model(data)
        #####
        # Note: algorithm selection is not implemented for this yet, so this
        # value cannot be trusted.
        simulation_algorithm = data['algorithm']
        simulation_realizations = data['realizations']

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        simulation_seed = data['seed']
        #####

        path = os.path.abspath(os.path.dirname(__file__))

        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir = basedir + 'output')

        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = dataDir
        job.modelName = pymodel.name
        job.resource = "local"

        job.status = "Running"

        model_file_pkl = "{0}/model_file.pkl".format(dataDir)
        result_dir = "{0}/results/".format(dataDir)
        os.makedirs(result_dir)

        # Serialize the model and write it to a file in the data directory
        with open(model_file_pkl, 'w') as fd:
            pickle.dump(pymodel, fd)

        cmd = "{0}/../../pyurdme/pyurdme_wrapper.py {1} {2} {3} {4} {5}".format(path, model_file_pkl, result_dir, simulation_algorithm, simulation_realizations, simulation_seed)
        logging.info("cmd =\n{}".format(cmd))
        exstring = '{0}/backend/wrapper.py {1}/stdout.log {1}/stderr.log {1}/return_code {2}'.format(basedir, dataDir, cmd)
        handle = subprocess.Popen(exstring, shell=True, preexec_fn=os.setsid)
        
        job.pid = int(handle.pid)
        job.put()
        return job
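
The launch line above implies a calling convention for backend/wrapper.py: the stdout log path, the stderr log path, a return-code file path, and then the wrapped command. A hypothetical minimal wrapper honoring that convention; this is an assumption inferred from the invocation, not the project's actual script:

#!/usr/bin/env python
# Hypothetical stand-in for backend/wrapper.py, inferred from how it is
# invoked above: wrapper.py <stdout.log> <stderr.log> <return_code> <cmd...>
import subprocess
import sys

def main():
    stdout_path, stderr_path, rc_path = sys.argv[1:4]
    cmd = ' '.join(sys.argv[4:])  # the wrapped command, rejoined
    with open(stdout_path, 'w') as out, open(stderr_path, 'w') as err:
        rc = subprocess.call(cmd, shell=True, stdout=out, stderr=err)
    with open(rc_path, 'w') as fd:
        fd.write(str(rc))

if __name__ == '__main__':
    main()
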
Example #9
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'
        logging.error('spatial post reqType={0}'.format(reqType))

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery("SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(data["resource"]))
                self.response.write(json.dumps({"status" : True,
                                                "msg" : "Job launched",
                                                "id" : result.key().id()}))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status':False,
                          'msg':'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return


        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                    "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : 'Job downloaded'}))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                # Check whether the notebook already exists; if not, create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(os.path.abspath(job.outData), os.path.abspath(__file__+'/../../../'))
                notebook_file_path =  os.path.abspath(job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(__file__+'/../../../jupyter_notebook_templates')+"/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(notebook_file_path,notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)


                # request.get() returns '' (not None) when the parameter is
                # missing, so test truthiness rather than 'is not None'
                host = self.request.get('hostname') or 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(proto,host,port,local_path,notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : True,
                                                     'msg' : 'Notebook ready',
                                                     'url' : notebook_url }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : False,
                                                     'msg' : 'error:{0}'.format(e) }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return    
        elif reqType == 'getVtkLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(specie, os.path.join(tmpDir, "trajectory_{0}".format(trajectory), "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.vtkFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'getCsvLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(os.path.join(tmpDir, "trajectory_{0}".format(trajectory)).encode('ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name
                    
                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.csvFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return


        self.response.write(json.dumps({ 'status' : False,
                                         'msg' : 'Unknown error processing request: no handler called'}))
Example #10
    def runQsub(self, data, cluster_info):
        logging.error("*" * 80)
        logging.error("simulate.runQsub() modelType={0}".format(data['execType']))
        logging.error("*" * 80)

        modelDb = StochKitModelWrapper.get_by_id(int(data["id"]))
        path = os.path.abspath(os.path.dirname(__file__))
        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')
        job = SpatialJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"
        job.output_stored = "False"
        job.is_spatial = True

        try:
            templateData = {
                "name": modelDb.name,
                "modelType": modelDb.type,
                "species": modelDb.species,
                "parameters": modelDb.parameters,
                "reactions": modelDb.reactions,
                # "speciesSelect": data['speciesSelect'],
                "speciesSelect": data['selections'],
                # "maxTime": data['maxTime'],
                "maxTime": data['time'],
                "increment": data['increment'],
                # "trajectories": data['trajectories'],
                "trajectories": data['realizations'],
                "seed": data['seed'],
                "isSpatial": modelDb.isSpatial,
                "isLocal": True
            }

            if modelDb.isSpatial:
                try:
                    meshWrapperDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
                except Exception as e:
                    logging.exception(e)
                    logging.error("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                    raise Exception("No Mesh file set. Choose one in the Mesh tab of the Model Editor")
                try:
                    meshFileObj = fileserver.FileManager.getFile(self, meshWrapperDb.meshFileId, noFile=False)
                    templateData["mesh"] = meshFileObj["data"]
                except IOError as e:
                    logging.exception(e)
                    logging.error("Mesh file inaccessible. Try another mesh")
                    raise Exception("Mesh file inaccessible. Try another mesh")

                templateData['reaction_subdomain_assignments'] = modelDb.spatial["reactions_subdomain_assignments"]
                templateData['species_subdomain_assignments'] = modelDb.spatial["species_subdomain_assignments"]
                templateData['species_diffusion_coefficients'] = modelDb.spatial["species_diffusion_coefficients"]
                templateData['initial_conditions'] = modelDb.spatial["initial_conditions"]
                templateData['subdomains'] = meshWrapperDb.subdomains

            if data['execType'] == "stochastic":
                job.qsubHandle = pickle.dumps(parametersweep_qsub.stochastic(templateData, cluster_info,
                                                                             not_full_parameter_sweep=True))
            elif data['execType'] == "deterministic":
                job.qsubHandle = pickle.dumps(parametersweep_qsub.deterministic(templateData, cluster_info,
                                                                                not_full_parameter_sweep=True))
            elif data['execType'] == "spatial":
                job.qsubHandle = pickle.dumps(parametersweep_qsub.spatial(templateData, cluster_info,
                                                                          not_full_parameter_sweep=True))
            else:
                raise Exception("Trying to runQsub on unsupported modelType {0}".format(data['modelType']))

            job.resource = "qsub"
            job.put()
        except Exception as e:
            logging.exception(e)
            job.status = 'Failed'
            #job.delete(self)
            raise

        return job