Example 1
def importExamplePublicModels(handler):
    try:
        path = os.path.abspath(os.path.dirname(__file__))
        szip = exportimport.SuperZip(zipFileName = path + "/../../examples/examples.zip")
    
        toImport = {}
        for name in szip.zipfb.namelist():
            if re.search(r'models/[a-zA-Z0-9\-_]*\.json$', name):
                toImport[json.loads(szip.zipfb.read(name))['name']] = name

        names = [model['name'] for model in ModelManager.getModels(handler, public = True)]

        for name in set(toImport.keys()) - set(names):
            path = toImport[name]
            modelDb = szip.extractStochKitModel(path, "", handler, rename = True)
            modelDb.user_id = ""
            modelDb.name = name
            modelDb.is_public = True
            modelDb.put()
        
            if modelDb.isSpatial:
                meshDb = mesheditor.MeshWrapper.get_by_id(modelDb.spatial["mesh_wrapper_id"])
                #meshDb.undeletable = True
                meshDb.put()

        szip.close()
    except Exception:
        traceback.print_exc()
        print "ERROR: Failed to import example public models"
Example 2
    def get(self):
        if self.request.get('reqType') == 'exportToZip':
            modelId = int(self.request.get('id'))

            model = StochKitModelWrapper.get_by_id(modelId)
            
            try:
                if model.zipFileName:
                    if os.path.exists(model.zipFileName):
                        os.remove(model.zipFileName)

                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = model.name + "_")
                
                model.zipFileName = szip.getFileName()
                
                szip.addStochKitModel(model)
                
                szip.close()
                
                # Save the updated status
                model.put()
                
                relpath = '/' + os.path.relpath(model.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))
                
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : 'Model prepared',
                                                 'url' : relpath }))
            except Exception as e:
                traceback.print_exc()
                result = {}
                result['status'] = False
                result['msg'] = 'Error: {0}'.format(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(result))

            return

        #mesheditor.setupMeshes(self)

        self.render_response('modelEditor.html')
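A hedged sketch of driving this endpoint from a Python 2 client; only the reqType/id query contract comes from the handler above, while the mount path, port, and model id are assumptions:

import json, urllib, urllib2

base = 'http://localhost:8080'  # assumption: local dev server
qs = urllib.urlencode({'reqType': 'exportToZip', 'id': 42})
resp = json.loads(urllib2.urlopen(base + '/modeleditor?' + qs).read())
if resp['status']:
    # 'url' is app-root-relative, e.g. /static/tmp/<name>_XXXX.zip
    urllib.urlretrieve(base + resp['url'], 'model_export.zip')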
Example 3
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery(
                "SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                elif data["resource"] == "qsub":
                    result = self.runQsubWrapper(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job stopped"
                    }))
                return
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job deleted"
                    }))
                return
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(
                    os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': 'Job downloaded'
                    }))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ +
                    '/../../../jupyter_notebook_templates') + "/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # request.get() returns '' (not None) when the parameter is absent
                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': True,
                            'msg': 'Notebook ready',
                            'url': notebook_url
                        }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': False,
                            'msg': 'error:{0}'.format(e)
                        }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'getVtkLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(
                                specie,
                                os.path.join(
                                    tmpDir,
                                    "trajectory_{0}".format(trajectory),
                                    "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.vtkFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'getCsvLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(
                            os.path.join(
                                tmpDir,
                                "trajectory_{0}".format(trajectory)).encode(
                                    'ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name

                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.csvFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        self.response.write(
            json.dumps({
                'status': False,
                'msg': 'Unknown error processing request: no handler called'
            }))
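The zipdir helper defined inside the getVtkLocal and getCsvLocal branches is self-contained; a minimal standalone check of its behavior, using temporary paths only and no app dependencies:

import os, tempfile, zipfile

def zipdir(path, ziph, prefix):
    # Store every file under path beneath prefix, preserving relative layout.
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file),
                       os.path.join(prefix,
                                    os.path.relpath(os.path.join(root, file), path)))

src = tempfile.mkdtemp()
with open(os.path.join(src, 'a.txt'), 'w') as f:
    f.write('hello')

out = os.path.join(tempfile.mkdtemp(), 'bundle.zip')
zipf = zipfile.ZipFile(out, 'w')
zipdir(src, zipf, 'bundle')
zipf.close()

print zipfile.ZipFile(out).namelist()  # ['bundle/a.txt']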
Example 4
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))

            job = db.GqlQuery("SELECT * FROM StochOptimJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(),
                              data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            try:
                if data["resource"] == "local":
                    # This function takes full responsibility for writing responses out to the world. This is probably a bad design mechanism
                    result = self.runLocal(data)
                else:
                    # cloud
                    result = self.runCloud(data=data)

                return self.response.write(json.dumps({
                    "status": True,
                    "msg": "Job launched",
                    "id": result.key().id()
                }))
            except Exception as e:
                logging.exception(e)
                result = {'status':False,
                          'msg':'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = StochOptimJobWrapper.get_by_id(jobID)

            if job.user_id == self.user.user_id():
                if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
#                    try:
#                        logging.info("Stopping StochOptim poll task pid={0}".format(job.pollProcessPID))
#                        os.kill(job.pollProcessPID, signal.SIGTERM)
#                    except Exception as e:
#                        logging.error("StochOptimPage.post.stopJob(): exception during kill process: {0}".format(e))
                    success = job.stop(self)
                    if not success:
                        return self.response.write(json.dumps({
                            'status': False,
                            'msg': 'Could not stop the job {0}. Unexpected error.'.format(job.name)
                        }))
                else:
                    job.stop(self)
            else:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "No permissions to delete this job (this should never happen)"}))
                return
        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = StochOptimJobWrapper.get_by_id(jobID)

            if job.user_id == self.user.user_id():
                job.delete(self)
            else:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "No permissions to delete this job (this should never happen)"}))
                return
        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = StochOptimJobWrapper.get_by_id(jobID)

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                
                job.zipFileName = szip.getFileName()

                szip.addStochOptimJob(job, True)
                
                szip.close()

                # Save the updated status
                job.put()
            
            relpath = '/' + os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job prepared',
                                             'url' : relpath }))
            return


        self.response.write(json.dumps({ 'status' : True,
                                         'msg' : 'Success'}))
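The job-name uniqueness check recurs in every newJob branch of these handlers; a hedged sketch of the pattern in isolation (the helper name is hypothetical, the positional-binding GQL form is the one used above):

from google.appengine.ext import db

def job_name_taken(user_id, name, kind='StochOptimJobWrapper'):
    # :1 and :2 are positional bindings, filled left to right.
    query = db.GqlQuery(
        "SELECT * FROM {0} WHERE user_id = :1 AND name = :2".format(kind),
        user_id, name.strip())
    return query.get() is not None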
Example 5
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            # Run via Molns cloud
            data = json.loads(self.request.get('data'))

            self.user_data.set_selected(2)

            job = db.GqlQuery(
                "SELECT * FROM ParameterSweepJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                result = self.runMolns(data=data)

                return self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return
        elif reqType == 'newJobLocal':
            logging.error("*" * 80)
            logging.error("parametersweep.newJobLocal")
            logging.error("*" * 80)
            data = json.loads(self.request.get('data'))

            self.user_data.set_selected(0)

            job = db.GqlQuery(
                "SELECT * FROM ParameterSweepJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                logging.error(
                    "parametersweep.newJobLocal: error: Job name must be unique"
                )
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                result = self.runLocal(data=data)

                return self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return
        elif reqType == 'newJobQsub':
            logging.error("*" * 80)
            logging.error("parametersweep.newJobQsub")
            logging.error("*" * 80)
            data = json.loads(self.request.get('data'))

            # cluster_node_info = self.user_data.get_cluster_node_info()[0]
            # files = fileserver.FileManager.getFiles(self, 'clusterKeyFiles')
            # cluster_ssh_key_info = {f['id']: {'id': f['id'], 'keyname': f['path']} for f in files}

            cluster_info = dict()
            received_cluster_info = json.loads(
                self.request.get('cluster_info'))
            cluster_info['ip_address'] = received_cluster_info['ip']
            cluster_info['username'] = received_cluster_info['username']
            cluster_info['ssh_key'] = fileserver.FileWrapper.get_by_id(
                received_cluster_info['key_file_id']).storePath

            self.user_data.set_selected(received_cluster_info['uuid'])

            #logging.info("PARAMETER_SWEEP_CLUSTER_INFO = {0}".format(cluster_info))
            #cluster_info = json.loads(self.request.get('cluster_info'))

            job = db.GqlQuery(
                "SELECT * FROM ParameterSweepJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                logging.error(
                    "parametersweep.newJobQsub: error: Job name must be unique"
                )
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                result = self.runQsub(data=data, cluster_info=cluster_info)

                return self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = ParameterSweepJobWrapper.get_by_id(jobID)

            if job.user_id == self.user.user_id():
                job.delete(self)
            else:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "No permissions to delete this job (this should never happen)"
                    }))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = ParameterSweepJobWrapper.get_by_id(int(jobID))

                molnsConfigDb = db.GqlQuery(
                    "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
                    self.user.user_id()).get()

                if not molnsConfigDb:
                    self.response.write(
                        json.dumps({
                            'status': False,
                            'msg': 'No MOLNS configuration found for this user'
                        }))
                    return

                molnsConfig = molns.MOLNSConfig(
                    config_dir=molnsConfigDb.folder)
                try:
                    log = molns.MOLNSExec.job_logs([job.molnsPID], molnsConfig)
                    with open(os.path.join(job.outData, 'stdout'), 'w') as f:
                        f.write(log['msg'])
                    molns.MOLNSExec.fetch_job_results([
                        job.molnsPID, "results",
                        os.path.join(job.outData, 'results')
                    ], molnsConfig)
                    job.output_stored = True
                except (IOError, molns.MOLNSException) as e:
                    logging.info('Could not fetch results: {0}'.format(e))

                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': 'Job downloaded'
                    }))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = ParameterSweepJobWrapper.get_by_id(jobID)

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addParameterSweepJob(job, True)

                szip.close()

                # Save the updated status
                job.put()

            relpath = '/' + os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job prepared',
                    'url': relpath
                }))
            return

        self.response.write(json.dumps({'status': True, 'msg': 'Success'}))
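The long if/elif chain over reqType could equally be written as a dispatch table; a sketch of that refactor under the assumption that each branch above is moved into a method of the same class (all method names here are hypothetical):

import json

class ParameterSweepPage(BaseHandler):  # assumption: same base class as above
    def post(self):
        handlers = {
            'newJob': self.newJob,
            'newJobLocal': self.newJobLocal,
            'newJobQsub': self.newJobQsub,
            'delJob': self.delJob,
            'getDataCloud': self.getDataCloud,
            'getDataLocal': self.getDataLocal,
        }
        self.response.content_type = 'application/json'
        handler = handlers.get(self.request.get('reqType'))
        if handler is None:
            self.response.write(json.dumps({'status': True, 'msg': 'Success'}))
            return
        handler()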
Example 6
    def post(self):
        reqType = self.request.get('reqType')

        if reqType == "jobInfo":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            jsonJob = {
                "id": int(self.request.get('id')),
                "userId": job.user_id,
                "jobName": job.name,
                "startTime": job.startTime,
                "indata": json.loads(job.indata),
                "outData": job.outData,
                "status": job.status,
                "resource": job.resource,
                "uuid": job.cloudDatabaseID,
                "output_stored": job.output_stored,
                "modelName": job.modelName
            }

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES and job.outData is None:
                    # Let the user decide if they want to download it
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            "status": "Finished",
                            "values": [],
                            "job": jsonJob
                        }))
                    return
                outputdir = job.outData
                try:
                    # Load all data from file in JSON format
                    vhandle = open(outputdir + '/result/output.txt', 'r')
                    values = {
                        'time': [],
                        'trajectories': {},
                        'sensitivities': {},
                        'parameters': {}
                    }
                    parameters = []
                    columnToList = []
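                    # Layout implied by the branches below:
                    #   line 0: header (skipped)
                    #   line 1: column names ('time' plus 'specie:parameter' pairs)
                    #   line 2: parameter values, one float per parameter name
                    #   line 3: first data row; line 4: skipped; lines 5+: data rows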
                    for i, line in enumerate(vhandle):
                        if i == 0:
                            continue
                        elif i == 1:
                            names = line.split()

                            parameterNames = []

                            for name in names:
                                if ':' in name:
                                    specie, parameter = name.split(':')
                                    if parameter not in parameterNames:
                                        parameterNames.append(parameter)

                            for name in names:
                                if name == 'time':
                                    columnToList.append(values['time'])
                                elif ':' in name:
                                    specie, parameter = name.split(':')

                                    if specie not in values['sensitivities']:
                                        values['sensitivities'][specie] = {}

                                    values['sensitivities'][specie][parameter] = []  # new timeseries for this sensitivity
                                    columnToList.append(values['sensitivities'][specie][parameter])  # keep a reference for later appends
                                else:
                                    values['trajectories'][name] = []  # start a new timeseries for this name
                                    columnToList.append(values['trajectories'][name])  # keep a reference for later appends
                        elif i == 2:
                            parameters = map(float, line.split())
                        elif i == 3:
                            for storage, value in zip(columnToList,
                                                      map(float,
                                                          line.split())):
                                storage.append(value)
                        elif i == 4:
                            continue
                        else:
                            for storage, value in zip(columnToList,
                                                      map(float,
                                                          line.split())):
                                storage.append(value)
                    vhandle.close()

                    values['parameters'] = dict(zip(parameterNames,
                                                    parameters))

                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            "status": "Finished",
                            "values": values,
                            "job": jsonJob
                        }))
                    return
                except IOError as ioe:
                    logging.error("Could not read sensitivity output: {0}".format(ioe))
                    job.status = "Failed"
                    logging.error("Setting job.status = Failed")
                    job.put()

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ''
                stderr = ''
                try:
                    fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()
                    fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()
                except IOError as ioe:
                    logging.error(
                        "could not open error log files in {0}: {1}".format(
                            job.outData, ioe))

                self.response.write(
                    json.dumps({
                        "status": "Failed",
                        "stdout": stdout,
                        "stderr": stderr,
                        "job": jsonJob
                    }))
                return

            # Job is neither Finished nor Failed (e.g. still Running)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({"status": job.status, "job": jsonJob}))
        elif reqType == "getFromCloud":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)
            # Unpack it to its local output location
            os.system('tar -xf ' + job.cloudDatabaseID + '.tar')
            job.outData = os.path.dirname(os.path.abspath(
                __file__)) + '/../output/' + job.cloudDatabaseID
            job.outData = os.path.abspath(job.outData)
            # jsonJob["outData"] = job.outData
            # Clean up
            os.remove(job.cloudDatabaseID + '.tar')
            # Update the db entry
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded'
                }))
            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = SensitivityJobWrapper.get_by_id(
                    int(self.request.get('id')))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ + '/../../../jupyter_notebook_templates'
                ) + "/Sensitivity.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # request.get() returns '' (not None) when the parameter is absent
                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return
        elif reqType == "getLocalData":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addSensitivityJob(job, globalOp=True, ignoreStatus=True)

                szip.close()

                # Save the updated status
                job.put()

            relpath = os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        elif reqType == "delJob":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            job.delete(self)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    "status": True,
                    "msg": "Job deleted"
                }))

        elif reqType == "newJob":
            data = json.loads(self.request.get('data'))

            job = db.GqlQuery(
                "SELECT * FROM SensitivityJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                # Either local or cloud
                if data["resource"] == "local":
                    job = self.runLocal(data)

                elif data["resource"] == "cloud":
                    job = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))

                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": job.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return
        else:
            self.response.write(
                json.dumps({
                    "status": False,
                    "msg": "Unknown request type: {0}".format(reqType)
                }))
Example 7
    def post(self):
        """ Assemble the input to StochKit2 and submit the job (locally or via cloud). """

        reqType = self.request.get('reqType')

        if reqType == 'getFromCloud':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)

            # Unpack it to its local output location
            os.system('tar -xf {0}.tar'.format(job.cloudDatabaseID))
            job.outData = os.path.abspath('{0}/../output/{1}'.format(
                os.path.abspath(os.path.dirname(__file__)),
                job.cloudDatabaseID))

            job.stdout = os.path.join(job.outData, 'stdout.log')
            job.stderr = os.path.join(job.outData, 'stderr.log')

            # Clean up
            os.remove('{0}.tar'.format(job.cloudDatabaseID))

            # Save the updated status
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded'
                }))
            return
        elif reqType == 'getDataLocal':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addStochKitJob(job, globalOp=True, ignoreStatus=True)

                szip.close()

                # Save the updated status
                job.put()

            relpath = os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'delJob':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

                if job.user_id == self.user.user_id():
                    job.delete(self)

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': "Job deleted from the datastore."
                    }))
            except Exception as e:
                logging.exception(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': False,
                        'msg': "Error: {0}".format(e)
                    }))

            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                # TODO Deterministic or Stochastic template
                indata = json.loads(job.indata)
                if indata['exec_type'] == 'deterministic':
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Deterministic.ipynb"
                else:
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Stochastic.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # request.get() returns '' (not None) when the parameter is absent
                if self.request.get('hostname'):
                    logging.info('hostname={0}'.format(
                        self.request.get('hostname')))
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                logging.info('redirect: {0}'.format(notebook_url))
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'jobInfo':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
            indata = json.loads(job.indata)

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                try:
                    if (job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES
                            and (job.output_stored == 'False'
                                 or job.outData is None)):
                        self.response.headers['Content-Type'] = 'application/json'
                        self.response.write(
                            json.dumps({
                                "status": "Finished",
                                "values": [],
                                "job": JobManager.getJob(self, job.key().id())
                            }))
                        return
                    else:
                        outputdir = job.outData
                        # Load all data from file in JSON format
                        if indata['exec_type'] == 'stochastic':
                            tid = self.request.get('tid')

                            if tid != '' and tid != 'mean':
                                outfile = '/result/trajectories/trajectory{0}.txt'.format(
                                    tid)

                                vhandle = open(outputdir + outfile, 'r')

                                values = {'time': [], 'trajectories': {}}
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(
                                                    values['time'])
                                            else:
                                                values['trajectories'][name] = []  # start a new timeseries for this name
                                                columnToList.append(values['trajectories'][name])  # keep a reference for later appends
                                    else:
                                        for storage, value in zip(
                                                columnToList,
                                                map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                            else:
                                outfile = '/result/stats/means.txt'

                                vhandle = open(outputdir + outfile, 'r')

                                values = {'time': [], 'trajectories': {}}
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(
                                                    values['time'])
                                            else:
                                                values['trajectories'][name] = []  # start a new timeseries for this name
                                                columnToList.append(values['trajectories'][name])  # keep a reference for later appends
                                    else:
                                        for storage, value in zip(
                                                columnToList,
                                                map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                        else:
                            outfile = '/result/output.txt'
                            values = {'time': [], 'trajectories': {}}

                            #if not os.path.isfile(outputdir + outfile):

                            vhandle = open(outputdir + outfile, 'r')

                            columnToList = []
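                            # Layout implied by the branches below: line 0 header
                            # (skipped), line 1 column names, line 2 skipped,
                            # line 3 first data row, line 4 skipped, lines 5+ data.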
                            for i, line in enumerate(vhandle):
                                if i == 0:
                                    continue
                                elif i == 1:
                                    names = line.split()
                                    for name in names:
                                        if name == 'time':
                                            columnToList.append(values['time'])
                                        else:
                                            values['trajectories'][name] = []  # start a new timeseries for this name
                                            columnToList.append(values['trajectories'][name])  # keep a reference for later appends
                                elif i == 2:
                                    continue
                                elif i == 3:
                                    for storage, value in zip(
                                            columnToList,
                                            map(float, line.split())):
                                        storage.append(value)
                                elif i == 4:
                                    continue
                                else:
                                    for storage, value in zip(
                                            columnToList,
                                            map(float, line.split())):
                                        storage.append(value)
                            vhandle.close()

                    self.response.headers['Content-Type'] = 'application/json'
                    result = {
                        "status": "Finished",
                        "values": values,
                        "job": JobManager.getJob(self,
                                                 job.key().id())
                    }
                    logging.debug("result = \n\n{}".format(result))
                    self.response.write(json.dumps(result))
                    return

                except Exception as e:
                    traceback.print_exc()
                    job.status = "Failed"
                    job.put()
                    logging.error(
                        "Failed to parse output data. Assuming job failed and continuing"
                    )

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ""
                stderr = ""

                if job.outData is not None:
                    if os.path.isfile(job.outData + '/stdout'):
                        fstdoutHandle = open(job.outData + '/stdout', 'r')
                    else:
                        fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()

                    if os.path.isfile(job.outData + '/stderr'):
                        fstderrHandle = open(job.outData + '/stderr', 'r')
                    else:
                        fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()

                self.response.write(
                    json.dumps({
                        "status": "Failed",
                        "job": JobManager.getJob(self,
                                                 job.key().id()),
                        "stdout": stdout,
                        "stderr": stderr
                    }))
            else:
                raise Exception(
                    'This page should never be accessed if the job is neither Finished nor Failed (current state of job {0}: {1})'
                    .format(job.key().id(), job.status))
        else:
            # params is a dict containing all form elements of the request
            params = json.loads(self.request.get('data'))

            self.response.headers['Content-Type'] = 'application/json'
            job = db.GqlQuery(
                "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), params["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            backend_services = backendservices(self.user_data)

            # Create a stochkit_job instance
            try:
                if params['resource'] == "local":
                    job = self.runStochKitLocal(params)
                elif params['resource'] == 'cloud':
                    job = self.runCloud(params)
                elif params['resource'] == 'molns':
                    job = self.runMolns(params)
                else:
                    raise Exception("Unknown resource {0}".format(
                        params["resource"]))
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": job.key().id()
                    }))
            except Exception as e:
                traceback.print_exc()

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": str(e)
                    }))