Example 1
    def getJob(handler, job_id):
        """Fetch a StochKitJobWrapper by datastore id and flatten it to a dict.

        The wrapper's serialized ``indata`` JSON is decoded, and selected
        simulation parameters are merged with datastore attributes into a
        single JSON-serializable dict.
        """
        wrapper = StochKitJobWrapper.get_by_id(job_id)
        params = json.loads(wrapper.indata)
        logging.debug('getJob() job.id = {0} job.indata = {1}'.format(wrapper.key().id(), params))

        # Fields copied verbatim out of the decoded indata blob.
        indataFields = ("type", "final_time", "increment", "realizations",
                        "exec_type", "units", "epsilon", "threshold", "seed")

        jsonJob = dict((field, params[field]) for field in indataFields)

        # Datastore-backed attributes of the job wrapper itself.
        jsonJob["id"] = wrapper.key().id()
        jsonJob["name"] = wrapper.name
        jsonJob["stdout"] = wrapper.stdout
        jsonJob["stderr"] = wrapper.stderr
        jsonJob["status"] = wrapper.status
        jsonJob["startTime"] = wrapper.startTime
        jsonJob["modelName"] = wrapper.modelName
        jsonJob["output_stored"] = wrapper.output_stored
        jsonJob["output_location"] = wrapper.outData
        jsonJob["zipFileName"] = wrapper.zipFileName
        jsonJob["output_url"] = wrapper.outputURL
        jsonJob["resource"] = wrapper.resource
        jsonJob["cloudDatabaseID"] = wrapper.cloudDatabaseID
        jsonJob["pid"] = wrapper.pid
        jsonJob["result"] = wrapper.result

        return jsonJob
Example 2
    def getJob(handler, job_id):
        """Look up the StochKitJobWrapper with id *job_id* and serialize it.

        Returns a plain dict combining the wrapper's datastore attributes
        with the simulation parameters stored in its ``indata`` JSON blob.
        """
        job = StochKitJobWrapper.get_by_id(job_id)
        indata = json.loads(job.indata)
        logging.debug('getJob() job.id = {0} job.indata = {1}'.format(
            job.key().id(), indata))

        # Assemble (key, value) pairs, then materialize the dict in one go.
        pairs = [
            ("id", job.key().id()),
            ("name", job.name),
            ("stdout", job.stdout),
            ("stderr", job.stderr),
            # These are things contained in the stochkit_job object
            ("type", indata["type"]),
            ("status", job.status),
            ("startTime", job.startTime),
            ("modelName", job.modelName),
            ("output_stored", job.output_stored),
            ("output_location", job.outData),
            ("zipFileName", job.zipFileName),
            ("output_url", job.outputURL),
            ("final_time", indata["final_time"]),
            ("increment", indata["increment"]),
            ("realizations", indata["realizations"]),
            ("exec_type", indata["exec_type"]),
            ("units", indata["units"]),
            ("resource", job.resource),
            ("epsilon", indata["epsilon"]),
            ("threshold", indata["threshold"]),
            ("cloudDatabaseID", job.cloudDatabaseID),
            ("seed", indata["seed"]),
            ("pid", job.pid),
            ("result", job.result),
        ]

        return dict(pairs)
Example 3
    def updateJob(handler, job):
        """Claim the job identified by *job* for the current user.

        Parameters:
            handler: request handler supplying the authenticated user.
            job: datastore id of the StochKitJobWrapper to update
                 (presumably an int id, matching getJob/deleteJob — TODO confirm
                 against callers).

        Returns:
            dict with the job's datastore id under key "id".
        """
        # BUG FIX: the original referenced an undefined name ``job_id``
        # (guaranteed NameError at runtime); the id arrives as ``job``.
        jobWrap = StochKitJobWrapper.get_by_id(job)
        jobWrap.user_id = handler.user.user_id()
        jobWrap.put()

        jsonJob = { "id" : jobWrap.key().id() }

        return jsonJob
Example 4
    def updateJob(handler, job):
        """Reassign ownership of a job to the currently authenticated user.

        Parameters:
            handler: request handler supplying the authenticated user.
            job: datastore id of the StochKitJobWrapper to update
                 (presumably an int id, matching getJob/deleteJob — TODO confirm
                 against callers).

        Returns:
            dict with the job's datastore id under key "id".
        """
        # BUG FIX: the original used an undefined name ``job_id`` (NameError
        # at runtime); the id is the ``job`` parameter.
        jobWrap = StochKitJobWrapper.get_by_id(job)
        jobWrap.user_id = handler.user.user_id()
        jobWrap.put()

        jsonJob = {"id": jobWrap.key().id()}

        return jsonJob
Example 5
    def post(self):
        """ Assemble the input to StochKit2 and submit the job (locally or via cloud).

        Dispatches on the 'reqType' request parameter:
          - 'getFromCloud': fetch a finished cloud job's output tarball locally.
          - 'getDataLocal': build (and cache) a zip of the job for download.
          - 'delJob': delete the job if owned by the current user.
          - 'jobInfo': return job metadata plus parsed simulation results.
          - anything else: validate and launch a new job from 'data' JSON.
        All responses are JSON written to self.response.
        """

        def readColumnData(path, headerLine=0, skipLines=()):
            # Parse a whitespace-delimited StochKit output file into
            # {'time': [...], 'trajectories': {species_name: [...]}}.
            #   headerLine: index of the line holding the column names.
            #   skipLines: indices of lines to ignore entirely.
            # Replaces three near-identical copies of this loop in the
            # original; try/finally also closes the file on a parse error.
            values = {'time': [], 'trajectories': {}}
            columnToList = []
            vhandle = open(path, 'r')
            try:
                for i, line in enumerate(vhandle):
                    if i in skipLines:
                        continue
                    elif i == headerLine:
                        for name in line.split():
                            if name == 'time':
                                columnToList.append(values['time'])
                            else:
                                # Start a new timeseries for this name and keep
                                # a positional reference for the data rows.
                                values['trajectories'][name] = []
                                columnToList.append(values['trajectories'][name])
                    else:
                        for storage, value in zip(columnToList, map(float, line.split())):
                            storage.append(value)
            finally:
                vhandle.close()
            return values

        reqType = self.request.get('reqType')

        if reqType == 'getFromCloud':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)

            # Unpack the fetched tarball to its local output location
            os.system('tar -xf {0}.tar'.format(job.cloudDatabaseID))
            job.outData = os.path.abspath('{0}/../output/{1}'.format(os.path.abspath(os.path.dirname(__file__)), job.cloudDatabaseID))

            # BUG FIX: os.path.join drops every component before an absolute
            # second argument, so '/stdout.log' previously produced the bare
            # path '/stdout.log' instead of a file under job.outData.
            job.stdout = os.path.join(job.outData, 'stdout.log')
            job.stderr = os.path.join(job.outData, 'stderr.log')

            # Clean up the downloaded archive
            os.remove('{0}.tar'.format(job.cloudDatabaseID))

            # Save the updated status
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded'}))
            return
        elif reqType == 'getDataLocal':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            # Build the zip once and cache its name on the job record.
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addStochKitJob(job, globalOp = True, ignoreStatus = True)

                szip.close()

                # Save the updated status
                job.put()

            # URL path of the zip relative to the application root.
            relpath = os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'delJob':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

                # Only the owning user may delete a job.
                if job.user_id == self.user.user_id():
                    job.delete(self)

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : "Job deleted from the datastore."}))
            except Exception as e:
                logging.exception(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : False,
                                                 'msg' : "Error: {0}".format(e) }))

            return
        elif reqType == 'jobInfo':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
            indata = json.loads(job.indata)

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))

            if job.status == "Finished":
                try:
                    # Cloud jobs whose output has not been fetched yet have
                    # nothing to parse locally: return metadata only.
                    # (Condition factored from the original's equivalent
                    # (cloud and not stored) or (cloud and no outData).)
                    if (job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES
                            and (job.output_stored == 'False' or job.outData is None)):
                        self.response.headers['Content-Type'] = 'application/json'
                        self.response.write(json.dumps({ "status" : "Finished",
                                                         "values" : [],
                                                         "job" : JobManager.getJob(self, job.key().id())}))
                        return
                    else:
                        outputdir = job.outData
                        # Load all data from file in JSON format
                        if indata['exec_type'] == 'stochastic':
                            tid = self.request.get('tid')

                            # A specific trajectory, or the ensemble means.
                            if tid != '' and tid != 'mean':
                                outfile = '/result/trajectories/trajectory{0}.txt'.format(tid)
                            else:
                                outfile = '/result/stats/means.txt'
                            values = readColumnData(outputdir + outfile)
                        else:
                            # Deterministic output: column names are on line 1;
                            # lines 0, 2 and 4 are padding/separators.
                            outfile = '/result/output.txt'
                            values = readColumnData(outputdir + outfile,
                                                    headerLine=1,
                                                    skipLines=(0, 2, 4))

                    self.response.headers['Content-Type'] = 'application/json'
                    result = {"status" : "Finished",
                              "values" : values,
                              "job" : JobManager.getJob(self, job.key().id())}
                    logging.debug("result = \n\n{}".format(result))
                    self.response.write(json.dumps(result))
                    return

                except Exception as e:
                    # Unparseable output: mark the job Failed and fall through
                    # to the failure branch below.
                    traceback.print_exc()
                    job.status = "Failed"
                    job.put()
                    logging.error("Failed to parse output data. Assuming job failed and continuing")

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ""
                stderr = ""

                if job.outData is not None:
                    # Some jobs write 'stdout'/'stderr', others the '.log'
                    # variants; prefer the bare name when it exists.
                    if os.path.isfile(job.outData + '/stdout'):
                        fstdoutHandle = open(job.outData + '/stdout', 'r')
                    else:
                        fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()

                    if os.path.isfile(job.outData + '/stderr'):
                        fstderrHandle = open(job.outData + '/stderr', 'r')
                    else:
                        fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()

                self.response.write(json.dumps({ "status" : "Failed",
                                                 "job" : JobManager.getJob(self, job.key().id()),
                                                 "stdout" : stdout,
                                                 "stderr" : stderr}))
            else:
                self.response.headers['Content-Type'] = 'application/json'
                # Placeholder status for jobs that are neither Finished nor
                # Failed (still running); preserved verbatim since the client
                # may match on it — TODO replace with a meaningful status.
                self.response.write(json.dumps({ "status" : "asdfasfdfdsa" }))
        else:
            # Params is a dict that contains all response elements of the form
            params = json.loads(self.request.get('data'))

            self.response.headers['Content-Type'] = 'application/json'
            # Job names must be unique per user.
            job = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(), params["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            backend_services = backendservices(self.user_data)

            # Create a stochkit_job instance on the requested resource.
            try:
                if params['resource'] == "local":
                    job = self.runStochKitLocal(params)
                elif params['resource'] == 'cloud':
                    job = self.runCloud(params)
                else:
                    raise Exception("Unknown resource {0}".format(params["resource"]))
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps( { "status" : True,
                                                  "msg" : "Job launched",
                                                  "id" : job.key().id() } ))
            except Exception as e:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps( { "status" : False,
                                                  "msg" : str(e) } ))
Example 6
 def deleteJob(handler, job_id):
     """Remove the StochKitJobWrapper with the given datastore id."""
     wrapper = StochKitJobWrapper.get_by_id(job_id)
     wrapper.delete()
Example 7
    def post(self):
        """ Assemble the input to StochKit2 and submit the job (locally or via cloud).

        Dispatches on the 'reqType' request parameter:
          - 'getFromCloud': fetch a finished cloud job's output tarball locally.
          - 'getDataLocal': build (and cache) a zip of the job for download.
          - 'delJob': delete the job if owned by the current user.
          - 'redirectJupyterNotebook': create a notebook from a template and
            redirect the client to its URL.
          - 'jobInfo': return job metadata plus parsed simulation results.
          - anything else: validate and launch a new job from 'data' JSON.
        All responses are JSON written to self.response (except the redirect).
        """

        def readColumnData(path, headerLine=0, skipLines=()):
            # Parse a whitespace-delimited StochKit output file into
            # {'time': [...], 'trajectories': {species_name: [...]}}.
            #   headerLine: index of the line holding the column names.
            #   skipLines: indices of lines to ignore entirely.
            # Replaces three near-identical copies of this loop in the
            # original; try/finally also closes the file on a parse error.
            values = {'time': [], 'trajectories': {}}
            columnToList = []
            vhandle = open(path, 'r')
            try:
                for i, line in enumerate(vhandle):
                    if i in skipLines:
                        continue
                    elif i == headerLine:
                        for name in line.split():
                            if name == 'time':
                                columnToList.append(values['time'])
                            else:
                                # Start a new timeseries for this name and keep
                                # a positional reference for the data rows.
                                values['trajectories'][name] = []
                                columnToList.append(values['trajectories'][name])
                    else:
                        for storage, value in zip(
                                columnToList, map(float, line.split())):
                            storage.append(value)
            finally:
                vhandle.close()
            return values

        reqType = self.request.get('reqType')

        if reqType == 'getFromCloud':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)

            # Unpack the fetched tarball to its local output location
            os.system('tar -xf {0}.tar'.format(job.cloudDatabaseID))
            job.outData = os.path.abspath('{0}/../output/{1}'.format(
                os.path.abspath(os.path.dirname(__file__)),
                job.cloudDatabaseID))

            # BUG FIX: os.path.join drops every component before an absolute
            # second argument, so '/stdout.log' previously produced the bare
            # path '/stdout.log' instead of a file under job.outData.
            job.stdout = os.path.join(job.outData, 'stdout.log')
            job.stderr = os.path.join(job.outData, 'stderr.log')

            # Clean up the downloaded archive
            os.remove('{0}.tar'.format(job.cloudDatabaseID))

            # Save the updated status
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded'
                }))
            return
        elif reqType == 'getDataLocal':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            # Build the zip once and cache its name on the job record.
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addStochKitJob(job, globalOp=True, ignoreStatus=True)

                szip.close()

                # Save the updated status
                job.put()

            # URL path of the zip relative to the application root.
            relpath = os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'delJob':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

                # Only the owning user may delete a job.
                if job.user_id == self.user.user_id():
                    job.delete(self)

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': "Job deleted from the datastore."
                    }))
            except Exception as e:
                logging.exception(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': False,
                        'msg': "Error: {0}".format(e)
                    }))

            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
                # Check if notebook already exists, if not create one from the
                # template matching the job's execution type.
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                indata = json.loads(job.indata)
                if indata['exec_type'] == 'deterministic':
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Deterministic.ipynb"
                else:
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Stochastic.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # BUG FIX: request.get() returns '' (never None) for a missing
                # parameter, so the original 'is not None' test was always
                # true and the localhost fallback was dead code.
                if self.request.get('hostname'):
                    logging.info('hostname={0}'.format(
                        self.request.get('hostname')))
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                # Return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                logging.info('redirect: {0}'.format(notebook_url))
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'jobInfo':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
            indata = json.loads(job.indata)

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))

            if job.status == "Finished":
                try:
                    # Cloud jobs whose output has not been fetched yet have
                    # nothing to parse locally: return metadata only.
                    # (Condition factored from the original's equivalent
                    # (cloud and not stored) or (cloud and no outData).)
                    if (job.resource
                            in backendservices.SUPPORTED_CLOUD_RESOURCES
                            and (job.output_stored == 'False'
                                 or job.outData is None)):
                        self.response.headers[
                            'Content-Type'] = 'application/json'
                        self.response.write(
                            json.dumps({
                                "status": "Finished",
                                "values": [],
                                "job": JobManager.getJob(self, job.key().id())
                            }))
                        return
                    else:
                        outputdir = job.outData
                        # Load all data from file in JSON format
                        if indata['exec_type'] == 'stochastic':
                            tid = self.request.get('tid')

                            # A specific trajectory, or the ensemble means.
                            if tid != '' and tid != 'mean':
                                outfile = '/result/trajectories/trajectory{0}.txt'.format(
                                    tid)
                            else:
                                outfile = '/result/stats/means.txt'
                            values = readColumnData(outputdir + outfile)
                        else:
                            # Deterministic output: column names are on line 1;
                            # lines 0, 2 and 4 are padding/separators.
                            outfile = '/result/output.txt'
                            values = readColumnData(outputdir + outfile,
                                                    headerLine=1,
                                                    skipLines=(0, 2, 4))

                    self.response.headers['Content-Type'] = 'application/json'
                    result = {
                        "status": "Finished",
                        "values": values,
                        "job": JobManager.getJob(self, job.key().id())
                    }
                    logging.debug("result = \n\n{}".format(result))
                    self.response.write(json.dumps(result))
                    return

                except Exception as e:
                    # Unparseable output: mark the job Failed and fall through
                    # to the failure branch below.
                    traceback.print_exc()
                    job.status = "Failed"
                    job.put()
                    logging.error(
                        "Failed to parse output data. Assuming job failed and continuing"
                    )

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ""
                stderr = ""

                if job.outData is not None:
                    # Some jobs write 'stdout'/'stderr', others the '.log'
                    # variants; prefer the bare name when it exists.
                    if os.path.isfile(job.outData + '/stdout'):
                        fstdoutHandle = open(job.outData + '/stdout', 'r')
                    else:
                        fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()

                    if os.path.isfile(job.outData + '/stderr'):
                        fstderrHandle = open(job.outData + '/stderr', 'r')
                    else:
                        fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()

                self.response.write(
                    json.dumps({
                        "status": "Failed",
                        "job": JobManager.getJob(self, job.key().id()),
                        "stdout": stdout,
                        "stderr": stderr
                    }))
            else:
                # Any other state is unexpected here. The statements that
                # followed this raise in the original (including a Python 2
                # 'print' statement) were unreachable and have been removed.
                raise Exception(
                    'This page should never be accessed if job is not Finished or Running (current state of job {0} : {1})'
                    .format(job.id, job.status))
        else:
            # Params is a dict that contains all response elements of the form
            params = json.loads(self.request.get('data'))

            self.response.headers['Content-Type'] = 'application/json'
            # Job names must be unique per user.
            job = db.GqlQuery(
                "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), params["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            backend_services = backendservices(self.user_data)

            # Create a stochkit_job instance on the requested resource.
            try:
                if params['resource'] == "local":
                    job = self.runStochKitLocal(params)
                elif params['resource'] == 'cloud':
                    job = self.runCloud(params)
                elif params['resource'] == 'molns':
                    job = self.runMolns(params)
                else:
                    raise Exception("Unknown resource {0}".format(
                        params["resource"]))
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": job.key().id()
                    }))
            except Exception as e:
                traceback.print_exc()

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": str(e)
                    }))
Example 8
 def deleteJob(handler, job_id):
     """Delete the job record identified by *job_id* from the datastore."""
     StochKitJobWrapper.get_by_id(job_id).delete()