Example #1
 def handle( self ):
   while True:
     if self.lsock is False:
       # Setup listening socket for receiving game output
       self.lsock = socket.socket()
       self.lsock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 )
       try:
         self.lsock.bind( ( '', self.PORT ) )
         self.lsock.listen( 1 )
         logging.info( 'Wsserver listening for game connection on port %d', self.PORT )
       except socket.error as e:
         logging.exception( 'Wsserver startup failed' )
         self.lsock.close()
         sys.exit( 1 )
     elif self.handshake is False:
       self.doHandshake()
     else:
       if self.gsock is False:
         try:
           self.gsock = self.lsock.accept()[ 0 ]
         except socket.error as e:
           logging.exception( 'Error accepting connection from game' )
       else:
         # Wait for data from output modules to pass on to the client
         # Incoming messages are framed as [ size ][ data ]: size is
         # 4 bytes big endian, data is size bytes long
         size = self.gsock.recv( 4 )
         if len( size ) > 0:
           size = struct.unpack( '>I', size )[ 0 ]
           # a single recv() may return fewer than size bytes; see the
           # recv_exact sketch below this example
           data = self.gsock.recv( size )
           self.sendMsg( data )
         else:
           self.gsock.close()
           self.gsock = False
           logging.info( 'Game connection closed' )
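A minimal sketch of both sides of the framing described above, assuming plain TCP sockets; send_framed and recv_exact are hypothetical helpers, not part of the handler. Since a single recv() can return a partial read, the receiving side loops until the full message arrives:

import struct

def send_framed(sock, data):
    # Prefix the payload with its length as a 4-byte big-endian integer.
    sock.sendall(struct.pack('>I', len(data)) + data)

def recv_exact(sock, size):
    # A single recv() may return fewer bytes than requested, so keep
    # reading until the full count has arrived; an empty chunk means
    # the peer closed the connection.
    buf = b''
    while len(buf) < size:
        chunk = sock.recv(size - len(buf))
        if not chunk:
            raise EOFError('connection closed mid-message')
        buf += chunk
    return buf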
Example #2
 def __init__(self, infile):
     self.file = ''
     try:
         fd = open(infile, "rb")
     except IOError:
         logging.error('failed to open %s', infile)
         return
     try:
         head = gfarm_iostat_head()
         sread(fd, head, sizeof(head))
         specs = gfarm_iostat_spec * head.s_nitem
         aspec = specs()
         sread(fd, aspec, sizeof(specs))
         items = gfarm_iostat_items * (head.s_ncolumn * head.s_rowmax)
         aitems = items()
         fd.seek(head.s_item_off)
         sread(fd, aitems, sizeof(items))
     except Exception:
         logging.exception('sread')
         fd.close()
         return
     self.file = infile
     self.ahead = head
     self.aspec = aspec
     self.aitems = aitems
     fd.close()
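sread itself is not shown. A minimal sketch consistent with how it is called here, assuming the gfarm_iostat_* types are ctypes structures, would read raw bytes into the buffer and fail loudly on a short read:

import ctypes

def sread(fd, buf, size):
    # Read exactly `size` bytes from the file and copy them into the
    # ctypes buffer, raising on a short read.
    data = fd.read(size)
    if len(data) != size:
        raise IOError('short read: wanted %d bytes, got %d' % (size, len(data)))
    ctypes.memmove(ctypes.addressof(buf), data, size)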
Example #3
	def __init__(self, infile):
		self.file = ''
		try:
			fd = open(infile, "rb")
		except IOError:
			logging.error('failed to open %s', infile)
			return
		try:
			head = gfarm_iostat_head()
			sread(fd, head, sizeof(head))
			specs = gfarm_iostat_spec * head.s_nitem
			aspec = specs()
			sread(fd, aspec, sizeof(specs))
			items = gfarm_iostat_items * ((head.s_nitem + 1) *
				head.s_rowmax)
			aitems = items()
			sread(fd, aitems, sizeof(items))
		except Exception:
			logging.exception('sread')
			fd.close()
			return
		self.file = infile
		self.ahead = head
		self.aspec = aspec
		self.aitems = aitems
		fd.close()
Example #4
def pac_conf_edit(repo, enable_repo=True):
    try:
        edit_next = False  # because always two lines have to be edited
        with NamedTemporaryFile(mode='w', delete=False) as tmp_f, open(PACCONF) as f:
            for line in f:
                if edit_next:
                    if enable_repo:
                        tmp_f.write(line[1:])
                    else:
                        tmp_f.write("".join(("#", line)))
                    logging.info("changed {}".format(line))
                    edit_next = False
                    continue
                if enable_repo and line.startswith("".join(["#[", repo])):
                    tmp_f.write(line[1:])  # remove the '#', enabling the repo
                    logging.info("changed {}".format(line))
                    edit_next = True
                elif line.startswith("".join(("[", repo))):
                    tmp_f.write("".join(("#", line)))
                    logging.info("changed {}".format(line))
                    edit_next = True
                else:
                    tmp_f.write(line)
                    
        shutil.move(tmp_f.name, f.name)
    except EnvironmentError:
        logging.exception("Something went terribly wrong. Do we have the right permissions?")
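A usage sketch for pac_conf_edit, assuming PACCONF names the pacman configuration file (its real definition is not shown) along with the imports the function relies on:

import logging
import shutil
from tempfile import NamedTemporaryFile

PACCONF = '/etc/pacman.conf'  # assumed path; the real constant is defined elsewhere

# Uncomment the two lines of the [multilib] section, enabling the repo...
pac_conf_edit('multilib', enable_repo=True)
# ...then comment them out again.
pac_conf_edit('multilib', enable_repo=False)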
Example #5
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery(
                "SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                elif data["resource"] == "qsub":
                    result = self.runQsubWrapper(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(
                    os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': 'Job downloaded'
                    }))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ +
                    '/../../../jupyter_notebook_templates') + "/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                # request.get() returns '' when the parameter is absent, never None
                host = self.request.get('hostname')
                if not host:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': True,
                            'msg': 'Notebook ready',
                            'url': notebook_url
                        }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': False,
                            'msg': 'error:{0}'.format(e)
                        }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'getVtkLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(
                                specie,
                                os.path.join(
                                    tmpDir,
                                    "trajectory_{0}".format(trajectory),
                                    "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.vtkFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'getCsvLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(
                            os.path.join(
                                tmpDir,
                                "trajectory_{0}".format(trajectory)).encode(
                                    'ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name

                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.csvFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        self.response.write(
            json.dumps({
                'status': False,
                'msg': 'Unknown Error processing request: no handler called'
            }))
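Every branch of the handler answers with a JSON object carrying at least status and msg. A client-side sketch of that contract in the same Python 2 idiom; the endpoint URL and job id are assumptions, since the route this handler is mounted at is not shown:

import json
import urllib
import urllib2

URL = 'http://localhost:8080/spatial'  # hypothetical route for this handler

# Ask for a local download of job 42 (placeholder id).
payload = urllib.urlencode({'reqType': 'getDataLocal', 'id': json.dumps(42)})
response = json.loads(urllib2.urlopen(URL, payload).read())
if response['status']:
    print('download url: ' + response['url'])
else:
    print('error: ' + response['msg'])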
Example #6
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'
        logging.error('spatial post reqType={0}'.format(reqType))

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery("SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(data["resource"]))
                self.response.write(json.dumps({"status" : True,
                                                "msg" : "Job launched",
                                                "id" : result.key().id()}))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status':False,
                          'msg':'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return


        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : 'Job downloaded'}))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(os.path.abspath(job.outData), os.path.abspath(__file__+'/../../../'))
                notebook_file_path =  os.path.abspath(job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(__file__+'/../../../jupyter_notebook_templates')+"/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(notebook_file_path,notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)


                # request.get() returns '' when the parameter is absent, never None
                host = self.request.get('hostname')
                if not host:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(proto,host,port,local_path,notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : True,
                                                     'msg' : 'Notebook ready',
                                                     'url' : notebook_url }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : False,
                                                     'msg' : 'error:{0}'.format(e) }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'getVtkLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(specie, os.path.join(tmpDir, "trajectory_{0}".format(trajectory), "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.vtkFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'getCsvLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(os.path.join(tmpDir, "trajectory_{0}".format(trajectory)).encode('ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name
                    
                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.csvFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return


        self.response.write(json.dumps({ 'status' : False,
                                         'msg' : 'Unknown Error processing request: no handler called'}))