Example 1
def init_bdd(self):
     liste = list()
     liste.append(Service())
     # open the file in binary write mode
     with open('portfolio/model/data',"wb") as fic:
         record = Pickler(fic)
         record.dump(liste)
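For context, reading this file back mirrors the write; a minimal sketch, assuming the same path and that the Service class is importable at load time (pickle needs it to rebuild the instances):

from pickle import Unpickler

def load_bdd():
    # open the file in binary read mode and unpickle the list written above
    with open('portfolio/model/data', "rb") as fic:
        return Unpickler(fic).load()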
Example 2
 def create_initial_file(self):
     """Creates the initial empty list bookshelf/bookshelf.dump file"""
     init_list = list()
     file = open(self.bookshelf_file_path, 'wb')
     pcklr = Pickler(file)
     pcklr.dump(init_list)
     file.close()
Example 3
            def save():
                print(Donnees.admin)
                print("Test : Saved")
                for i in range(Row):
                    test = self.table.item(i, 1).text()
                    if test == "True":
                        test = True
                    
                    else:
                        if self.table.item(i, 0).text() == "Shayajs" or self.table.item(i, 0).text() == "uadmin":
                            test = True
                            if self.table.item(i, 1).text() == "False":
                                self.label001.show()
                                
                        else:
                            test = False
                    
                    if test and self.table.item(i, 0).text() not in Donnees.admin:
                        Donnees.admin.append(self.table.item(i, 0).text())
                    elif not test and self.table.item(i, 0).text() in Donnees.admin:
                        Donnees.admin.remove(self.table.item(i, 0).text())

                print(Donnees.admin)
                with open("./bin/admin.spi", "wb") as adminfile:
                    a = Pickler(adminfile)
                    a.dump(Donnees.admin)

                b = Online()
                b.uploadftp("admin.spi", "./bin/")
Example 4
    def _testTracingOrProfileState(self, do_pickle=False, **kw):
        t = tasklet(self.Tasklet)
        t(**kw)
        t.run()

        self.foo()
        n = len(self.trace)
        self.foo()
        n2 = len(self.trace)
        self.assertEqual(n, n2)

        if do_pickle:
            io = StringIO()
            p = Pickler(io, -1)
            p.persistent_id = self.persistent_id
            p.dump(t)
            t.remove()
            t.bind(None)
            p = Unpickler(StringIO(io.getvalue()))
            p.persistent_load = self.persistent_load
            t = p.load()
            p = None
            io = None

        t.run()

        self.foo()
        n = len(self.trace)
        self.foo()
        n2 = len(self.trace)
        self.assertEqual(n, n2)
Example 5
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        database = DB(ZODB.blob.BlobStorage(
            'blobs', ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Example 6
def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
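The snippet above is the Python 2 idiom (StringIO, protocol 1); a minimal Python 3 equivalent of deepCopy would use BytesIO, since pickle data is bytes:

from io import BytesIO
from pickle import Pickler, Unpickler

def deep_copy(obj):
    # serialize to an in-memory buffer, rewind, then deserialize a fresh copy
    stream = BytesIO()
    Pickler(stream, 1).dump(obj)
    stream.seek(0)
    return Unpickler(stream).load()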
Example 8
	def import_from_file(self, file):
		try:
			with open(file, "r") as import_file:
				imported_sets = import_file.read()
				imported_sets = imported_sets.split('\n')
				for set in imported_sets:
					set = set.split()
					if not set:
						continue  # skip blank lines
					# Isolate setcode
					selected_code = set[0]
					# Recover full name
					selected_name = " ".join(set[1:])
					# Register archetype once the full name is rebuilt
					if not self.is_name_in_sets(selected_name) and not self.is_code_in_sets(selected_code):
						self.add_archetype(selected_name, selected_code)
			# Save changes
			with open("util/sets", "wb") as set_file:
				pick = Pickler(set_file)
				pick.dump(self)

		except FileNotFoundError:
			print("Could not find file", file)
Example 9
    def parseLog(self):
        logData = {}
        logRE = re.compile(r'^([1-9][0-9]*\.[0-9]+)[^/]+/step([1-9])_.*\.log$')
        for logFile in glob.glob('[1-9]*/step[0-9]*.log'):
            m = logRE.match(logFile)
            if not m: continue
            wf = m.group(1)
            step = int(m.group(2)) - 1
            if step >= self.StepsPerJob: continue
            if not logData.has_key(wf):
                logData[wf] = {'events': [], 'failed': [], 'warning': []}
                for s in range(0, self.StepsPerJob):
                    for k in logData[wf].keys():
                        logData[wf][k].append(-1)
            warn = 0
            err = 0
            rd = 0
            inFile = open(logFile)
            for line in inFile:
                if '%MSG-w' in line: warn += 1
                if '%MSG-e' in line: err += 1
                if 'Begin processing the ' in line: rd += 1
            inFile.close()
            logData[wf]['events'][step] = rd
            logData[wf]['failed'][step] = err
            logData[wf]['warning'][step] = warn

        from pickle import Pickler
        outFile = open('runTheMatrixMsgs.pkl', 'w')
        pklFile = Pickler(outFile)
        pklFile.dump(logData)
        outFile.close()
        return
Example 10
 def take_snapshot(self, object):
     file = open(fu.next_snapshot_file(self.basedir), "wb")
     logger.debug("Taking snapshot on: " + file.name)
     pickler = Pickler(file, pickle.HIGHEST_PROTOCOL)
     pickler.dump(object)
     file.flush()
     file.close()
Example 11
 def __setitem__(self, key, value):
     if self.writeback:
         self.cache[key] = value
     f = BytesIO()
     p = Pickler(f, self._protocol)
     p.dump(value)
     self.dict[key.encode(self.keyencoding)] = f.getvalue()
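This write path matches a shelve-style mapping: each value is pickled into an in-memory buffer and the raw bytes are stored. For context, the matching read path would look roughly like this (a counterpart sketch, assuming Unpickler and BytesIO are imported alongside Pickler; not necessarily this project's exact code):

 def __getitem__(self, key):
     try:
         value = self.cache[key]
     except KeyError:
         # decode the stored bytes back into an object
         f = BytesIO(self.dict[key.encode(self.keyencoding)])
         value = Unpickler(f).load()
         if self.writeback:
             self.cache[key] = value
     return value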
Example 12
 def encode(self):
     buf = ringbuffer.RingBuffer(2**16)
     pckler = Pickler(buf, protocol=-1, fix_imports=False)
     pckler.dump(self.value)
     if buf.freespace == 0:
         raise ProtocolError("Data too large.")
     return buf.read()
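The RingBuffer can stand in for a file here because Pickler only requires its target to expose a write() method that accepts bytes. A minimal sketch of the same duck typing, using a hypothetical adapter that just measures the payload:

from pickle import Pickler

class ByteCounter:
    """Hypothetical write-only 'file' that counts bytes instead of storing them."""
    def __init__(self):
        self.size = 0

    def write(self, data):
        self.size += len(data)

counter = ByteCounter()
Pickler(counter, protocol=-1).dump({"key": "value"})
print(counter.size)  # size of the pickled payload in bytes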
Example 13
def picklify(objs):
    from StringIO import StringIO
    from pickle import Pickler
    sio = StringIO()
    pickler = Pickler(sio)
    pickler.dump(objs)
    return sio.getvalue()
Example 14
  def parseLog(self):
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step>max_steps: max_steps=step
      if not logData.has_key(wf):
        logData[wf] = {'steps': {}, 'events' : [], 'failed' : [], 'warning' : []}
      if not logData[wf]['steps'].has_key(step):
        logData[wf]['steps'][step]=logFile
    cache_read=0
    log_processed=0
    for wf in logData:
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index =0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile)+"/logcache_"+str(step)+".json"
        if (not os.path.exists(json_cache)) or (os.path.getmtime(logFile)>os.path.getmtime(json_cache)):
          try:
            es_parse_log(logFile)
          except Exception as e:
            print "Sending log information to elasticsearch failed" , str(e)
          inFile = open(logFile)
          for line in inFile:
            if '%MSG-w' in line: data[1]=data[1]+1
            if '%MSG-e' in line: data[2]=data[2]+1
            if 'Begin processing the ' in line: data[0]=data[0]+1
          inFile.close()
          jfile = open(json_cache,"w")
          json.dump(data,jfile)
          jfile.close()
          log_processed+=1
        else:
          jfile = open(json_cache,"r")
          data = json.load(jfile)
          jfile.close()
          cache_read+=1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index+=1
      del logData[wf]['steps']

    print "Log processed: ",log_processed
    print "Caches read:",cache_read
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir,'runTheMatrixMsgs.pkl'), 'w')
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
Example 15
class Environment(object):

    handle = None
    loader = None
    writer = None
    data = None


    def __init__(self):

        # 'a+' instead of 'w+' so an existing environment file is not truncated
        self.handle = open("environment.pickle", 'a+')
        self.handle.seek(0)
        self.loader = Unpickler(self.handle)
        self.writer = Pickler(self.handle)

        try:
            self.data = self.loader.load()
        except EOFError:
            print "WARNING: Empty environment, creating environment file."
            self.data = {}
            self.write(self.data)


    def write(self, data):

        self.writer.dump(data)
Example 16
 def create(dataset, file_handler, keys=None):
     if isinstance(file_handler, str):
         with open(file_handler, "wb") as file_handler:
             return PickledDataset.create(dataset, file_handler, keys=keys)
     index = {}
     pickler = Pickler(file_handler)
     # allocate space for index offset
     file_handler.seek(0)
     pickler.dump(1 << 65)  # 64 bits placeholder
     if keys is None:
         keys = dataset.list_keys()
     for key in keys:
         # pickle objects and build index
         index[key] = file_handler.tell()
         obj = dataset.query_item(key)
         pickler.dump(obj)
         pickler.memo.clear()
     # put index and record offset
     index_location = file_handler.tell()
     pickler.dump(index)
     # put context
     context = getattr(dataset, "_context", None)
     if context:
         pickler.dump({**context})
     # put index offset at the beginning of the file
     file_handler.seek(0)
     index_location ^= 1 << 65
     pickler.dump(index_location)
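Reading such a file back mirrors the layout: the first pickle holds the index offset XOR-ed with 1 << 65 (the mask keeps the placeholder and the final value the same serialized size), and the index maps each key to the offset of its pickled item. A minimal reader sketch under those assumptions (read_index is a hypothetical helper):

from pickle import Unpickler

def read_index(file_handler):
    # first object in the file: the index offset, stored XOR-ed with 1 << 65
    file_handler.seek(0)
    index_location = Unpickler(file_handler).load() ^ (1 << 65)
    # jump to the index and load the key -> file-offset mapping
    file_handler.seek(index_location)
    return Unpickler(file_handler).load()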
Example 18
def save(basename, **data):
    """
    Save data using cPickle

    @type basename : str
    @param basename : The name of the file to save the data in, .cpickle
    will be appended to the file name if not provided
    @param data : The actuall data to be saved.

    """

    check_arg(basename, str, 0)

    # If zero data size just return
    if len(data) == 0:
        return

    filename = basename if ".cpickle" in basename else basename + ".cpickle"

    f = open(filename, "w")

    p = Pickler(f)

    # Dump the dictionary kwarg
    p.dump(data)
    f.close()
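A counterpart loader would simply unpickle the saved kwargs dict; a minimal sketch, assuming the same .cpickle naming convention (the name load is hypothetical):

from pickle import Unpickler

def load(basename):
    # resolve the same filename convention used by save()
    filename = basename if ".cpickle" in basename else basename + ".cpickle"
    with open(filename, "rb") as f:
        return Unpickler(f).load()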
Example 19
 def saveMX(self, mx):
     ruta = os.path.split(sys.argv[0])
     abs = os.path.join(ruta[0], "db/matrix.mx")
     filemx = open(abs, 'w')
     serializer = Pickler(filemx)
     serializer.dump(mx)
     print 'matrix salvada'
Example 22
    def parseLog(self):
        logData = {}
        logRE = re.compile(r'^([1-9][0-9]*\.[0-9]+)[^/]+/step([1-9])_.*\.log$')
        for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
            m = logRE.match(logFile)
            if not m: continue
            wf = m.group(1)
            step = int(m.group(2)) - 1
            if step >= self.StepsPerJob: continue
            if not logData.has_key(wf):
                logData[wf] = {'events' : [], 'failed' : [], 'warning' : []}
                for s in range(0, self.StepsPerJob):
                    for k in logData[wf].keys(): logData[wf][k].append(-1)
            warn = 0
            err = 0
            rd = 0
            inFile = open(logFile)
            for line in inFile:
                if '%MSG-w' in line: warn += 1
                if '%MSG-e' in line: err += 1
                if 'Begin processing the ' in line: rd += 1
            inFile.close()
            logData[wf]['events'][step] = rd
            logData[wf]['failed'][step] = err
            logData[wf]['warning'][step] = warn

        from pickle import Pickler
        outFile = open(os.path.join(self.basedir,'runTheMatrixMsgs.pkl'), 'w')
        pklFile = Pickler(outFile)
        pklFile.dump(logData)
        outFile.close()
        return
                
Example 23
 def write_to_disk(self):
     try:
         # the with-block ensures the file handle is closed after dumping
         with open(self.file, 'wb') as f:
             p = Pickler(f)
             p.dump(self.reg)
     except PicklingError:
         sys.stderr.write(
             "ERROR: could not save the registry to path {}".format(
                 self.file))
Example 24
    def save_positions (self):
        """ Save the current character positions in a file """
        val = self.file_val()

        assert self.positions is not None
        with open(self.__data_path + 'current/' + val, 'w+b') as file:
            pickler = Pickler(file)
            pickler.dump(self.positions)
Example 25
    def enregistrer(self):
        """
            Fonction qui enregistre toutes les données de l'utilisateur dans
            un fichier save.elh
        """

        with open("save.elh", "wb") as fichier_sauvegarde:
            pickler = Pickler(fichier_sauvegarde)
            pickler.dump(self)
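Since the whole object is pickled, reloading it only works if its class is importable at load time; a minimal counterpart sketch (the name charger is hypothetical):

from pickle import Unpickler

def charger():
    # reload the object written by enregistrer(); the class definition
    # must be importable for unpickling to succeed
    with open("save.elh", "rb") as fichier_sauvegarde:
        return Unpickler(fichier_sauvegarde).load()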
Example 26
def pickle_outcomes(outcomes, fn):
    fh = open(fn, 'w')
    p = Pickler(fh)
    p.dump(outcomes)
    fh.close()
    if KEEP_TIMING:
        end_t = time()
        time_map["pickling"] = end_t - start_t
        start_t = time()
Example 27
 def pack_datas(self):
     """
     interacts with 'datas' file.
     it dumps 'self.pwmanagement._datas_list' in 'datas'
     this function is called every time we need to save and store new or modified accounts
     """
     with open('datas', 'wb') as datas_file:
         pickler_datas = Pickler(datas_file)
         pickler_datas.dump(self.pwmanagement._datas_list)
Example 29
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism.
    """
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(aq_base(obj))
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
Example 30
    def testPickleUnpickle(self):
        s = BytesIO()
        p = Pickler(s)
        p.dump(Allow)
        s.seek(0)
        u = Unpickler(s)
        newAllow = u.load()

        self.assertTrue(newAllow is Allow)
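The identity check passes because objects like Allow can pickle by reference, i.e. as the name of a module-level global rather than by value, so load() hands back the very same object. A minimal sketch of the mechanism with a stand-in constant:

from io import BytesIO
from pickle import Pickler, Unpickler

class _Sentinel:
    def __reduce__(self):
        # pickle this object as a reference to the global name below
        return "SENTINEL"

SENTINEL = _Sentinel()

s = BytesIO()
Pickler(s).dump(SENTINEL)
s.seek(0)
assert Unpickler(s).load() is SENTINEL  # same object, not a copy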
Example 31
def dump(obj):
    print "* Dumping..."
    setrecursionlimit(10000)
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    pickle_buffer = StringIO()
    pickler = Pickler(pickle_buffer, -1)
    pickler.persistent_id = persistent_id
    pickler.dump(obj)
    print "* Dumped!"
    return pickle_buffer
Example 32
def save(obj, filename):
    print "* Saving..."
    setrecursionlimit(10000)
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    out_file = open(filename, "wb")
    pickler = Pickler(out_file, -1)
    pickler.persistent_id = persistent_id
    pickler.dump(obj)
    out_file.close()
    print "* Saved!"
Example 33
 def save(self, path):
     with open(self.fname(path), 'wb') as f:
         p = Pickler(f, -1)
         p.dump(self)
     # remove old *.memory files
     save_dir = Path(path).parent
     for memfile in save_dir.glob("*.memory"):
         basefile = memfile.with_suffix('.meta')
         if not basefile.is_file():
             memfile.unlink()
Example 34
    def _sauvegarder(self):
        """
            Sauvegarde la map.
        """

        try:
            with open(adresse_fichier_sauvegarde, 'wb') as fichier_sauvegarde:
                pick = Pickler(fichier_sauvegarde)
                pick.dump(self._map)
        except:
            print("Erreur lors de l'enregistrement du fichier")
Example 35
 def save_server(self, abs_path=None, save_dir=None, ext=None):
     """Save a new Server."""
     if os.path.exists(save_dir) and not os.path.isfile(save_dir):
         os.chdir(save_dir)
     else:
         os.mkdir(save_dir)
         os.chdir(save_dir)
     with open(self.name + ext, 'wb') as new_file:
         pickler = Pickler(new_file)
         pickler.dump(self)
     os.chdir(abs_path)
Example 36
 def save_state(self):
     with open(self.options.results_file, 'w') as f:
         dump(self.results, f, indent=4, separators=(',', ': '))
     with open(self.statefile, 'wb') as f:
         p = Pickler(f)
         p.dump(self.target)
         p.dump(self.session)
         p.dump(self.cleanup_session)
         p.dump(self.options)
         p.dump(self.visited)
         p.dump(self.pending)
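Since dump() is called several times on one Pickler, the state file holds a sequence of concatenated pickles; restoring them means calling load() the same number of times, in the same order. A minimal counterpart sketch (load_state is a hypothetical name):

from pickle import Unpickler

def load_state(statefile):
    # six pickles, read back in the order they were dumped:
    # target, session, cleanup_session, options, visited, pending
    with open(statefile, 'rb') as f:
        u = Unpickler(f)
        return [u.load() for _ in range(6)]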
Example 38
 def sync(self):
     res = {}
     with dbm.open(self.db, self.flag) as db:
         for k, v in self.dict.items():
             f = io.BytesIO()
             p = Pickler(f, protocol=self._protocol)
             p.dump(v)
             db[k] = f.getvalue()
         try:
             db.sync()
         except AttributeError:
             pass
Example 39
def pickle(obj, filename, protocol=0):
    f = None
    try:
        f = open(filename, "wb")
        p = Pickler(f, protocol)
        p.dump(obj)
        f.close()
        f = None
        # print "Pickled", filename
    finally:
        if f:
            f.close()
Example 40
 def _dump_blocked(self):
     dump = os.path.join(self.get_cache_dir(), "autoqueue_block_cache")
     if not self._blocked_artists:
         try:
             os.remove(dump)
         except OSError:
             pass
         return
     with open(dump, 'wb') as pickle_file:
         pickler = Pickler(pickle_file, -1)
         to_dump = (self._blocked_artists, self._blocked_artists_times)
         pickler.dump(to_dump)
Example 43
    def _tuer_sauvegarde(self):
        """
            Efface la sauvegarde.

            Devient inefficace si le mode de sauvegarde change.
        """

        try:
            with open(adresse_fichier_sauvegarde, 'wb') as fichier_sauvegarde:
                pick = Pickler(fichier_sauvegarde)
                pick.dump(None)
        except:
            print("Erreur lors de l'enregistrement du fichier")
Example 45
 def createPickleChild(self, name, child):
     if not os.path.isdir(self.path):
         resource.Resource.putChild(self, name, child)
     if type(child) == type(""):
         fl = open(os.path.join(self.path, name), 'wb')
         fl.write(child)
     else:
         if '.' not in name:
             name = name + '.trp'
         fl = open(os.path.join(self.path, name), 'wb')
         from pickle import Pickler
         pk = Pickler(fl)
         pk.dump(child)
     fl.close()
Example 46
 def createPickleChild(self, name, child):
     if not os.path.isdir(self.path):
         resource.Resource.putChild(self, name, child)
     # xxx use a file-extension-to-save-function dictionary instead
     if type(child) == type(""):
         fl = open(os.path.join(self.path, name), 'wb')
         fl.write(child)
     else:
         if '.' not in name:
             name = name + '.trp'
         fl = open(os.path.join(self.path, name), 'wb')
         from pickle import Pickler
         pk = Pickler(fl)
         pk.dump(child)
     fl.close()
Example 47
    def doCheck(self):
        import glob
        pkgDirList = glob.glob(self.startDir+'/src/[A-Z]*/*')
        errMap = {}
        for pkg in pkgDirList:
            if not os.path.isdir(pkg): continue
            pkg = re.sub('^'+self.startDir+'/src/', '', pkg)
            missing = self.checkPkg(pkg)
            if missing: errMap[pkg] = missing

        from pickle import Pickler
        summFile = open('libchk.pkl','w')
        pklr = Pickler(summFile)
        pklr.dump(errMap)
        summFile.close()
Example 48
def doPickling(option,pickleFile,obj=None):
    varRootHash = {} 
    from pickle import Pickler  
    if os.path.isfile(pickleFile):
        os.remove(pickleFile)
        print "DEBUG:[Pickling] Removing existing pickleFile: ",pickleFile
    f = open(pickleFile,"w")
    if option == "CATALOG" :
        print "DEBUG:[Pickling] Pickling CATALOG..."
        fileSeekHash = obj 
        print "DEBUG:[doPickling] Before dumping to"+pickleFile+"->\n",stemmedQTList
        p = Pickler(f)
        p.dump(makecataloghash())
        f.close()
    else:
        print "******ERROR******* Specify correct pickle option"
    def dump_brain(self):
        """
        Record the configuration of the actor and critic into a binary file.
        :return: Nothing
        """

        # Open the file for writing in binary mode
        with open("Config/actor.cfg", "wb") as cfg_file:
            # Instantiate a pickler
            pickle = Pickler(cfg_file)
            # Dump the configuration
            pickle.dump(self._actor)

        # Repeat the operation for the critic
        with open("Config/critic.cfg", "wb") as cfg_file:
            pickle = Pickler(cfg_file)
            pickle.dump(self._critic)
Example 50
 def dump(self, obj):
     self.inject_addons()
     try:
         return Pickler.dump(self, obj)
     except RuntimeError as e:
         if 'recursion' in e.args[0]:
             msg = """Could not pickle object as excessively deep recursion required."""
             raise pickle.PicklingError(msg)
Example 51
 def save(self, path):
     grille = np.zeros((9, 9), dtype=int)
     modif = np.zeros((9, 9), dtype=bool)
     possibilites = []
     for i in range(9):
         possibilites.append([])
         for j in range(9):
             grille[i, j] = self.blocs[i, j].get_val()
             modif[i, j] = self.blocs[i, j].is_modifiable()
             possibilites[i].append(self.blocs[i, j].get_possibilites())
     with open(path, "wb") as fich:
         p = Pickler(fich)
         p.dump(grille)
         p.dump(modif)
         p.dump(possibilites)
         p.dump(self.chrono)
         p.dump(self.level)
Example 52
def enregistrer_score(adresse_fichier_score, joueur, nouveau_score):
    """Ouvre le fichier des scores, s'il existe, selon le joueur, le score est chargé et envoyé,
    sinon on renvoie 0. Se charge également de charger entièrement le fichier des scores"""
    global score

    try:
        with open(adresse_fichier_score, 'wb') as fichier_score:
            #Définition du score
            score[joueur] = score.get(joueur, 0) + nouveau_score

            #Enregistrement du socre
            pick = Pickler(fichier_score)
            pick.dump(score)
    except IOError:
        print("Erreur lors de l'enregistrement du fichier")

    return score[joueur]
Example 53
 def pickle(self):
     outfile=open(self.saveFile,'wb')
     pickle = Pickler(outfile)
     kinds={"kind":[], "adjective":[], "entity":[]}
     for knd in self._kinds:
         if knd not in kinds:
             kinds[knd]=[]
             typ=knd
             base=self._kinds[typ].__base__.__name__
             while base not in kinds:
                 kinds[base]=[typ]
                 typ=base
                 base=self._kinds[typ].__base__.__name__
             kinds[base].append(typ)
     pickle.dump(kinds)
     globals().update(self._kinds)
     pickle.dump((self._entities,self._antecedents,self._names,self._adjectives))
     outfile.close()
Example 54
 def stop(self):
   try:
     for t in self.threads:
       try:
         t.interrupt()
       except:
         pass
     for t in self.threads:
       try:
         t.join()
       except:
         pass
     self.inventoryLock.acquire()
     with open(os.path.expanduser("~/.ingressbot.pkl"), "wb") as f:
       pickler = Pickler(f)
       pickler.dump(self.inventory)
   finally:
     self.inventoryLock.release()
   self.logger.info("stopped")
Example 55
  def parseLog(self):
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*\.[0-9]+)[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(2))
      if step>max_steps: max_steps=step
      if not logData.has_key(wf):
        logData[wf] = {'steps': {}, 'events' : [], 'failed' : [], 'warning' : []}
      if not logData[wf]['steps'].has_key(step):
        logData[wf]['steps'][step]=logFile
    for wf in logData:
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index =0
      for step in sorted(logData[wf]['steps']):
        warn = 0
        err = 0
        rd = 0
        inFile = open(logData[wf]['steps'][step])
        for line in inFile:
          if '%MSG-w' in line: warn += 1
          if '%MSG-e' in line: err += 1
          if 'Begin processing the ' in line: rd += 1
        inFile.close()
        logData[wf]['events'][index] = rd
        logData[wf]['failed'][index] = err
        logData[wf]['warning'][index] = warn
        index+=1
      del logData[wf]['steps']

    from pickle import Pickler
    outFile = open(os.path.join(self.basedir,'runTheMatrixMsgs.pkl'), 'w')
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
Example 56
    def createPickleChild(self, name, child):
        warnings.warn(
            "File.createPickleChild is deprecated since Twisted 9.0.  "
            "Resource persistence is beyond the scope of Twisted Web.",
            DeprecationWarning, stacklevel=2)

        if not os.path.isdir(self.path):
            resource.Resource.putChild(self, name, child)
        # xxx use a file-extension-to-save-function dictionary instead
        if type(child) == type(""):
            fl = open(os.path.join(self.path, name), 'wb')
            fl.write(child)
        else:
            if '.' not in name:
                name = name + '.trp'
            fl = open(os.path.join(self.path, name), 'wb')
            from pickle import Pickler
            pk = Pickler(fl)
            pk.dump(child)
        fl.close()
Example 57
    def tokens_referenced(self):
        """
        Return a list of all the tokens that are referenced (i.e. contained in) 
        this message.  Tokens that haven't been assigned an id yet are searched 
        recursively for tokens.  So this method may return fewer results after 
        the message is sent.  This information is used by the game engine to 
        catch mistakes like forgetting to add a token to the world or keeping a 
        stale reference to a token after its been removed.
        """
        tokens = set()

        # Use the pickle machinery to find all the tokens contained at any 
        # level of this message.  When an object is being pickled, the Pickler 
        # calls its persistent_id() method for each object it encounters.  We  
        # hijack this method to add every Token we encounter to a list.

        # This definitely feels like a hacky abuse of the pickle machinery, but 
        # that notwithstanding this should be quite robust and quite fast.

        def persistent_id(obj):
            from .tokens import Token

            if isinstance(obj, Token):
                tokens.add(obj)

                # Recursively descend into tokens that haven't been assigned an 
                # id yet, but not into tokens that have.

                return obj.id

        from pickle import Pickler
        from io import BytesIO

        # Use BytesIO to basically ignore the serialized data stream, since we 
        # only care about visiting all the objects that would be pickled.

        pickler = Pickler(BytesIO())
        pickler.persistent_id = persistent_id
        pickler.dump(self)

        return tokens
Example 58
def process(infile='t10k'):
	f = open(infile,'r')
	out = open('report.pickle','w')
	agents = loadagents()
	pickler = Pickler(out)
	for line in f.readlines():
		n, h = line.strip().split(',',1)
		for ua in agents:
			fetcher = HeaderFetcher('http://'+h,{'User-Agent':ua})
			print n, h, ua
			fetcher.fetch()
			pickler.dump((n,h,ua,fetcher.report))
			for url in fetcher.report:
				print 'GET',url
				for header in fetcher.report[url]:
					print header
			print '========================'
	print 'done'
	out.flush()
	out.close()
	f.close()
Example 59
    def pack(self, message):
        from pickle import Pickler
        from io import BytesIO
        from .tokens import Token
        from .messages import Message, require_message

        buffer = BytesIO()
        delegate = Pickler(buffer)

        def persistent_id(token):
            if isinstance(token, Token):
                assert isinstance(message, Message), msg("""\
                        Both Message and ServerResponse objects can be 
                        serialized, but only Messages can contain tokens.""")

                assert token.id, msg("""\
                        Every token should have an id by now.  Tokens that are 
                        in the world should always have an id, and tokens that 
                        are being added to the world should've been assigned an 
                        id by Actor.send_message().""")

                if token in self.world:
                    assert token not in message.tokens_to_add(), msg("""\
                            Actor.send_message() should've refused to send a 
                            message that would add a token that's already in 
                            the world.""")
                    return token.id

                else:
                    assert token in message.tokens_to_add(), msg("""\
                            Actor.send_message() should've refused to send a 
                            message referencing tokens that aren't in the world 
                            and aren't being added to the world.""")
                    return None

        delegate.persistent_id = persistent_id
        delegate.dump(message)

        return buffer.getvalue()
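The matching unpack side is not shown here; a minimal counterpart sketch, assuming a persistent_load hook that resolves ids through the same world object (world.get_token is a hypothetical lookup):

    def unpack(self, packet):
        from pickle import Unpickler
        from io import BytesIO

        delegate = Unpickler(BytesIO(packet))

        def persistent_load(token_id):
            # ids produced by persistent_id() above are resolved back to
            # the live token in the shared world
            return self.world.get_token(token_id)

        delegate.persistent_load = persistent_load
        return delegate.load()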