Пример #1
0
 def __init__(self, outFile, saveFile):
     """Set up the world: register the dynamic kind classes and either
     restore the pickled world state from *saveFile* or build the default
     one (the computer, the user and a sample thing).

     :param outFile: output stream handed to the entity classes
     :param saveFile: path of the pickled world-state file
     """
     computerName=platform.node().capitalize()
     userName=getpass.getuser().capitalize()
     self.outFile=outFile
     self.saveFile=saveFile
     # Registry of the dynamic classes; injected into globals() below so
     # unpickling can resolve these names.
     self._kinds={"kind":kind, "adjective":adjective,"entity":entity,"name":name,"thing":thing,"person":person,
                  "computer":computer,"user":user,"infinitive":infinitive,"pronoun":pronoun,
                  "male":male,"female":female,"place":place,"location":location,"number":number,
                  "time":time}
     try:
         # 'with' guarantees the handle is closed; the original called
         # `infile.close` without parentheses, so it never closed the file.
         with open(saveFile,'rb') as infile:
             unpickle = Unpickler(infile)
             kinds=unpickle.load()
             self._loadKinds(kinds, "entity")
             self._loadKinds(kinds, "adjective")
             globals().update(self._kinds)   #inject dynamic classes into globals
             self._entities,self._antecedents,self._names,self._adjectives=unpickle.load()
     except Exception:
         # Any failure (missing or corrupt save file) falls back to the
         # default world — mirrors the original bare `except:` intent
         # while no longer swallowing KeyboardInterrupt/SystemExit.
         self._entities={computerName:computer(computerName,self,True),userName:user(userName,self,True),"Vibranium":thing("Vibranium",self)}
         self._antecedents={"I":self._entities[userName],"you":self._entities[computerName]}
         self._names={}
         self._adjectives={}
         for key,value in self._entities.items():
             # Map each entity back to the set of names that refer to it.
             if value in self._names:
                 self._names[value].add(key)
             else:
                 self._names[value]={key}
     self._temp={}   #stores 'a/an' objects, possessives, prepositional phrases, and numbers
    def load_brain(self):
        """
        Load the actor and critic's configurations from the corresponding files.

        Reads ``Config/actor.cfg`` and ``Config/critic.cfg`` and unpickles
        their contents into ``self._actor`` and ``self._critic``.

        :return: True if both configurations loaded, False otherwise
        """
        try:
            # Load the actor's pickled configuration.
            with open("Config/actor.cfg", "rb") as cfg_file:
                self._actor = Unpickler(cfg_file).load()

            # Repeat the operation for the critic.
            with open("Config/critic.cfg", "rb") as cfg_file:
                self._critic = Unpickler(cfg_file).load()

            # Everything up to this point went well.
            return True

        except IOError:
            # Warn the caller; the actor and critic are left in an
            # unknown state. (Fixed typo: "prevously" -> "previously".)
            print("No previously recorded actor and/or critic configuration")
            return False
Пример #3
0
def doUnPickling(option,pickleFile):
    """Unpickle and return the data set selected by *option* from *pickleFile*.

    Python 2 code. If the pickle file is missing it is (re)built via
    doPickling(), except for TERM_CTFMAP, which is fatal.
    """
    varRootHash = {}
    from pickle import Unpickler
    if not os.path.isfile(pickleFile):
        print "DEBUG:[doUnPickling] PickleFile does not exist."
        if option == "TERM_CTFMAP":
            # The term-CTF map cannot be rebuilt here: abort.
            print "****ERROR Unpickling****"; exit(1)
        doPickling(option,pickleFile)
    f = open(pickleFile,"r")
    if option == "STEM_TEXT":
        # Stemmed-text root hash.
        up = Unpickler(f)
        varRootHash = up.load()
        f.close()
#        print "DEBUG:[doUnPickling] Unpickled "+option+": "#,unpkldData
        return varRootHash  
    elif option == "TERM_CTFMAP" :
        # Term -> collection term frequency map.
        print "DEBUG:[doUnpickling] UnPickling Term CTF..."
        up = Unpickler(f)
        termCtfHash = up.load()
        f.close()
        return termCtfHash
    elif option == "STOPLIST" :
        # Stopword list.
        print "DEBUG:[doUnpickling] UnPickling stoplist..."
        up = Unpickler(f)
        stoplst = up.load()
        f.close()
        return stoplst
    elif option == "CATALOG" :
        # Term -> seek-offset catalog.
        print "DEBUG:[doUnpickling] UnPickling Catalog.."
        up = Unpickler(f)
        termSeekHash = up.load()
        f.close()
        return termSeekHash
Пример #4
0
def applyConfiguration(index):
    """Apply the saved configuration selected at *index* (Python 2 code).

    The file is located in the semicolon-separated path list of cnfg_load.
    Its pickle stream starts with a 'KKB' magic marker followed by
    alternating attribute-name / attribute-value records.
    """
    file = configurations[index].value
    if not file:
        print 'no file selected'
        return

    index = 0
    for path in filter(None, cnfg_load.value.split(';')):
        if file == os.path.basename(path):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                # First record must be the format's magic marker.
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    # Restore every public, non-method attribute, in order.
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        # Loop finished without break: everything loaded.
                        print 'apply:', os.path.basename(path)
                        model.apply()
            finally:
                fh.close()
Пример #5
0
def runConfigurations():
    """Load every saved configuration listed in cnfg_load and start a scan
    for each one (Python 2 code).

    Each file's pickle stream starts with a 'KKB' magic marker followed by
    alternating attribute-name / attribute-value records.
    """
    if cnfg_load.value:
        for path in filter(None, cnfg_load.value.split(';')):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                # First record must be the format's magic marker.
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    # Restore every public, non-method attribute, in order.
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        # Loop finished without break: everything loaded.
                        print 'run:', os.path.basename(path)
                        startScan(model)
            finally:
                fh.close()
Пример #6
0
 def load_state(self):
     """Restore scanner state: results come from the results file (via
     load()), the remaining fields from the pickled state file."""
     with open(self.options.results_file, 'r') as results_fh:
         self.results = load(results_fh)
     with open(self.statefile, 'rb') as state_fh:
         unpickler = Unpickler(state_fh)
         # The state file holds these fields pickled in this exact order.
         for field in ('target', 'session', 'cleanup_session',
                       'options', 'visited', 'pending'):
             setattr(self, field, unpickler.load())
Пример #7
0
def load(fn):
    """Read dis_list, the 4 Hz data and exp_list from pickle file *fn*.

    Returns (dis_list, pairs, exp_list) where pairs is a list of FreqPair
    objects built from the 4 Hz (excitation, emission) -> fluorescence map.
    """
    unpickler = Unpickler(open(fn, 'r'))
    dis_list = unpickler.load()
    data4hz = unpickler.load()
    exp_list = unpickler.load()
    pairs = []
    for hzkey, flu in data4hz.items():
        pair = FreqPair()
        # Key layout: (excitation, emission); value is the fluorescence.
        pair.ex = hzkey[0]
        pair.em = hzkey[1]
        pair.flu = flu
        pairs.append(pair)
    return (dis_list, pairs, exp_list)
Пример #8
0
def fromFile(worldDir):
    '''Load a world object from a campaign bundle.

    :param worldDir: directory containing the campaign's ``data.pickle``
    :return: the unpickled OFWorld instance
    :raises TypeError: on a pickle-format version mismatch, or when the
        pickled payload is not an OFWorld
    '''
    pickleFileName = os.path.join(worldDir, 'data.pickle')
    # Context manager closes the handle deterministically (the original
    # leaked it, including on the version-mismatch error path).
    with open(pickleFileName, 'rb') as pickleFile:
        pickle = Unpickler(pickleFile)
        fileVersion = pickle.load()
        if fileVersion != openfrontier.PICKLE_VERSION:
            raise TypeError(
                    "Data version mismatch: expected {0}; got {1}.".format(
                        openfrontier.PICKLE_VERSION, fileVersion))
        world = pickle.load()
    if not isinstance(world, OFWorld):
        raise TypeError(
                "Expected OFWorld; got {0}".format(type(world).__name__))
    return world
Пример #9
0
def test_unpickle_gpuarray_as_numpy_ndarray_flag1():
    """With unpickle_gpu_on_cpu disabled, loading a GpuArray pickle must
    raise (no usable GPU context in the test environment)."""
    saved_flag = config.experimental.unpickle_gpu_on_cpu
    config.experimental.unpickle_gpu_on_cpu = False

    try:
        pkl_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "GpuArray.pkl")
        with open(pkl_path, "rb") as fp:
            unpickler = Unpickler(fp, encoding="latin1")
            with pytest.raises((ImportError, ContextNotDefined)):
                unpickler.load()
    finally:
        # Always restore the global flag for the other tests.
        config.experimental.unpickle_gpu_on_cpu = saved_flag
Пример #10
0
    def __init__(self, partIn, maxThreadsIn=8):
        """Set up the analyzer for release part *partIn*.

        Reads the release product list and the pickled valgrind command
        map from $CMSSW_RELEASE_BASE.

        :param partIn: name of the release part to analyze
        :param maxThreadsIn: maximum number of worker threads (default 8)
        """
        super(ValgrindAnalyzer, self).__init__()
        self.release = None
        self.plat    = None
        self.part    = partIn
        self.ignoreLibs = ['libm-2.5.so','libpthread-2.5.so', 'cmsRun']
        self.libList = []

        prodFileName = os.environ['CMSSW_RELEASE_BASE']+'/src/ReleaseProducts.list'
        self.rpr = RelProdReader()
        self.rpr.readProducts(prodFileName)

        vgCmdFileName = os.environ['CMSSW_RELEASE_BASE']+'/qaLogs/vgCmds.pkl'
        from pickle import Unpickler
        # Binary mode ('rb'): pickle data is bytes and text mode breaks
        # under Python 3; 'with' also guarantees the handle is closed.
        with open(vgCmdFileName, 'rb') as vgCmdFile:
            self.vgCmds = Unpickler(vgCmdFile).load()

        self.toDo = []

        self.threadList = []
        self.maxThreads = maxThreadsIn
        self.threadStatus = {}
        self.threadTiming = {}
Пример #11
0
 def recover_snapshot(self):
     """Load and return the most recent snapshot object from basedir.

     :return: the unpickled snapshot
     :raises OSError: if the snapshot file cannot be opened
     """
     snap_path = path.join(self.basedir, fu.last_snapshot_file(self.basedir))
     # The original's `if not file: return None` was dead code — open()
     # raises on failure, it never returns a falsy handle. 'with' also
     # closes the handle, which the original leaked.
     with open(snap_path, "rb") as snap_file:
         logger.debug("Recovering snapshot from: " + snap_file.name)
         return Unpickler(snap_file).load()
Пример #12
0
    def __init__(self, partIn, maxThreadsIn=8):
        """Set up the analyzer for release part *partIn*.

        Reads the release product list and the pickled valgrind command
        map from $CMSSW_RELEASE_BASE.

        :param partIn: name of the release part to analyze
        :param maxThreadsIn: maximum number of worker threads (default 8)
        """
        super(ValgrindAnalyzer, self).__init__()
        self.release = None
        self.plat = None
        self.part = partIn
        self.ignoreLibs = ['libm-2.5.so', 'libpthread-2.5.so', 'cmsRun']
        self.libList = []

        prodFileName = os.environ[
            'CMSSW_RELEASE_BASE'] + '/src/ReleaseProducts.list'
        self.rpr = RelProdReader()
        self.rpr.readProducts(prodFileName)

        vgCmdFileName = os.environ['CMSSW_RELEASE_BASE'] + '/qaLogs/vgCmds.pkl'
        from pickle import Unpickler
        # Binary mode ('rb'): pickle data is bytes and text mode breaks
        # under Python 3; 'with' also guarantees the handle is closed.
        with open(vgCmdFileName, 'rb') as vgCmdFile:
            self.vgCmds = Unpickler(vgCmdFile).load()

        self.toDo = []

        self.threadList = []
        self.maxThreads = maxThreadsIn
        self.threadStatus = {}
        self.threadTiming = {}
Пример #13
0
def _unpickle(pickled):
    """ Unpickles a string and catch all types of errors it can throw,
    to raise only NotReadableJobError in case of error.

    odoo stores the text fields as 'utf-8', so we specify the encoding.

    `loads()` may raises many types of exceptions (AttributeError,
    IndexError, TypeError, KeyError, ...). They are all catched and
    raised as `NotReadableJobError`).

    Pickle could be exploited by an attacker who would write a value in a job
    that would run arbitrary code when unpickled. This is why we set a custom
    ``find_global`` method on the ``Unpickler``, only jobs and a whitelist of
    classes/functions are allowed to be unpickled (plus the builtins types).
    """
    def restricted_find_global(mod_name, fn_name):
        # Resolve the requested global, but refuse anything that is not a
        # registered job or explicitly whitelisted.
        __import__(mod_name)
        mod = sys.modules[mod_name]
        fn = getattr(mod, fn_name)
        if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST):
            raise UnpicklingError('{}.{} is not allowed in jobs'.format(
                mod_name, fn_name))
        return fn

    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = restricted_find_global
    try:
        unpickled = unpickler.load()
    except Exception:
        # UnpicklingError is already an Exception subclass, so the
        # original's (Exception, UnpicklingError) tuple was redundant.
        raise NotReadableJobError('Could not unpickle.', pickled)
    return unpickled
Пример #14
0
 def loads(self, s):
     """Unpickle *s* (bytes), resolving persistent ids via self._get_object.

     :raises UnpicklingError: when a referenced Node class cannot be found
     """
     unpickler = Unpickler(BytesIO(s))
     unpickler.persistent_load = self._get_object
     try:
         return unpickler.load()
     except KeyError as e:
         raise UnpicklingError("Could not find Node class for %s" % e)
Пример #15
0
    def charger_donnees(self):
        """Called when the user object is created: load every saved value
        from ``save.elh`` if it exists, otherwise create the default data
        (keyboard shortcuts and an empty, unusable save slot)."""

        # --- Load saved data --------------------------------------------
        if "save.elh" in listdir(getcwd()):

            with open("save.elh", "rb") as fichier_sauvegarde:
                utilisateur_sauvegarde = Unpickler(fichier_sauvegarde).load()
                # Copy every persisted attribute onto this instance.
                for cle, valeur in utilisateur_sauvegarde.__dict__.items():
                    self.__setattr__(cle, valeur)

        # --- Create default data ----------------------------------------
        else:

            defauts = {
                "deplacer-haut": sf.Keyboard.Z,
                "deplacer-bas": sf.Keyboard.S,
                "deplacer-droite": sf.Keyboard.D,
                "deplacer-gauche": sf.Keyboard.Q,
                "menu": sf.Keyboard.ESCAPE,
                "interagir": sf.Keyboard.E,
                "inventaire": sf.Keyboard.SPACE,
                "map": sf.Keyboard.M,
            }
            for action, touche in defauts.items():
                self.raccourcis[action] = touche

            self.sauvegarde_utilisable = False
            self.sauvegarde = dict()
Пример #16
0
 def load(self, filepath):
     """Restore the regressor and both SVD models from the pickled state
     dict stored at *filepath*."""
     with open(filepath, 'rb') as handle:
         state = Unpickler(handle).load()
     # Unpack the persisted state dict into the model's attributes.
     self.regressor = state['regressor']
     self.user_svd = state['user_svd']
     self.item_svd = state['item_svd']
Пример #17
0
    def search(self, test=False):
        """Interactive search loop over the persisted term-document matrix
        (Python 2 code: raw_input/print, cmp-style sort).

        :param test: when True, resolve db/matrix.mx relative to the CWD
            instead of the script's location.
        """
        if test == True:
            abs = os.path.normpath(os.path.join(os.getcwd(), "db/matrix.mx"))
        else:
            ruta = os.path.split(sys.argv[0])
            abs = os.path.join(ruta[0], "db/matrix.mx")
        filemx = open(abs, 'r')
        serializer = Unpickler(filemx)
        self._mx = serializer.load()

        while True:
            cadena = raw_input("Search: ")
            if not cadena:
                return
            else:
                #search here
                cadena = cadena.strip()
                cadena = cadena.lower()
                cad = self._prep.ngram_tokenizer(text=cadena)
                resultado = list()
                # Build, per document, the weight vector of the query terms.
                for doc in self._mx.docs:
                    vector = list()
                    for q in cad:
                        if q in self._mx.terms:
                            pos = self._mx.terms.index(q)
                            vector.append(doc['terms'][pos])
                    resultado.append((doc['id'], vector))
                # Rank by Euclidean distance (Python 2 cmp-style sort).
                resultado.sort(lambda a, b: self.__Deuclidiana(a[1]) - self.
                               __Deuclidiana(b[1]),
                               reverse=True)
                print resultado
Пример #18
0
    def search(self, buscar=""):
        """Search *buscar* in the persisted term-document matrix and return
        the ranked (doc-id, weight-vector) list, or ['vacio'] for an empty
        query (Python 2 code: cmp-style sort)."""
        ruta = os.path.split(sys.argv[0])
        abs = os.path.join(ruta[0], "motor/db/matrix.mx")
        filemx = open(abs, 'r')
        serializer = Unpickler(filemx)
        self._mx = serializer.load()

        cadena = buscar
        if not cadena:
            return ['vacio']
        else:
            #search here
            cadena = cadena.strip()
            cadena = cadena.lower()
            cad = self._prep.ngram_tokenizer(text=cadena)
            resultado = list()
            # Build, per document, the weight vector of the query terms.
            for doc in self._mx.docs:
                vector = list()
                for q in cad:
                    if q in self._mx.terms:
                        pos = self._mx.terms.index(q)
                        vector.append(doc['terms'][pos])
                resultado.append((doc['id'], vector))
            # Rank by Euclidean distance (Python 2 cmp-style sort).
            resultado.sort(lambda a, b: self.__Deuclidiana(a[1]) - self.
                           __Deuclidiana(b[1]),
                           reverse=True)
            return resultado
Пример #19
0
def read_line1(name):  #Card name/setcodes
    """Compute the combined setcode for a card from its *name*.

    Looks up every archetype whose name occurs in *name* (from the pickled
    "util/sets" registry), drops a parent archetype whenever one of its
    sub-archetypes also matched, then packs the remaining codes into one
    integer, 16 bits per code. Returns 0 when the sets file is missing.
    """
    try:
        with open("util/sets", "rb") as fichier:
            unpickle = Unpickler(fichier)
            sets = unpickle.load()
            sets.sort_by_names()
            sets_in_name = list()
            for i in sets:
                if i.name in name:
                    sets_in_name.append(i.code)
            #Remove setcodes from the list if a sub-archetype is also there
            sets_to_remove = list()
            for i in sets_in_name:
                for j in sets.get_archetype_by_code(i).subs:
                    if sets.get_archetype_by_name(
                            j
                    ).code in sets_in_name and i not in sets_to_remove:
                        sets_to_remove.append(i)
            for i in sets_to_remove:
                del_index = sets_in_name.index(i)
                del sets_in_name[del_index]
            #Calculate output setcode
            total_setcode = 0
            for i in sets_in_name:
                # Each matched code occupies its own 16-bit (4 hex digit) slot.
                total_setcode += int(i, 0) * pow(16, 4 * sets_in_name.index(i))
            return int(total_setcode)
    except FileNotFoundError:
        return 0
Пример #20
0
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        # Blob storage on top of an in-memory mapping storage.
        database = DB(
            ZODB.blob.BlobStorage('blobs',
                                  ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        # Clone the committed blob via a pickle round-trip.
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        # Invalidating the clone must not blow up (the regression).
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Пример #21
0
def deepCopy(obj):
    """Return a deep copy of *obj* made via a pickle round-trip.

    Uses a binary in-memory buffer: pickle data is bytes, so the text
    StringIO the original used fails under Python 3.
    """
    from io import BytesIO
    stream = BytesIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
Пример #22
0
def test_unpickle_gpuarray_as_numpy_ndarray_flag2():
    """With unpickle_gpu_on_cpu enabled, a pickled GpuArray must come back
    as a plain numpy.ndarray with its data intact."""
    oldflag = config.experimental.unpickle_gpu_on_cpu
    config.experimental.unpickle_gpu_on_cpu = True

    try:
        testfile_dir = os.path.dirname(os.path.realpath(__file__))
        fname = "GpuArray.pkl"

        with open(os.path.join(testfile_dir, fname), "rb") as fp:
            u = Unpickler(fp, encoding="latin1")
            try:
                mat = u.load()
            except ImportError:
                # Windows sometimes fail with nonsensical errors like:
                #   ImportError: No module named type
                #   ImportError: No module named copy_reg
                # when "type" and "copy_reg" are builtin modules.
                if sys.platform == "win32":
                    exc_type, exc_value, exc_trace = sys.exc_info()
                    raise
                raise

        assert isinstance(mat, np.ndarray)
        assert mat[0] == -42.0

    finally:
        # Always restore the global flag for the other tests.
        config.experimental.unpickle_gpu_on_cpu = oldflag
Пример #23
0
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        # Blob storage on top of an in-memory mapping storage.
        database = DB(ZODB.blob.BlobStorage(
            'blobs', ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        # Clone the committed blob via a pickle round-trip.
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        # Invalidating the clone must not blow up (the regression).
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Пример #24
0
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads object saved
    with older versions of Eelbrain, and allows using a system file dialog to
    select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog will be
        shown. If the user cancels the file dialog, a RuntimeError is raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            print(repr(file_path))
    else:
        file_path = os.path.expanduser(file_path)
        if not os.path.exists(file_path):
            # Fall back to the same path with a '.pickled' extension added.
            new_path = os.extsep.join((file_path, 'pickled'))
            if os.path.exists(new_path):
                file_path = new_path

    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        # NOTE(review): map_paths presumably remaps module paths from older
        # Eelbrain pickles to their current locations — confirm; find_global
        # is the Python 2 pickle hook (Python 3 uses find_class).
        unpickler.find_global = map_paths
        obj = unpickler.load()

    return obj
Пример #25
0
def get_menu():
    """Return today's menu dict.

    If the cached "menu" file is still current, read it back from disk;
    otherwise fetch all menus fresh, pickle them to "menu" and record the
    refresh day in "last_updated".
    """
    if check_current():
        # Cached menu is still valid: read it back from disk.
        with open("menu", "rb") as f:
            return Unpickler(f).load()

    # Stale cache: fetch everything fresh.
    d = {
        "obed": get_menza_zizkov_obed(),
        "pizza": get_pizza(),
        "zdrava": get_zdrava(),
        "vecere": get_menza_zizkov_vecere(),
        "jarov": get_jarov(),
        "volha": get_volha()
    }

    # write new menu to file
    with open("menu", "wb") as f:
        f.truncate()
        Pickler(f, 0).dump(d)

    # update last_updated
    with open("last_updated", "wb") as f:
        f.truncate()
        Pickler(f, 0).dump(date.today().strftime("%w"))

    return d
Пример #26
0
    def get_all_products(self):
        """
            Yield, one by one, the products previously pickled into
            data_products.bin (one pickle record per product).

            Iteration stops at end of file or at the first falsy record,
            matching the original behaviour.
        """
        # 'with' closes the file on every path (the original only closed
        # it when EOFError fired).
        with open("data_products.bin", "rb") as fp:
            unpck = Unpickler(fp)
            try:
                d = unpck.load()
                while d:
                    yield d
                    d = unpck.load()
            except EOFError:  # End of file reached.
                # PEP 479: `raise StopIteration` inside a generator is a
                # RuntimeError on Python 3.7+; plain return ends iteration.
                return
Пример #27
0
class Environment(object):
    """Persistent key/value environment backed by "environment.pickle"
    (Python 2 code — note the print statement).

    The same file handle feeds both an Unpickler (reads) and a Pickler
    (appends snapshots of the data dict).
    """

    # Shared file handle used by both pickling directions.
    handle = None
    # Unpickler reading from the shared handle.
    loader = None
    # Pickler appending to the shared handle.
    writer = None
    # The environment's current data dict.
    data = None


    def __init__(self):
    
        # NOTE(review): mode 'w+' truncates environment.pickle on open, so
        # the load below always sees an empty file and any previously saved
        # environment is lost — 'r+'/'a+' would preserve it; confirm intent.
        self.handle = open("environment.pickle", 'w+')
        self.loader = Unpickler(self.handle)
        self.writer = Pickler(self.handle)

        try:
            self.data = self.loader.load()
        except EOFError:
            print "WARNING: Empty environment, creating environment file."
            self.data = {}
            self.write(self.data)


    def write(self, data):

        # Append a pickled snapshot of *data* through the shared handle.
        self.writer.dump(data)
Пример #28
0
 def loads(self, s):
     """Deserialize the pickled bytes *s*; persistent ids are resolved
     through self._get_object.

     :raises UnpicklingError: when a referenced Node class is missing
     """
     reader = Unpickler(BytesIO(s))
     reader.persistent_load = self._get_object
     try:
         return reader.load()
     except KeyError as missing:
         raise UnpicklingError("Could not find Node class for %s" % missing)
Пример #29
0
 def readData(self):
     """Load the navigator summary pickle from the site's QA path into self.data."""
     import config
     pklFileName = config.siteInfo['qaPath']+'/navigator-summary.pkl'
     from pickle import Unpickler
     # Binary mode ('rb'): pickle payloads are bytes and text mode breaks
     # under Python 3; 'with' also guarantees the handle is closed.
     with open(pklFileName, 'rb') as pklIn:
         self.data = Unpickler(pklIn).load()
Пример #30
0
 def readData(self):
     """Load the navigator summary pickle from the site's QA path into self.data."""
     import config
     pklFileName = config.siteInfo['qaPath'] + '/navigator-summary.pkl'
     from pickle import Unpickler
     # Binary mode ('rb'): pickle payloads are bytes and text mode breaks
     # under Python 3; 'with' also guarantees the handle is closed.
     with open(pklFileName, 'rb') as pklIn:
         self.data = Unpickler(pklIn).load()
Пример #31
0
    def charger_partie(self):
        """Load and return the saved game named self.nom from the
        'parties' directory; returns None when no such save exists."""
        chemin = os.path.join("parties", self.nom)

        if self.nom in os.listdir("parties"):
            # Rebind-free version of the original (which shadowed `file`).
            with open(chemin, "rb") as fichier:
                return Unpickler(fichier).load()
Пример #32
0
    def _reader(self, data):
        """Unpickle consecutive records out of *data*, feeding each loaded
        tuple to self._process until the buffer is exhausted."""
        unpickler = Unpickler(StringIO(data))
        try:
            while True:
                self._process(*unpickler.load())
        except EOFError:
            # End of stream: no more pickled records remain.
            pass
Пример #33
0
    def _reader(self, data):
        """Stream pickled records from the *data* buffer and dispatch each
        one (a tuple of arguments) to self._process; stop at end of stream."""
        stream = Unpickler(StringIO(data))
        try:
            while True:
                self._process(*stream.load())
        except EOFError:
            # Buffer exhausted — nothing left to process.
            pass
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism.

    The acquisition wrapper is stripped (aq_base) before pickling. A binary
    buffer is used because pickle data is bytes: the text StringIO of the
    original fails under Python 3.
    """
    from io import BytesIO
    stream = BytesIO()
    p = Pickler(stream, 1)
    p.dump(aq_base(obj))
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
Пример #35
0
    def testPickleUnpickle(self):
        """Pickling the Allow sentinel and unpickling it must yield the
        very same object (identity preserved)."""
        buf = BytesIO()
        Pickler(buf).dump(Allow)
        buf.seek(0)
        restored = Unpickler(buf).load()

        self.assertTrue(restored is Allow)
Пример #36
0
    def unpack(self, packet):
        """Unpickle *packet* (bytes) into an object; persistent ids are
        resolved through the world's token table."""
        from pickle import Unpickler
        from io import BytesIO

        unpickler = Unpickler(BytesIO(packet))
        # Persistent ids are token ids: look them up in the game world.
        unpickler.persistent_load = lambda pid: self.world.get_token(int(pid))
        return unpickler.load()
Пример #37
0
 def loads(self, string):
     """Unpickle *string* using the shared unpickle memo, then refresh the
     pickle memo so subsequent dumps stay consistent with it."""
     unpickler = Unpickler(StringIO(string))
     unpickler.memo = self._unpicklememo
     result = unpickler.load()
     self._updatepicklememo()
     return result
Пример #38
0
def getScoreDictionary(fileName):
    """Return the score dict pickled in *fileName*, or None when the file
    does not exist."""
    try:
        with open(fileName, "rb") as scoreFile:
            return Unpickler(scoreFile).load()
    except FileNotFoundError:
        return None
Пример #39
0
def restore(pickle_buffer):
    """Rebuild an object from the seekable *pickle_buffer*, re-registering
    the bound-method pickling helpers first."""
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    pickle_buffer.seek(0)
    unpickler = Unpickler(pickle_buffer)
    # Resolve persistent ids stored in the stream.
    unpickler.persistent_load = persistent_load
    return unpickler.load()
Пример #40
0
 def loadnetwork(self, filename):
     """Load the pickled network results from *filename* into self.res
     (Python 2 code: uses the file() builtin)."""
     # from cPickle import Unpickler
     from pickle import Unpickler
     f = file(filename, "rb")
     u = Unpickler(f)
     self.res = u.load()
     # tmp = u.load()
     # print tmp
     f.close()
Пример #41
0
def load(filename):
    """Restore an object from pickle file *filename*, re-registering the
    bound-method pickling helpers first (Python 2 code)."""
    print "* Loading..."
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    in_file = open(filename, "rb")
    unpickler = Unpickler(in_file)
    # persistent_load resolves persistent ids stored in the pickle stream.
    unpickler.persistent_load = persistent_load
    loaded = unpickler.load()
    print "* Loaded!"
    return loaded
Пример #42
0
def unpickle_outcomes(fn, desired_iterations):
    """Load the outcomes dict pickled in *fn* and return it if it was
    produced with *desired_iterations* iterations.

    :raises IOError: when the stored "ITERATIONS" count does not match
    """
    # Binary mode + context manager: pickle data is bytes (text mode breaks
    # under Python 3) and the handle is closed even if loading fails.
    with open(fn, 'rb') as fh:
        outcomes = Unpickler(fh).load()
    if outcomes["ITERATIONS"] == desired_iterations:
        return outcomes
    raise IOError
Пример #43
0
def unpickle_outcomes(fn, desired_iterations):
    """Load the outcomes dict pickled in *fn* and return it if it was
    produced with *desired_iterations* iterations.

    :raises IOError: when the stored "ITERATIONS" count does not match
    """
    # Binary mode + context manager: pickle data is bytes (text mode breaks
    # under Python 3) and the handle is closed even if loading fails.
    with open(fn, 'rb') as fh:
        outcomes = Unpickler(fh).load()
    if outcomes["ITERATIONS"] == desired_iterations:
        return outcomes
    raise IOError
Пример #44
0
 def loads(self, string):
     """Deserialize *string* with the shared unpickle memo and refresh the
     pickle memo afterwards so both sides stay in sync."""
     stream = StringIO(string)
     reader = Unpickler(stream)
     reader.memo = self._unpicklememo
     loaded = reader.load()
     self._updatepicklememo()
     return loaded
Пример #45
0
 def read_from_disk(self):
     """Load the registry pickle from self.file into self.reg; on any
     read/unpickle failure, log it and fall back to an empty registry."""
     try:
         # 'with' guarantees the handle is closed (the original leaked it).
         with open(self.file, 'rb') as reg_file:
             self.reg = Unpickler(reg_file).load()
     except (UnpicklingError, EOFError, FileNotFoundError):
         traceback.print_exc()
         self.reg = {}
         sys.stderr.write(
             "ERROR: could not load the registry from path {}".format(
                 self.file))
Пример #46
0
    def load_from_file(self, cache_file):
        """Merge the pickled cache stored in *cache_file* into this bucket.

        The stream holds three consecutive pickles: a format version (must
        be 1), an examined-entry count, and the dict of ring entries.

        :return: (count, stored) — entries examined vs actually stored
        :raises ValueError: on a format-version mismatch
        """
        now = time.time()
        unpick = Unpickler(cache_file)
        version = unpick.load()
        if version != 1: # pragma: no cover
            raise ValueError("Incorrect version of cache_file")
        count = unpick.load()
        stored = 0
        loaded_dict = unpick.load()
        if not self._dict:
            # bulk-update in C for speed
            stored = len(loaded_dict)
            self._dict.update(loaded_dict)
            for ring_entry in itervalues(loaded_dict):
                if self.size < self.limit:
                    self._ring.add(ring_entry)
                    self.size += len(ring_entry.key) + len(ring_entry.value)
                else:
                    # We're too big! ignore these things from now on.
                    # This is unlikely.
                    del self._dict[ring_entry.key]
        else:
            new_keys = set(loaded_dict.keys()) - set(self._dict.keys())
            stored += len(new_keys)
            # Loading more data into an existing bucket.
            # Load only the *new* keys, but don't care about LRU,
            # it's all screwed up anyway at this point
            for new_key in new_keys:
                new_ring_entry = loaded_dict[new_key]
                self._dict[new_key] = new_ring_entry
                self._ring.add(new_ring_entry)

                self.size += len(new_key) + len(new_ring_entry.value)
                if self.size >= self.limit: # pragma: no cover
                    break


        then = time.time()
        log.info("Examined %d and stored %d items from %s in %s",
                 count, stored, cache_file, then - now)
        return count, stored
Пример #47
0
 def load_models(self,model_files):
     """load model or list of models into self.model

     :param model_files: a pickle path or a list of pickle paths
     :return: the number of models loaded
     """
     if type(model_files)==str: model_files = [model_files]
     loaded = 0
     for mod_file in model_files:
         # Binary mode ('rb'): pickled models are bytes and text mode
         # breaks under Python 3; 'with' also closes the handle reliably.
         with open(mod_file, 'rb') as model:
             clf_RF = Unpickler(model).load()
         self.model.append(clf_RF)
         loaded += 1
     return loaded
Пример #48
0
 def load_sudoku(self, file):
     """Restore a saved sudoku from pickle *file* and rebuild the board.

     The save holds, pickled in order: the grid values, the per-cell
     modifiable flags, the pencil-mark possibilities, the chrono state
     and the difficulty level.
     """
     with open(file, "rb") as fich:
         dp = Unpickler(fich)
         grille = dp.load()
         modif = dp.load()
         possibilites = dp.load()
         chrono = dp.load()
         self.level = dp.load()
     self.nb_cases_remplies = 0
     self.reset_nbs()
     # Restart the timer from the saved chrono state.
     self.restart(*chrono)
     for i in range(9):
         for j in range(9):
             self.blocs[i, j].efface_case()
             if grille[i, j]:
                 # Filled cell: restore its digit.
                 self.nb_cases_remplies += 1
                 self.blocs[i, j].edit_chiffre(grille[i, j])
             else:
                 # Empty cell: restore its pencil marks.
                 for pos in possibilites[i][j]:
                     self.blocs[i, j].edit_possibilite(pos)
             self.blocs[i, j].set_modifiable(modif[i, j])
Пример #49
0
def doUnPickling(option, pickleFile, loqt=None, url=""):
    # Load previously pickled query/index data of the given *option* kind
    # from *pickleFile*; when the pickle is missing it is (re)built first
    # via doPickling, then read back.
    #
    # option -- one of "STEMMED_QUERIES", "EXTFILE_MAP", "TERM_STATS";
    #           selects how many objects are read from the stream.
    # loqt, url -- passed through to doPickling when the file must be built.
    # Returns the unpickled object(s); "TERM_STATS" returns a 4-tuple.
    from pickle import Unpickler

    if not os.path.isfile(pickleFile):
        print "DEBUG:[doUnPickling] PickleFile does not exist."
        # Build the pickle on demand before reading it back below.
        doPickling(option, pickleFile, loqt, url)
    f = open(pickleFile, "r")
    if option == "STEMMED_QUERIES":
        up = Unpickler(f)
        unpkldData = up.load()
        f.close()
        print "DEBUG:[doUnPickling] Unpickled " + option + ": "  # ,unpkldData
        return unpkldData
    elif option == "EXTFILE_MAP":
        print "DEBUG:[doUnPickling] Unpickling ->" + option
        up = Unpickler(f)
        extfile_map_hash = up.load()
        f.close()
        return extfile_map_hash
    elif option == "TERM_STATS":
        print "DEBUG:[doUnPickling] Unpickling ->" + option
        up = Unpickler(f)
        # TERM_STATS pickles four consecutive objects in this exact order.
        term_id_hash = up.load()
        docIdUniverse = up.load()
        corpus_stats = up.load()
        query_stats = up.load()
        f.close()
        #        print "DEBUG:[doUnPickling] Unpickled "+option+": corpus_stats",corpus_stats
        #        print "DEBUG:[doUnPickling] Unpickled "+option+": query_stats",query_stats
        return term_id_hash, docIdUniverse, corpus_stats, query_stats
    else:
        # NOTE(review): f is never closed on this error path, and the
        # function implicitly returns None.
        print "***ERROR***:[doPickling] Specify a correct option to UnPickle."
Пример #50
0
 def load(self):
     """Unpickle one object, routing BUILD/NEWOBJ/REDUCE opcodes through
     StateUnpickler's handlers for the duration of the call, then restore
     the stock Unpickler handlers.
     """
     # We overload the load_build method so update the dispatch dict.
     # NOTE(review): dispatch appears to be shared (class-level) state --
     # hence the restore below -- so concurrent loads would race; confirm.
     dispatch = self.dispatch
     dispatch[BUILD] = StateUnpickler.load_build
     dispatch[NEWOBJ] = StateUnpickler.load_newobj
     dispatch[REDUCE] = StateUnpickler.load_reduce
     # call the super class' method.
     ret = Unpickler.load(self)
     # Reset the Unpickler's dispatch
     dispatch[BUILD] = Unpickler.load_build
     dispatch[NEWOBJ] = Unpickler.load_newobj
     dispatch[REDUCE] = Unpickler.load_reduce
     return ret        
Пример #51
0
    def recuperer(self):
        """Reload saved user settings from ``user_settings.elr`` in the
        current directory, if that file exists.

        Shortcut bindings are merged into ``self.raccourcis``; every
        other stored attribute is copied onto this object as-is.
        """
        if "user_settings.elr" not in listdir(getcwd()):
            return
        with open("user_settings.elr", "rb") as saved_file:
            restored = Unpickler(saved_file).load()
        for name, value in restored.__dict__.items():
            if name == "raccourcis":
                # Merge shortcuts entry by entry instead of replacing
                # the whole mapping.
                for shortcut, binding in value.items():
                    self.raccourcis[shortcut] = binding
            else:
                setattr(self, name, value)
Пример #52
0
def unpickle(filename):
    """Return the first object pickled in *filename*.

    Raises OSError if the file cannot be opened; any unpickling error
    propagates to the caller.
    """
    # The context manager guarantees the file is closed even if load()
    # raises -- the same contract as the original try/finally, without
    # the manual close/None bookkeeping.
    with open(filename, "rb") as f:
        return Unpickler(f).load()
Пример #53
0
 def _load_blocked_artists(self):
     """Read the list of blocked artists from disk."""
     dump = os.path.join(self.get_cache_dir(), "autoqueue_block_cache")
     try:
         with open(dump, 'r') as pickle:
             unpickler = Unpickler(pickle)
             artists, times = unpickler.load()
             if isinstance(artists, list):
                 artists = deque(artists)
             if isinstance(times, list):
                 times = deque(times)
             self._blocked_artists = artists
             self._blocked_artists_times = times
     except IOError:
         pass
Пример #54
0
def doUnPickling(option,pickleFile):
    # Load pickled index data of kind *option* from *pickleFile*,
    # rebuilding the pickle via doPickling when the file is absent --
    # except for "TERM_CTFMAP", whose absence is fatal (exit(1)).
    # For option "CATALOG" returns the unpickled term->seek-offset hash.
    varRootHash = {}  # NOTE(review): never used or returned in this view.
    from pickle import Unpickler
    if not os.path.isfile(pickleFile):
        print "DEBUG:[doUnPickling] PickleFile does not exist."
        if option == "TERM_CTFMAP":
            print "****ERROR Unpickling****"; exit(1)
        # Build the pickle on demand before reading it back below.
        doPickling(option,pickleFile)
    f = open(pickleFile,"r")
    if option == "CATALOG" :
        print "DEBUG:[doUnpickling] UnPickling Catalog.."
        up = Unpickler(f)
        termSeekHash = up.load()
        f.close()
        return termSeekHash
    else:
        # NOTE(review): f is left open on this path and the function
        # implicitly returns None.
        print "******ERROR******* Specify correct pickle option"
Пример #55
0
def avoir_score(adresse_fichier_score, joueur):
    """Return *joueur*'s saved score from the score file, or 0.

    As a side effect, the whole score table is loaded into the
    module-level ``score`` global. An unreadable file reports an error
    and yields 0; an empty file silently yields 0.
    """
    global score
    try:
        with open(adresse_fichier_score, 'rb') as score_file:
            # Deserialize the complete score mapping.
            score = Unpickler(score_file).load()
    except IOError:
        print("erreur")
        return 0
    except EOFError:
        return 0
    return score.get(joueur, 0)
Пример #56
0
class WatcherThread(threading.Thread):
    """Thread that reads pickled file-change events from a child
    process's stdout and forwards each one to an actor."""

    def __init__(self, actor, p, logger):
        # actor  -- receiver of {'command': 'forward', ...} messages
        # p      -- subprocess whose stdout carries pickled JSON strings
        # logger -- kept only for the commented-out debug logging below
        self._actor = actor
        self._process = p
        self._logger = logger
        self._unpickler = Unpickler(p.stdout)
        threading.Thread.__init__(self)
        
    def run(self):
        """Loop forever: unpickle a JSON payload, parse it into a file
        change, and tell the actor to forward it."""
        while True:
            #if self._logger != None:
            #    self._logger.error('Waiting message')
            # Each message on the pipe is a pickled JSON string
            # describing one filesystem event.
            e = json.loads(self._unpickler.load())
            change = parse_file_change(e['ty'], e['pathname'], e['src_pathname'], e['context'], e['isdir'])
            #self._unpickler.memo = {}
            #if self._logger != None:
            #    self._logger.error('Message received')
            self._actor.tell({'command': 'forward', 'change': change})
Пример #57
0
    def _obtenir_partie_sauvegarde(self):
        """
            Récupère la dernière partie sauvegardée, s'il n'y en a pas,
            renvoie None.
        """

        pas_de_sauv_msg = "Aucune sauvegarde trouvée."

        try:
            with open(adresse_fichier_sauvegarde, 'rb') as fichier_sauvegarde:
                #Lecture du fichier
                unpick = Unpickler(fichier_sauvegarde)
                map_ = unpick.load()

                return map_
        except:
            print(pas_de_sauv_msg)
            return None
Пример #58
0
 def run(self):
   """Thread entry point: log in to the Ingress API, restore the cached
   inventory if one exists, start the worker threads, and wait on them.
   """
   self.api = Api(self.pwd["ingress"]["userEmail"], self.pwd["ingress"]["userPassword"])
   try:
     # Best-effort restore of the pickled inventory cache.
     with open(os.path.expanduser("~/.ingressbot.pkl"), "rb") as f:
       unpickler = Unpickler(f)
       self.inventory = unpickler.load()
   except:
     # NOTE(review): the bare except deliberately treats any failure as
     # "no cache", but it also swallows KeyboardInterrupt/SystemExit --
     # consider narrowing to Exception.
     pass
   
   self.threads.append(TimerThread(interval=10, target=self.refreshInventory))
   self.threads.append(TimerThread(interval=10, setup=self.setupRefreshChat, target=self.refreshChat))
   self.threads.append(Thread(target=self.serve_forever))
   self.send(self.cfg["master"], "IngressBot is up and running")
   for t in self.threads:
     t.start()
   # Join with a timeout so the loop can notice thread death (and
   # signals) instead of blocking indefinitely in a single join().
   for t in self.threads:
     while t.is_alive():
       t.join(timeout=3600.0)
    def load(self, max_pass=-1):
        """Read a pickled object representation from the open file.

        Return the reconstituted object hierarchy specified in the file.

        BUILD opcodes are routed through NewUnpickler.load_build for the
        duration of the call so every constructed object is collected in
        self.objects; initialize(max_pass) then post-processes them.
        """
        # List of objects to be unpickled.
        self.objects = []

        # We overload the load_build method.
        # NOTE(review): dispatch appears to be shared (class-level)
        # state -- hence the restore below; concurrent loads would race.
        dispatch = self.dispatch
        dispatch[BUILD] = NewUnpickler.load_build

        # call the super class' method.
        ret = Unpickler.load(self)
        self.initialize(max_pass)
        # Drop the collected references so they can be garbage-collected.
        self.objects = []

        # Reset the Unpickler's dispatch table.
        dispatch[BUILD] = Unpickler.load_build
        return ret