Beispiel #1
0
    def search(self, test=False):
        # Interactive search loop over a pickled term-document matrix.
        # Prompts until the user enters an empty query, printing ranked
        # (doc id, term-weight vector) results for each query.
        # NOTE(review): `test == True` could be just `test`; `abs` shadows the
        # builtin; the matrix file handle is never closed; raw_input, the
        # print statement and the cmp-style sort are Python 2 only.
        if test == True:
            abs = os.path.normpath(os.path.join(os.getcwd(), "db/matrix.mx"))
        else:
            ruta = os.path.split(sys.argv[0])
            abs = os.path.join(ruta[0], "db/matrix.mx")
        filemx = open(abs, 'r')
        serializer = Unpickler(filemx)
        self._mx = serializer.load()

        while True:
            cadena = raw_input("Search: ")
            if not cadena:
                return
            else:
                #search here
                # Normalize the query and split it into n-gram tokens.
                cadena = cadena.strip()
                cadena = cadena.lower()
                cad = self._prep.ngram_tokenizer(text=cadena)
                resultado = list()
                # Per document, collect the weights of the matched terms.
                for doc in self._mx.docs:
                    vector = list()
                    for q in cad:
                        if q in self._mx.terms:
                            pos = self._mx.terms.index(q)
                            vector.append(doc['terms'][pos])
                    resultado.append((doc['id'], vector))
                # Rank via the private Euclidean-distance helper (cmp sort).
                resultado.sort(lambda a, b: self.__Deuclidiana(a[1]) - self.
                               __Deuclidiana(b[1]),
                               reverse=True)
                print resultado
Beispiel #2
0
 def load(self, filepath):
     """Restore the regressor and both SVD factor models from the pickle at *filepath*."""
     with open(filepath, 'rb') as src:
         payload = Unpickler(src).load()
     self.regressor = payload['regressor']
     self.user_svd = payload['user_svd']
     self.item_svd = payload['item_svd']
def read_line1(name):  #Card name/setcodes
    """Compute the combined setcode for every archetype whose name occurs in *name*.

    Reads the pickled set collection from ``util/sets``; returns 0 when that
    file is missing.  Each surviving code occupies its own 16-bit slot of the
    packed integer result.
    """
    try:
        # 'with' guarantees the handle is closed (the original leaked it).
        with open("util/sets", "rb") as fichier:
            sets = Unpickler(fichier).load()
            sets.sort_by_names()
            # Codes of every archetype whose name appears in the card name.
            sets_in_name = [i.code for i in sets if i.name in name]
            # Remove a setcode when one of its sub-archetypes also matched.
            sets_to_remove = list()
            for code in sets_in_name:
                for sub in sets.get_archetype_by_code(code).subs:
                    if (sets.get_archetype_by_name(sub).code in sets_in_name
                            and code not in sets_to_remove):
                        sets_to_remove.append(code)
            for code in sets_to_remove:
                sets_in_name.remove(code)
            # Pack each remaining code into its own 16-bit slot.
            # BUG FIX: uses enumerate instead of list.index, so a duplicated
            # code gets its actual position (index always returned the first
            # occurrence, assigning both duplicates the same slot).
            total_setcode = 0
            for pos, code in enumerate(sets_in_name):
                total_setcode += int(code, 0) * pow(16, 4 * pos)
            return int(total_setcode)
    except FileNotFoundError:
        return 0
Beispiel #4
0
    def __init__(self, partIn, maxThreadsIn=8):
        """Set up a ValgrindAnalyzer for one release part.

        partIn       -- name of the release part to analyze
        maxThreadsIn -- maximum number of worker threads (default 8)
        """
        super(ValgrindAnalyzer, self).__init__()
        self.release = None
        self.plat    = None
        self.part    = partIn
        # Shared system libraries (and cmsRun itself) to skip in reports.
        self.ignoreLibs = ['libm-2.5.so', 'libpthread-2.5.so', 'cmsRun']
        self.libList = []

        prodFileName = os.environ['CMSSW_RELEASE_BASE']+'/src/ReleaseProducts.list'
        self.rpr = RelProdReader()
        self.rpr.readProducts(prodFileName)

        # BUG FIX: pickles must be opened in binary mode; the original
        # text-mode 'r' breaks unpickling under Python 3.  The with-block
        # also guarantees the handle is closed on error.
        vgCmdFileName = os.environ['CMSSW_RELEASE_BASE']+'/qaLogs/vgCmds.pkl'
        from pickle import Unpickler
        with open(vgCmdFileName, 'rb') as vgCmdFile:
            self.vgCmds = Unpickler(vgCmdFile).load()

        self.toDo = []

        # Worker-thread bookkeeping.
        self.threadList = []
        self.maxThreads = maxThreadsIn
        self.threadStatus = {}
        self.threadTiming = {}
Beispiel #5
0
    def search(self, buscar=""):
        # Search the pickled term-document matrix for the query *buscar*.
        # Returns ['vacio'] for an empty query, otherwise a list of
        # (doc id, term-weight vector) pairs ranked by the private
        # Euclidean-distance helper.
        # NOTE(review): `abs` shadows the builtin; the matrix file handle is
        # never closed; the cmp-style sort lambda is Python 2 only.
        ruta = os.path.split(sys.argv[0])
        abs = os.path.join(ruta[0], "motor/db/matrix.mx")
        filemx = open(abs, 'r')
        serializer = Unpickler(filemx)
        self._mx = serializer.load()

        cadena = buscar
        if not cadena:
            return ['vacio']
        else:
            #search here
            # Normalize the query and split it into n-gram tokens.
            cadena = cadena.strip()
            cadena = cadena.lower()
            cad = self._prep.ngram_tokenizer(text=cadena)
            resultado = list()
            # Per document, collect the weights of the matched terms.
            for doc in self._mx.docs:
                vector = list()
                for q in cad:
                    if q in self._mx.terms:
                        pos = self._mx.terms.index(q)
                        vector.append(doc['terms'][pos])
                resultado.append((doc['id'], vector))
            resultado.sort(lambda a, b: self.__Deuclidiana(a[1]) - self.
                           __Deuclidiana(b[1]),
                           reverse=True)
            return resultado
Beispiel #6
0
 def loads(self, s):
     """Deserialize *s*, resolving persistent ids through self._get_object."""
     unpickler = Unpickler(BytesIO(s))
     unpickler.persistent_load = self._get_object
     try:
         result = unpickler.load()
     except KeyError as exc:
         # A missing class registration surfaces as KeyError; re-raise it
         # as the pickling-specific error type.
         raise UnpicklingError("Could not find Node class for %s" % exc)
     return result
Beispiel #7
0
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads object saved
    with older versions of Eelbrain, and allows using a system file dialog to
    select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog will be
        shown. If the user cancels the file dialog, a RuntimeError is raised.
    """
    if file_path is None:
        # No path given: let the user pick one through the ui helper.
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            print(repr(file_path))
    else:
        file_path = os.path.expanduser(file_path)
        # Fall back to "<path>.pickled" when the bare path does not exist.
        if not os.path.exists(file_path):
            new_path = os.extsep.join((file_path, 'pickled'))
            if os.path.exists(new_path):
                file_path = new_path

    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        # map_paths presumably remaps legacy module references so pickles
        # from older Eelbrain versions still resolve (per the docstring).
        # NOTE(review): `find_global` is the Python 2 cPickle hook; Python 3
        # uses Unpickler.find_class — confirm which implementation is used.
        unpickler.find_global = map_paths
        obj = unpickler.load()

    return obj
Beispiel #8
0
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        # In-memory, blob-capable storage backing a throwaway database.
        database = DB(
            ZODB.blob.BlobStorage('blobs',
                                  ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        # Clone the committed blob by round-tripping it through an
        # in-memory pickle.
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        # The regression: invalidation must not blow up on the clone.
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Beispiel #9
0
def deepCopy(obj):
    """Clone *obj* by round-tripping it through an in-memory pickle."""
    buffer = StringIO()
    Pickler(buffer, 1).dump(obj)
    buffer.seek(0)
    return Unpickler(buffer).load()
Beispiel #10
0
 def __init__(self, outFile, saveFile):
     """Build the knowledge base, restoring saved state from *saveFile* when possible."""
     computerName=platform.node().capitalize()
     userName=getpass.getuser().capitalize()
     self.outFile=outFile
     self.saveFile=saveFile
     # Kind-name -> dynamic class map used when reloading pickled entities.
     self._kinds={"kind":kind, "adjective":adjective,"entity":entity,"name":name,"thing":thing,"person":person,
                  "computer":computer,"user":user,"infinitive":infinitive,"pronoun":pronoun,
                  "male":male,"female":female,"place":place,"location":location,"number":number,
                  "time":time}
     try:
         # BUG FIX: the original ended with `infile.close` (no parentheses),
         # which never closed the file; a with-block guarantees the close.
         with open(saveFile,'rb') as infile:
             unpickle = Unpickler(infile)
             kinds=unpickle.load()
             self._loadKinds(kinds, "entity")
             self._loadKinds(kinds, "adjective")
             globals().update(self._kinds)   #inject dynamic classes into globals
             self._entities,self._antecedents,self._names,self._adjectives=unpickle.load()
     except Exception: # was a bare except; the original comment suggests IOError was the intent
         # No usable save file: build the default entity set from scratch.
         self._entities={computerName:computer(computerName,self,True),userName:user(userName,self,True),"Vibranium":thing("Vibranium",self)}
         self._antecedents={"I":self._entities[userName],"you":self._entities[computerName]}
         self._names={}
         self._adjectives={}
         for key,value in self._entities.items():
             if value in self._names:
                 self._names[value].add(key)
             else:
                 self._names[value]={key}
     self._temp={}   #stores 'a/an' objects, possessives, prepositional phrases, and numbers
Beispiel #11
0
    def __init__(self, partIn, maxThreadsIn=8):
        """Set up a ValgrindAnalyzer for one release part.

        partIn       -- name of the release part to analyze
        maxThreadsIn -- maximum number of worker threads (default 8)
        """
        super(ValgrindAnalyzer, self).__init__()
        self.release = None
        self.plat = None
        self.part = partIn
        # Shared system libraries (and cmsRun itself) to skip in reports.
        self.ignoreLibs = ['libm-2.5.so', 'libpthread-2.5.so', 'cmsRun']
        self.libList = []

        prodFileName = os.environ[
            'CMSSW_RELEASE_BASE'] + '/src/ReleaseProducts.list'
        self.rpr = RelProdReader()
        self.rpr.readProducts(prodFileName)

        # BUG FIX: pickles must be opened in binary mode; the original
        # text-mode 'r' breaks unpickling under Python 3.  The with-block
        # also guarantees the handle is closed on error.
        vgCmdFileName = os.environ['CMSSW_RELEASE_BASE'] + '/qaLogs/vgCmds.pkl'
        from pickle import Unpickler
        with open(vgCmdFileName, 'rb') as vgCmdFile:
            self.vgCmds = Unpickler(vgCmdFile).load()

        self.toDo = []

        # Worker-thread bookkeeping.
        self.threadList = []
        self.maxThreads = maxThreadsIn
        self.threadStatus = {}
        self.threadTiming = {}
Beispiel #12
0
def _unpickle(pickled):
    """ Unpickles a string and catch all types of errors it can throw,
    to raise only NotReadableJobError in case of error.

    odoo stores the text fields as 'utf-8', so we specify the encoding.

    `loads()` may raises many types of exceptions (AttributeError,
    IndexError, TypeError, KeyError, ...). They are all catched and
    raised as `NotReadableJobError`).

    Pickle could be exploited by an attacker who would write a value in a job
    that would run arbitrary code when unpickled. This is why we set a custom
    ``find_global`` method on the ``Unpickler``, only jobs and a whitelist of
    classes/functions are allowed to be unpickled (plus the builtins types).
    """
    def restricted_find_global(mod_name, fn_name):
        # Import the module, then only hand the attribute back when it is
        # explicitly whitelisted (job registry or the unpickle whitelist).
        __import__(mod_name)
        mod = sys.modules[mod_name]
        fn = getattr(mod, fn_name)
        if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST):
            raise UnpicklingError('{}.{} is not allowed in jobs'.format(
                mod_name, fn_name))
        return fn

    # NOTE(review): `find_global` is the Python 2 (cPickle) hook; a Python 3
    # port needs an Unpickler subclass overriding find_class instead.
    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = restricted_find_global
    try:
        unpickled = unpickler.load()
    # NOTE(review): UnpicklingError already subclasses Exception, so listing
    # it here is redundant (harmless, but `Exception` alone suffices).
    except (Exception, UnpicklingError):
        raise NotReadableJobError('Could not unpickle.', pickled)
    return unpickled
Beispiel #13
0
 def recover_snapshot(self):
     """Load and return the most recent snapshot from this store's base directory.

     Propagates the usual OSError when the snapshot file cannot be opened.
     """
     # NOTE: open() never returns a falsy handle (it raises instead), so the
     # original `if not file: return None` check was dead code; the handle
     # was also never closed.
     snapshot_path = path.join(self.basedir, fu.last_snapshot_file(self.basedir))
     with open(snapshot_path, "rb") as snapshot_file:
         logger.debug("Recovering snapshot from: " + snapshot_file.name)
         return Unpickler(snapshot_file).load()
Beispiel #14
0
    def test_02_parse_peaks(self):
        """
        function
        """
        # Two BED peak files to be crossed against each other.
        peak_files = [
            os_join(TEST_PATH, 'data', 'peaks_protA.bed'),
            os_join(TEST_PATH, 'data', 'peaks_protB.bed')
        ]
        in_feature = False
        # Load the bad-columns pickle; retry without the encoding argument
        # for Unpickler implementations that do not accept `encoding`
        # (the TypeError fires at construction, before any read).
        biases = os_join(TEST_PATH, 'data', 'biases.pickle')
        fh = open(biases, "rb")
        try:
            badcols = Unpickler(fh, encoding='latin1').load()['badcol']
        except TypeError:
            badcols = Unpickler(fh).load()['badcol']
        fh.close()
        peak_coord1, peak_coord2, npeaks1, npeaks2, submatrices, coord_conv = parse_peaks(
            peak_files[0], peak_files[1], RESOLUTION, in_feature, CHROM_SIZES,
            badcols, SECTION_POS, WINDOWS_SPAN)

        # Stash results in module globals for later tests in this suite.
        global COORD_CONV
        COORD_CONV = coord_conv
        global SUBMATRICES
        SUBMATRICES = submatrices
        self.assertEqual(peak_coord1, PEAK_COORD1)
        self.assertEqual(peak_coord2, PEAK_COORD2)
        self.assertEqual(npeaks1, 6)
        self.assertEqual(npeaks2, 14)
Beispiel #15
0
 def load_train_samples(self):
     """Load the train-sample history pickle, trimming it to the configured length.

     When the file is missing, asks the user whether to continue and exits
     on any answer other than "y".
     """
     samples_file = os.path.join(
         self.config.load_samples_folder_file[0],
         self.config.load_samples_folder_file[1],
     )
     if not os.path.isfile(samples_file):
         print(samples_file)
         r = input("File with train samples not found. Continue? [y|n]")
         if r != "y":
             sys.exit()
     else:
         print("File with train samples found. Read it.")
         with open(samples_file, "rb") as f:
             self.train_samples_history = Unpickler(f).load()
         # (the original's stray `f.closed` attribute access did nothing;
         # the with-block above already closes the file)
         # Drop the oldest iterations until the history fits the cap.
         while (
             len(self.train_samples_history)
             > self.config.num_iters_for_train_samples_history
         ):
             print(
                 "len(train_samples_history) =",
                 len(self.train_samples_history),
                 " => remove the oldest train_samples",
             )
             self.train_samples_history.pop(0)
         # examples based on the model were already collected (loaded)?
         self.skip_first_self_play = self.config.skip_first_self_play
Beispiel #16
0
    def refit(self, samples):
        """Retrain the classifier on the stored data plus *samples* and persist both.

        samples -- iterable of (hate, sentence) rows appended to the saved frame
        """
        # BUG FIX: the original passed 'rb' as a third argument to
        # resource_filename() instead of as the mode of open(); the handle
        # was also never closed.
        df_filename = pkg_resources.resource_filename('hate_cl',
                                                      'hate_cl/data.sav')
        with open(df_filename, 'rb') as f:
            df = Unpickler(f).load()
        aux_df = DataFrame(samples, columns=['hate', 'sentence'])
        df = df.append(aux_df, ignore_index=True)
        print(df)
        X = df['sentence'].tolist()
        y = df['hate'].tolist()
        # Character/word n-gram TF-IDF features into a random forest.
        cl = Pipeline([('tfidf', TfidfVectorizer(ngram_range=(1, 4))),
                       ('clf',
                        RandomForestClassifier(n_estimators=100,
                                               max_depth=None,
                                               min_samples_leaf=1,
                                               min_samples_split=2,
                                               min_weight_fraction_leaf=0))])
        cl.fit(X, y)
        self.classifier = cl
        cl_filename = pkg_resources.resource_filename(
            'hate_cl', 'hate_cl/randomforest.sav')

        # Persist the refitted classifier and the augmented data frame.
        with open(cl_filename, 'wb') as f:
            Pickler(f).dump(cl)

        with open(df_filename, 'wb') as f:
            Pickler(f).dump(df)
    def load_brain(self):
        """
        Load the actor and critic's configurations from the corresponding files.
        :return: True if configuration loaded, False otherwise
        """

        try:
            # Restore both halves of the brain from their pickled files.
            with open("Config/actor.cfg", "rb") as cfg_file:
                self._actor = Unpickler(cfg_file).load()
            with open("Config/critic.cfg", "rb") as cfg_file:
                self._critic = Unpickler(cfg_file).load()
        except IOError as error:
            # Display a message detailing the error
            print("No prevously recorded actor and/or critic configuration")
            # Return False to warn that something bad happened, leaving the actor and critic in an unknown state
            return False

        # Both loads succeeded.
        return True
Beispiel #18
0
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        # In-memory, blob-capable storage backing a throwaway database.
        database = DB(ZODB.blob.BlobStorage(
            'blobs', ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        # Clone the committed blob by round-tripping it through an
        # in-memory pickle.
        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        # The regression: invalidation must not blow up on the clone.
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Beispiel #19
0
def applyConfiguration(index):
    # Apply the configuration whose file name is selected at *index* in the
    # `configurations` widget (Python 2 module: print statements).
    file = configurations[index].value
    if not file:
        print 'no file selected'
        return

    index = 0
    # Scan the ';'-separated load list for a path with the chosen base name.
    for path in filter(None, cnfg_load.value.split(';')):
        if file == os.path.basename(path):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                # The stream starts with a 'KKB' magic marker pickle.
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    # Then alternating attribute-name / value pickles for
                    # every public data attribute of the model.
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        # for/else: no break, so every attribute matched.
                        print 'apply:', os.path.basename(path)
                        model.apply()
            finally:
                fh.close()
def test_unpickle_gpuarray_as_numpy_ndarray_flag2():
    # With `unpickle_gpu_on_cpu` enabled, a pickled GpuArray fixture must
    # come back as a plain numpy ndarray; the flag is restored afterwards.
    oldflag = config.experimental.unpickle_gpu_on_cpu
    config.experimental.unpickle_gpu_on_cpu = True

    try:
        # The fixture pickle sits next to this test file.
        testfile_dir = os.path.dirname(os.path.realpath(__file__))
        fname = "GpuArray.pkl"

        with open(os.path.join(testfile_dir, fname), "rb") as fp:
            # latin1 lets a Python-2-written pickle load under Python 3.
            u = Unpickler(fp, encoding="latin1")
            try:
                mat = u.load()
            except ImportError:
                # Windows sometimes fail with nonsensical errors like:
                #   ImportError: No module named type
                #   ImportError: No module named copy_reg
                # when "type" and "copy_reg" are builtin modules.
                # NOTE(review): both branches re-raise; the win32 case only
                # captures sys.exc_info() first.
                if sys.platform == "win32":
                    exc_type, exc_value, exc_trace = sys.exc_info()
                    raise
                raise

        assert isinstance(mat, np.ndarray)
        assert mat[0] == -42.0

    finally:
        config.experimental.unpickle_gpu_on_cpu = oldflag
Beispiel #21
0
 def loads(self, s):
     """Unpickle the bytes *s*, mapping persistent ids via self._get_object."""
     try:
         up = Unpickler(BytesIO(s))
         up.persistent_load = self._get_object
         return up.load()
     except KeyError as e:
         raise UnpicklingError("Could not find Node class for %s" % e)
Beispiel #22
0
    def charger_donnees(self):
        """
        Called when the user object is created: loads every piece of saved
        data from disk, or creates default data when the player has no
        save file yet.
        """

        # LOAD SAVED DATA

        if "save.elh" in listdir(getcwd()):

            with open("save.elh", "rb") as fichier_sauvegarde:
                unpickler = Unpickler(fichier_sauvegarde)
                utilisateur_sauvegarde = unpickler.load()
                # Copy every attribute of the saved user onto this instance.
                for key, value in utilisateur_sauvegarde.__dict__.items():
                    self.__setattr__(key, value)

        # CREATE DEFAULT DATA

        else:

            # Default keyboard bindings (sf is presumably the SFML binding).
            self.raccourcis["deplacer-haut"] = sf.Keyboard.Z
            self.raccourcis["deplacer-bas"] = sf.Keyboard.S
            self.raccourcis["deplacer-droite"] = sf.Keyboard.D
            self.raccourcis["deplacer-gauche"] = sf.Keyboard.Q
            self.raccourcis["menu"] = sf.Keyboard.ESCAPE
            self.raccourcis["interagir"] = sf.Keyboard.E
            self.raccourcis["inventaire"] = sf.Keyboard.SPACE
            self.raccourcis["map"] = sf.Keyboard.M

            self.sauvegarde_utilisable = False
            self.sauvegarde = dict()
Beispiel #23
0
def runConfigurations():
    # Load every configuration listed in the ';'-separated `cnfg_load`
    # widget value and start a scan for each (Python 2 module).
    if cnfg_load.value:
        for path in filter(None, cnfg_load.value.split(';')):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                # The stream starts with a 'KKB' magic marker pickle.
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    # Then alternating attribute-name / value pickles for
                    # every public data attribute of the model.
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        # for/else: no break, so every attribute matched.
                        print 'run:', os.path.basename(path)
                        startScan(model)
            finally:
                fh.close()
Beispiel #24
0
    def load_history(self, filename):
        """
        Loads a game history from a file. A file can optionally
        contain one or many History classes, and this method
        can be extended with optional arguments to specify how
        many histories to load.

        Parameters
        ----------
        filename : str
            File to load history from.

        Returns
        -------
        self

        Raises
        ------
        FileNotFoundError
            If *filename* exists neither locally nor under the checkpoint dir.
        """

        filepath = filename
        if not os.path.exists(filepath):
            filepath = os.path.join(self.args.checkpoint_dir, filename)
            if not os.path.exists(filepath):
                # BUG FIX: the original `raise("...")` raised a TypeError
                # (strings are not exceptions); raise a real exception.
                raise FileNotFoundError(
                    "No checkpoint in local file {} or path {}!".format(
                        filename, filepath))

        with open(filepath, "rb") as f:
            log.info(f"Loading History from {filepath}")
            self.trainExamplesHistory = Unpickler(f).load()

        return self
Beispiel #25
0
 def readData(self):
     """Load the navigator summary pickle into self.data."""
     import config
     pklFileName = config.siteInfo['qaPath'] + '/navigator-summary.pkl'
     from pickle import Unpickler
     # BUG FIX: binary mode ('rb') is required for pickle files; the
     # original's text-mode 'r' fails under Python 3.  The with-block
     # also guarantees the close on error.
     with open(pklFileName, 'rb') as pklIn:
         self.data = Unpickler(pklIn).load()
Beispiel #26
0
    def _reader(self, data):
        """Replay each pickled record in *data* as arguments to self._process."""
        stream_reader = Unpickler(StringIO(data))

        # EOFError from load() marks the end of the concatenated-pickle
        # stream (and, as in the original, an EOFError escaping _process
        # also stops the loop).
        while True:
            try:
                self._process(*stream_reader.load())
            except EOFError:
                break
Beispiel #27
0
    def charger_partie(self):
        """Return the saved game named self.nom, or None when no such save exists."""
        chemin = os.path.join("parties", self.nom)

        # Only read when a save file with this exact name is present.
        if self.nom in os.listdir("parties"):
            with open(chemin, "rb") as sauvegarde:
                return Unpickler(sauvegarde).load()
Beispiel #28
0
    def _reader(self, data):
        # Stream-decode *data*: it holds a sequence of concatenated pickles,
        # each one an argument tuple for self._process.
        unpickler = Unpickler(StringIO(data))

        while True:
            try:
                # EOFError from load() signals the end of the stream.
                self._process(*unpickler.load())
            except EOFError:
                break
Beispiel #29
0
 def readData(self):
     """Load the navigator summary pickle into self.data."""
     import config
     pklFileName = config.siteInfo['qaPath']+'/navigator-summary.pkl'
     from pickle import Unpickler
     # BUG FIX: binary mode ('rb') is required for pickle files; the
     # original's text-mode 'r' fails under Python 3.  The with-block
     # also guarantees the close on error.
     with open(pklFileName,'rb') as pklIn:
         self.data = Unpickler(pklIn).load()
Beispiel #30
0
    def unpack(self, packet):
        """Rebuild an object from the bytes *packet*, mapping persistent ids to world tokens."""
        from pickle import Unpickler
        from io import BytesIO

        unpickler = Unpickler(BytesIO(packet))
        # A persistent id is a token id; resolve it through the owning world.
        unpickler.persistent_load = lambda id: self.world.get_token(int(id))
        return unpickler.load()
Beispiel #31
0
 def loads(self, string):
     # Unpickle *string* while sharing this object's persistent unpickle
     # memo across calls — presumably so object identity is preserved
     # between successive loads (confirm against the pickler side).
     f = StringIO(string)
     unpickler = Unpickler(f)
     unpickler.memo = self._unpicklememo
     res = unpickler.load()
     # Keep the pickle-side memo in sync with what was just read.
     self._updatepicklememo()
     #print >>debug, "loaded", res
     #print >>debug, "unpicklememo", self._unpicklememo
     return res
Beispiel #32
0
def getScoreDictionary(fileName):
    """Return the score dictionary pickled in *fileName*, or None when the file is missing."""
    try:
        with open(fileName, "rb") as scoreFile:
            return Unpickler(scoreFile).load()
    except FileNotFoundError:
        # No saved scores yet — same as the original's untouched default.
        return None
Beispiel #33
0
def load(filename):
    # Restore an object graph from *filename* (Python 2: print statements,
    # copy_reg).  Method-pickling support is registered first so bound
    # methods inside the graph can be rebuilt via _unpickle_method.
    print "* Loading..."
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    in_file = open(filename, "rb")
    unpickler = Unpickler(in_file)
    # persistent_load resolves persistent ids embedded in the pickle.
    unpickler.persistent_load = persistent_load
    loaded = unpickler.load()
    print "* Loaded!"
    # NOTE(review): in_file is never closed.
    return loaded
Beispiel #34
0
 def loads(self, string):
     # Unpickle *string* while sharing this object's persistent unpickle
     # memo across calls — presumably so object identity is preserved
     # between successive loads (confirm against the pickler side).
     f = StringIO(string)
     unpickler = Unpickler(f)
     unpickler.memo = self._unpicklememo
     res = unpickler.load()
     # Keep the pickle-side memo in sync with what was just read.
     self._updatepicklememo()
     #print >>debug, "loaded", res
     #print >>debug, "unpicklememo", self._unpicklememo
     return res
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism.
    """
    buf = StringIO()
    pickler = Pickler(buf, 1)
    # aq_base strips the acquisition wrapper before pickling.
    pickler.dump(aq_base(obj))
    buf.seek(0)
    return Unpickler(buf).load()
    def testPickleUnpickle(self):
        """Round-trip the Allow singleton through pickle and check identity."""
        buffer = BytesIO()
        Pickler(buffer).dump(Allow)
        buffer.seek(0)
        newAllow = Unpickler(buffer).load()

        # Unpickling must hand back the very same singleton object.
        self.assertTrue(newAllow is Allow)
Beispiel #37
0
 def loadnetwork(self, filename):
     """Load the pickled network stored in *filename* into self.res."""
     from pickle import Unpickler
     # BUG FIX: the Python 2 builtin `file(...)` no longer exists; use
     # open() with a with-block so the handle is always closed.
     with open(filename, "rb") as f:
         self.res = Unpickler(f).load()
Beispiel #38
0
def unpickle_outcomes(fn, desired_iterations):
    """Return the outcomes dict pickled in *fn*.

    Raises IOError when the stored "ITERATIONS" count differs from
    *desired_iterations*, signalling the caller to regenerate the data.
    """
    # BUG FIX: binary mode ('rb') is required for pickle files; the
    # original's text-mode 'r' fails under Python 3.
    with open(fn, 'rb') as fh:
        outcomes = Unpickler(fh).load()
    if outcomes["ITERATIONS"] == desired_iterations:
        return outcomes
    raise IOError
Beispiel #39
0
def restore(pickle_buffer):
    #print "* Restoring..."
    # Rewind *pickle_buffer* and unpickle an object graph from it
    # (Python 2: copy_reg).  Method-pickling support is registered first
    # so bound methods inside the graph can be rebuilt.
    copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
    pickle_buffer.seek(0)
    unpickler = Unpickler(pickle_buffer)
    # persistent_load resolves persistent ids embedded in the pickle.
    unpickler.persistent_load = persistent_load
    obj = unpickler.load()
    #print "* Restored!"
    return obj
    def load(self, param):
        """Restore self.path from syncdata.bin when present; warn otherwise."""
        if not os.path.isfile('syncdata.bin'):
            print('File not found. Session saved?')
            return

        with open('syncdata.bin', 'rb') as f:
            self.path = Unpickler(f).load()

        self.clear()
        print('Data loaded...')
Beispiel #41
0
def unpickle_outcomes(fn, desired_iterations):
    """Return the outcomes dict pickled in *fn*.

    Raises IOError when the stored "ITERATIONS" count differs from
    *desired_iterations*, signalling the caller to regenerate the data.
    """
    # BUG FIX: binary mode ('rb') is required for pickle files; the
    # original's text-mode 'r' fails under Python 3.
    with open(fn, 'rb') as fh:
        outcomes = Unpickler(fh).load()
    if outcomes["ITERATIONS"] == desired_iterations:
        return outcomes
    raise IOError
def doUnPickling(option, pickleFile, loqt=None, url=""):
    # Load cached data from *pickleFile*, regenerating it first via
    # doPickling() when the file is missing (Python 2 module).
    # Stream layout depends on *option*:
    #   STEMMED_QUERIES -> one pickled object
    #   EXTFILE_MAP     -> one pickled hash
    #   TERM_STATS      -> four consecutive pickles
    from pickle import Unpickler

    if not os.path.isfile(pickleFile):
        print "DEBUG:[doUnPickling] PickleFile does not exist."
        doPickling(option, pickleFile, loqt, url)
    f = open(pickleFile, "r")
    if option == "STEMMED_QUERIES":
        up = Unpickler(f)
        unpkldData = up.load()
        f.close()
        print "DEBUG:[doUnPickling] Unpickled " + option + ": "  # ,unpkldData
        return unpkldData
    elif option == "EXTFILE_MAP":
        print "DEBUG:[doUnPickling] Unpickling ->" + option
        up = Unpickler(f)
        extfile_map_hash = up.load()
        f.close()
        return extfile_map_hash
    elif option == "TERM_STATS":
        print "DEBUG:[doUnPickling] Unpickling ->" + option
        up = Unpickler(f)
        term_id_hash = up.load()
        docIdUniverse = up.load()
        corpus_stats = up.load()
        query_stats = up.load()
        f.close()
        #        print "DEBUG:[doUnPickling] Unpickled "+option+": corpus_stats",corpus_stats
        #        print "DEBUG:[doUnPickling] Unpickled "+option+": query_stats",query_stats
        return term_id_hash, docIdUniverse, corpus_stats, query_stats
    else:
        # NOTE(review): f is left open (and nothing returned) on this path.
        print "***ERROR***:[doPickling] Specify a correct option to UnPickle."
    def __init__(self, *args, **kwargs):
        # Extend the base Unpickler, re-routing a set of opcode handlers
        # through this instance's own load_* overrides.
        Unpickler.__init__(self, *args, **kwargs)

        # Each lambda discards the dispatch argument and invokes the bound
        # override instead.  NOTE(review): unless `dispatch` is copied per
        # instance, this mutates the class-level dispatch table shared by
        # every pure-Python Unpickler — confirm that is intended.
        self.dispatch[pickle.BININT] = lambda x: self.load_binint()
        self.dispatch[pickle.BININT2] = lambda x: self.load_binint2()
        self.dispatch[pickle.LONG4] = lambda x: self.load_long4()
        self.dispatch[pickle.BINSTRING] = lambda x: self.load_binstring()
        self.dispatch[pickle.BINUNICODE] = lambda x: self.load_binunicode()
        self.dispatch[pickle.EXT2] = lambda x: self.load_ext2()
        self.dispatch[pickle.EXT4] = lambda x: self.load_ext4()
        self.dispatch[pickle.LONG_BINGET] = lambda x: self.load_long_binget()
        self.dispatch[pickle.LONG_BINPUT] = lambda x: self.load_long_binput()
Beispiel #44
0
 def load_models(self,model_files):
     """load model or list of models into self.model

     model_files -- a single path or a list of paths to pickled models
     Returns the number of models loaded.
     """
     # Accept a single path as well as a list of paths.
     if isinstance(model_files, str): model_files = [model_files]
     count = 0
     for mod_file in model_files:
         # BUG FIX: pickles must be opened in binary mode ('rb'); the
         # original's text-mode 'r' fails under Python 3.  The with-block
         # also guarantees the handle is closed.
         with open(mod_file,'rb') as model:
             clf_RF = Unpickler(model).load()
         self.model.append(clf_RF)
         count += 1
     return count
Beispiel #45
0
    def recuperer(self):
        """Reload saved user settings from user_settings.elr, when present."""
        if "user_settings.elr" not in listdir(getcwd()):
            return

        with open("user_settings.elr", "rb") as fichier:
            options_recuperees = Unpickler(fichier).load()

        for key, item in options_recuperees.__dict__.items():
            if key == "raccourcis":
                # Merge shortcuts key by key instead of replacing the dict.
                for raccourci, donnees in item.items():
                    self.raccourcis[raccourci] = donnees
            else:
                self.__setattr__(key, item)
Beispiel #46
0
def load(fn):
    """Read the experiment pickle *fn* and return (dis_list, freq_pairs, exp_list).

    The file holds three consecutive pickles: a distance list, a dict keyed
    by (excitation, emission) frequency tuples, and an experiment list.
    """
    # BUG FIX: binary mode ('rb') is required for pickle files (the original
    # opened in text mode and never closed the handle).
    with open(fn, 'rb') as fh:
        aunpickler = Unpickler(fh)
        dis_list = aunpickler.load()
        data4hz = aunpickler.load()
        exp_list = aunpickler.load()
    # Convert the frequency dict into FreqPair objects.
    pairs = []
    for hzkey in data4hz:
        p = FreqPair()
        p.ex = hzkey[0]
        p.em = hzkey[1]
        p.flu = data4hz[hzkey]
        pairs.append(p)
    return (dis_list, pairs, exp_list)
Beispiel #47
0
def unpickle(filename):
    """Load and return the object pickled in the binary file *filename*."""
    handle, result = None, None
    try:
        handle = open(filename, "rb")
        result = Unpickler(handle).load()
        handle.close()
        handle = None
    finally:
        # Close the handle if an error interrupted the happy path above.
        if handle:
            handle.close()
    return result
Beispiel #48
0
def fromFile(worldDir):
    '''Load a world object from a campaign bundle'''
    # The bundle stores two consecutive pickles: a format-version marker
    # followed by the world object itself.
    pickleFileName = os.path.join(worldDir, 'data.pickle')
    # NOTE(review): the handle from open() is never closed, and the local
    # name `pickle` shadows the pickle module.
    pickle = Unpickler(open(pickleFileName, 'rb'))
    fileVersion = pickle.load()
    if fileVersion != openfrontier.PICKLE_VERSION:
        raise TypeError(
                "Data version mismatch: expected {0}; got {1}.".format(
                    openfrontier.PICKLE_VERSION, fileVersion))
    world = pickle.load()
    # Guard against stale or corrupt bundles unpickling to something else.
    if not isinstance(world, OFWorld):
        raise TypeError(
                "Expected OFWorld; got {0}".format(type(world).__name__))
    return world
def doUnPickling(option, pickleFile):
    """Unpickle and return the object stored in *pickleFile* for a known
    *option* ("STEM_TEXT", "TERM_CTFMAP", "STOPLIST", "CATALOG").

    If the pickle file is missing, TERM_CTFMAP is fatal (exit(1));
    otherwise doPickling() is invoked to (re)create it first. Unknown
    options return None, matching the original's implicit fall-through.

    Fixes vs. original: the four identical unpickle branches are
    consolidated, the file is opened in binary mode, and the handle is
    always closed (the original leaked it on the unknown-option path).
    """
    from pickle import Unpickler
    if not os.path.isfile(pickleFile):
        print("DEBUG:[doUnPickling] PickleFile does not exist.")
        if option == "TERM_CTFMAP":
            print("****ERROR Unpickling****")
            exit(1)
        doPickling(option, pickleFile)

    # Per-option progress messages (STEM_TEXT intentionally silent, as
    # in the original where its debug print was commented out).
    debug_msgs = {
        "TERM_CTFMAP": "DEBUG:[doUnpickling] UnPickling Term CTF...",
        "STOPLIST": "DEBUG:[doUnpickling] UnPickling stoplist...",
        "CATALOG": "DEBUG:[doUnpickling] UnPickling Catalog..",
    }
    if option not in ("STEM_TEXT", "TERM_CTFMAP", "STOPLIST", "CATALOG"):
        return None
    if option in debug_msgs:
        print(debug_msgs[option])
    with open(pickleFile, "rb") as f:
        return Unpickler(f).load()
Beispiel #50
0
 def _load_blocked_artists(self):
     """Read the list of blocked artists from disk.

     Restores ``self._blocked_artists`` and ``self._blocked_artists_times``
     from the "autoqueue_block_cache" pickle in the cache directory,
     upgrading plain lists (older cache format, presumably) to deques.
     A missing/unreadable cache file is silently ignored.
     """
     dump = os.path.join(self.get_cache_dir(), "autoqueue_block_cache")
     try:
         # 'rb', not 'r': pickle streams are bytes (the original's text
         # mode breaks on Python 3 and on Windows newline translation).
         with open(dump, 'rb') as pickle:
             unpickler = Unpickler(pickle)
             artists, times = unpickler.load()
             if isinstance(artists, list):
                 artists = deque(artists)
             if isinstance(times, list):
                 times = deque(times)
             self._blocked_artists = artists
             self._blocked_artists_times = times
     except IOError:
         # No cache yet — keep whatever defaults the instance already has.
         pass
    def __init__(self, *args, **kwargs):
        """Build an Unpickler whose handlers for selected opcodes are
        replaced by this class's own load_* methods.

        Extra keyword arguments (consumed before delegating to
        Unpickler.__init__):
            impl: tag of the source interpreter implementation the
                pickle came from (default "CP27" — CPython 2.7,
                presumably; confirm with the class's other methods).
            dump_code: debugging flag stored on the instance — TODO
                confirm what the overrides do with it.
        """
        self._src_impl = kwargs.pop("impl", "CP27")
        self._dump_code = kwargs.pop("dump_code", False)
        Unpickler.__init__(self, *args, **kwargs)

        # Patch the dispatch table: each pickle opcode below is re-routed
        # to the corresponding load_* override defined on this class.
        self._override_dispatch(pickle.BININT, self.load_binint)
        self._override_dispatch(pickle.BININT2, self.load_binint2)
        self._override_dispatch(pickle.LONG4, self.load_long4)
        self._override_dispatch(pickle.BINSTRING, self.load_binstring)
        self._override_dispatch(pickle.BINUNICODE, self.load_binunicode)
        self._override_dispatch(pickle.EXT2, self.load_ext2)
        self._override_dispatch(pickle.EXT4, self.load_ext4)
        self._override_dispatch(pickle.LONG_BINGET, self.load_long_binget)
        self._override_dispatch(pickle.LONG_BINPUT, self.load_long_binput)
        self._override_dispatch(pickle.REDUCE, self.load_reduce)
Beispiel #52
0
class Environment(object):
    """Pickle-backed store for a single environment dict (Python 2).

    NOTE(review): the backing file is opened with mode 'w+', which
    truncates "environment.pickle" on every construction — so the
    EOFError branch always fires and previously written data is lost.
    'r+'/'a+' was probably intended; left unchanged here to avoid a
    behavior change.
    """

    # Shared file handle used by both the reader and the writer below.
    handle = None
    # Unpickler bound to `handle` (reads the stored dict).
    loader = None
    # Pickler bound to `handle` (persists the dict).
    writer = None
    # The in-memory environment dict.
    data = None


    def __init__(self):
    
        self.handle = open("environment.pickle", 'w+')
        self.loader = Unpickler(self.handle)
        self.writer = Pickler(self.handle)

        try:
            self.data = self.loader.load()
        except EOFError:
            # Empty (or just-truncated) file: start a fresh environment
            # and persist it immediately.
            print "WARNING: Empty environment, creating environment file."
            self.data = {}
            self.write(self.data)


    def write(self, data):
        # Append-pickle `data` to the backing file via the shared Pickler.
        self.writer.dump(data)
def avoir_score(adresse_fichier_score, joueur):
    """Open the score file (if it exists) and return the stored score for
    *joueur*, or 0 when the player is unknown or the file is missing or
    empty. Also loads the entire score mapping into the module-level
    ``score`` variable as a side effect.
    """
    global score

    try:
        with open(adresse_fichier_score, 'rb') as fichier_score:
            # Deserialize the whole score mapping from the file.
            score = Unpickler(fichier_score).load()
    except IOError:
        print("erreur")
        return 0
    except EOFError:
        return 0

    return score.get(joueur, 0)
Beispiel #54
0
def doUnPickling(option, pickleFile):
    """Unpickle and return the catalog stored in *pickleFile*.

    Only option "CATALOG" is supported; any other option prints an error
    and returns None (matching the original's implicit fall-through).
    If the file is missing, TERM_CTFMAP is fatal (exit(1)); otherwise
    doPickling() is invoked to (re)create it first.

    Fixes vs. original: binary mode for the pickle stream, and the
    handle is always closed (the original leaked it on the error path
    by opening before validating the option).
    """
    from pickle import Unpickler
    if not os.path.isfile(pickleFile):
        print("DEBUG:[doUnPickling] PickleFile does not exist.")
        if option == "TERM_CTFMAP":
            print("****ERROR Unpickling****")
            exit(1)
        doPickling(option, pickleFile)

    if option != "CATALOG":
        print("******ERROR******* Specify correct pickle option")
        return None

    print("DEBUG:[doUnpickling] UnPickling Catalog..")
    with open(pickleFile, "rb") as f:
        return Unpickler(f).load()
Beispiel #55
0
    def _obtenir_partie_sauvegarde(self):
        """
            Return the last saved game map, or None when no savegame
            can be read (missing file, unpicklable content, ...).
        """

        pas_de_sauv_msg = "Aucune sauvegarde trouvée."

        try:
            with open(adresse_fichier_sauvegarde, 'rb') as fichier_sauvegarde:
                # Read the saved map back from the pickle file.
                unpick = Unpickler(fichier_sauvegarde)
                map_ = unpick.load()

                return map_
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer silently swallowed; any ordinary
            # failure still degrades to "no savegame".
            print(pas_de_sauv_msg)
            return None
Beispiel #56
0
 def run(self):
   """Main entry point: log in to the Ingress API, restore the cached
   inventory if one exists, start the worker threads, and block until
   they all finish."""
   self.api = Api(self.pwd["ingress"]["userEmail"], self.pwd["ingress"]["userPassword"])
   try:
     with open(os.path.expanduser("~/.ingressbot.pkl"), "rb") as f:
       unpickler = Unpickler(f)
       self.inventory = unpickler.load()
   except:
     # Best-effort cache restore: any failure (no cache file, stale
     # pickle) leaves self.inventory untouched. NOTE(review): the bare
     # except also swallows KeyboardInterrupt/SystemExit — consider
     # `except Exception`.
     pass
   
   # Periodic background jobs plus the blocking server thread.
   self.threads.append(TimerThread(interval=10, target=self.refreshInventory))
   self.threads.append(TimerThread(interval=10, setup=self.setupRefreshChat, target=self.refreshChat))
   self.threads.append(Thread(target=self.serve_forever))
   self.send(self.cfg["master"], "IngressBot is up and running")
   for t in self.threads:
     t.start()
   # Join with a timeout so the loop stays interruptible rather than
   # blocking forever in a single join() call.
   for t in self.threads:
     while t.is_alive():
       t.join(timeout=3600.0)
Beispiel #57
0
 def find_class(self, module, name):
     """Whitelist guard around Unpickler.find_class.

     A class may be unpickled only when its module starts with one of
     the whitelisted module prefixes, or its fully qualified name is an
     explicitly whitelisted object; anything else raises
     UnpicklingError.
     """
     permitted = any(
         module.startswith(prefix)
         for prefix in _DjangoQueryUnpickler.white_modules
     )
     if "%s.%s" % (module, name) in _DjangoQueryUnpickler.white_objects:
         permitted = True
     if not permitted:
         raise UnpicklingError("Unsafe class to unpickle %s.%s" % (module, name))
     return Unpickler.find_class(self, module, name)
Beispiel #58
0
def restore(system, basedir):
    """Rebuild *system* by recovering the latest snapshot and replaying
    every logged action found in the log files, then restore the
    original clock. Returns the rebuilt system.

    Fixes vs. original: log files are opened with ``with`` so each
    handle is closed (the original leaked them), and the loop variable
    no longer shadows the ``file`` builtin.
    """
    # Save the current clock; re-execution below runs under RestoreClock.
    current_clock = system._clock

    # Restore from the latest snapshot.
    system = SnapshotManager(basedir).recover_snapshot()

    files = fileutils.last_log_files(basedir)
    logger.debug(LOG_PREFIX + "Files found: " + str(files))
    if not files:
        return system

    # Collect every action from every log; each log is read until EOF.
    actions = []
    for log_file in files:
        logger.debug(LOG_PREFIX + "Opening  " + str(log_file))
        with open(log_file, 'rb') as handle:
            unpickler = Unpickler(handle)
            try:
                while True:
                    action = unpickler.load()
                    logger.debug(LOG_PREFIX + action.action)
                    actions.append(action)
            except BadPickleGet:
                # Corrupt entry: keep what was read so far from this log.
                logger.error(LOG_PREFIX + "Error unpickling %s" % (str(log_file)))
            except EOFError:
                pass

    if not actions:
        return system

    logger.debug(LOG_PREFIX + "Actions re-execution")
    for action in actions:
        try:
            # Replay with the clock frozen to the action's recorded
            # results so re-execution is deterministic.
            if hasattr(action, 'results'):
                system._clock = RestoreClock(action.results)
            action.execute_action(system)
        except Exception as e:
            # A failing action is logged and skipped; replay continues.
            logger.debug(LOG_PREFIX + 'Error executing: %s' % (str(action)))
            logger.debug(LOG_PREFIX + 'Exception: %s' % (str(e)))

    system._clock = current_clock
    return system