Example #1
    def test_02_parse_peaks(self):
        """
        function
        """
        peak_files = [
            os_join(TEST_PATH, 'data', 'peaks_protA.bed'),
            os_join(TEST_PATH, 'data', 'peaks_protB.bed')
        ]
        in_feature = False
        biases = os_join(TEST_PATH, 'data', 'biases.pickle')
        fh = open(biases, "rb")
        try:
            badcols = Unpickler(fh, encoding='latin1').load()['badcol']
        except TypeError:
            badcols = Unpickler(fh).load()['badcol']
        fh.close()
        peak_coord1, peak_coord2, npeaks1, npeaks2, submatrices, coord_conv = parse_peaks(
            peak_files[0], peak_files[1], RESOLUTION, in_feature, CHROM_SIZES,
            badcols, SECTION_POS, WINDOWS_SPAN)

        global COORD_CONV
        COORD_CONV = coord_conv
        global SUBMATRICES
        SUBMATRICES = submatrices
        self.assertEqual(peak_coord1, PEAK_COORD1)
        self.assertEqual(peak_coord2, PEAK_COORD2)
        self.assertEqual(npeaks1, 6)
        self.assertEqual(npeaks2, 14)
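The try/except around Unpickler(fh, encoding='latin1') above is a common Python 2/3 compatibility shim: Python 3's Unpickler accepts an encoding argument so it can decode pickles written by Python 2, while Python 2's Unpickler rejects that keyword with a TypeError. A minimal standalone sketch of the same pattern (the helper name and 'badcol' key are illustrative, mirroring the test above):

from pickle import Unpickler

def load_badcols(path):
    # Python 3 needs encoding='latin1' to read Python 2 pickles;
    # Python 2's Unpickler has no encoding argument and raises TypeError.
    with open(path, 'rb') as fh:
        try:
            data = Unpickler(fh, encoding='latin1').load()
        except TypeError:
            fh.seek(0)  # the keyword fails at construction, nothing was read
            data = Unpickler(fh).load()
    return data['badcol']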
Example #2
    def test_06_windows(self):
        """
        test that the total intra-chromosomal signal equals the sum over several windows
        """
        biases = os_join(TEST_PATH, 'data', 'biases.pickle')
        fh = open(biases, "rb")
        try:
            badcols = Unpickler(fh, encoding='latin1').load()['badcol']
        except TypeError:
            badcols = Unpickler(fh).load()['badcol']
        fh.close()
        window_size = (WINDOWS_SPAN * 1000) + 1  # assumed: same span as passed to submatrix_coordinates below
        groups = {}
        windows = [(0, 100), (100, 200), (200, 300), (300, 400)]
        for window in ['intra'] + windows:
            pair_peaks = generate_pairs(PEAK_COORD1,
                                        PEAK_COORD2,
                                        WINDOWS_SPAN,
                                        window,
                                        COORD_CONV,
                                        both_features=False)
            counter = defaultdict(int)
            iter_pairs = submatrix_coordinates(pair_peaks,
                                               (WINDOWS_SPAN * 1000) + 1,
                                               SUBMATRICES,
                                               counter,
                                               both_features=False)
            genomic_mat = os_join(TEST_PATH, 'data', 'data_bam_10kb.tsv')
            submatrices = os_join(TEST_PATH, 'tmp.tsv')

            groups[window] = {
                '': {
                    'sum_raw': defaultdict(int),
                    'sqr_raw': defaultdict(int),
                    'sum_nrm': defaultdict(float),
                    'sqr_nrm': defaultdict(float),
                    'passage': defaultdict(int)
                }
            }

            interactions_at_intersection(groups[window],
                                         genomic_mat,
                                         iter_pairs,
                                         submatrices,
                                         '',
                                         window_size,
                                         both_features=False)
        self.assertEqual(
            round(sum(groups['intra']['']['sum_nrm'].values()), 5),
            round(
                sum(
                    sum(groups[window]['']['sum_nrm'].values())
                    for window in windows), 5))
        self.assertEqual(
            round(sum(groups['intra']['']['sum_nrm'].values()), 5),
            round(2720.13242866, 5))
Example #3
    def __init__(self, tFile, cFile, lFile, config):
        #As was loaded from file
        self.config = config
        
        #Load the connections and locations of the neurons
        #Connections are stored as a list of (a,b) tuples, which indicate that 
        #the neuron with ID a synapses onto the neuron with ID b. 
        with open(cFile, 'rb') as inFile:  # pickles must be opened in binary mode
            self.connections = Unpickler(inFile).load()
        
        #Locations are stored as an array of possible neuron locations. Each element of 
        #the array contains either -1 (no neuron) or a neuron ID number
        with open(lFile, 'rb') as inFile:
            self.locations = Unpickler(inFile).load()
        
        #Neuron types are stored in a hash with two keys. 
        #  "inhib" - a list of inhibitory neuron IDs
        #  "excite" - a list of excitatory neuron IDs
        with open(tFile, 'rb') as inFile:
            self.neuron_types = Unpickler(inFile).load()
            
        #Set the date based on the location file
        self.file_date = re.search(r"_([\-:0-9]*)\.", lFile).group(1)
        days = self.file_date.split("-")
        self.file_date_short = "{0}/{1}/{2}".format(days[0], days[1], days[2])
        mins = days[3].split(":")
        self.file_time = "{0}:{1}:{2}.{3}".format(mins[0], mins[1], mins[2], 0.0)
        
        #Build a physical representation of the dish
        self.physDish = PhysicalDish.PhysicalLayout(self.config)
        self.physDish.loadMaps(self.locations)
        
        #Get the largest neuron id, which is also the number of neurons - 1 
        self.neuronCount = 0
        for row in reversed(self.locations):
            self.neuronCount = max(row)
            if max(row) > 0:
                break
        self.neuronCount += 1
        
        #The Brian model, with the connectivity and neuron models used
        self.culture = None
      
        #Maps which pads are near which neurons
        self.pad_neuron_map = None
        
        #Stimulus schedule, if this remains None, no stim will be delivered
        self.stimSchedule = None

        #Use an image handler to update dynamic stim scheduler. 
        #TODO this is bad OO design, but expedient
        self.imgHandler = None
Example #4
 def loadTrainExamples(self):
     folder = self.args.checkpoint
     filename_white = os.path.join(folder, "training_white.examples")
     filename_black = os.path.join(folder, "training_black.examples")
     if not os.path.isfile(filename_white) or not os.path.isfile(filename_black):
         print(filename_white)
         print(filename_black)
         r = input("File with trainExamples not found. Continue? [y|n]")
         if r != "y":
             sys.exit()
     else:
         print("File with trainExamples found. Read it.")
         with open(filename_white, "rb") as f:
             self.trainExamplesHistory_white = Unpickler(f).load()
         with open(filename_black, "rb") as f:
             self.trainExamplesHistory_black = Unpickler(f).load()
Example #5
def test_unpickle_gpuarray_as_numpy_ndarray_flag2():
    oldflag = config.experimental.unpickle_gpu_on_cpu
    config.experimental.unpickle_gpu_on_cpu = True

    try:
        testfile_dir = os.path.dirname(os.path.realpath(__file__))
        fname = "GpuArray.pkl"

        with open(os.path.join(testfile_dir, fname), "rb") as fp:
            u = Unpickler(fp, encoding="latin1")
            try:
                mat = u.load()
            except ImportError:
                # Windows sometimes fails with nonsensical errors like:
                #   ImportError: No module named type
                #   ImportError: No module named copy_reg
                # when "type" and "copy_reg" are builtin modules.
                raise

        assert isinstance(mat, np.ndarray)
        assert mat[0] == -42.0

    finally:
        config.experimental.unpickle_gpu_on_cpu = oldflag
Example #6
 def load_train_samples(self):
     samples_file = os.path.join(
         self.config.load_samples_folder_file[0],
         self.config.load_samples_folder_file[1],
     )
     if not os.path.isfile(samples_file):
         print(samples_file)
         r = input("File with train samples not found. Continue? [y|n]")
         if r != "y":
             sys.exit()
     else:
         print("File with train samples found. Read it.")
         with open(samples_file, "rb") as f:
             self.train_samples_history = Unpickler(f).load()
         while (
             len(self.train_samples_history)
             > self.config.num_iters_for_train_samples_history
         ):
             print(
                 "len(train_samples_history) =",
                 len(self.train_samples_history),
                 " => remove the oldest train_samples",
             )
             self.train_samples_history.pop(0)
         # were examples based on this model already collected (loaded)?
         self.skip_first_self_play = self.config.skip_first_self_play
Example #7
def init_score(file):
    try:
        with open(file, "rb") as scores_file:
            score_dict = Unpickler(scores_file).load()
    except Exception:
        # Missing or unreadable score file: start with an empty dict.
        score_dict = {}
    return score_dict
Example #8
    def load_weights(self, filename):
        """
        Loads a previous Approach state from a file: just the
        weights; the history is loaded separately.

        Parameters
        ----------
        filename : str
            File to load the weights from. Not having this be a
            list allows for other data encoding schemes.

        Returns
        -------
        self
        """

        filepath = filename
        if not os.path.exists(filepath):
            filepath = os.path.join(self.args.checkpoint_dir, filename)
            if not os.path.exists(filepath):
                raise("No model in local file {} or path {}!".format(filename, filepath))

        all_model_files = []
        with open(filepath, "rb") as f:
            all_model_files = Unpickler(f).load()

        for i in range(len(all_model_files)):
            buf = all_model_files[i]
            self.models[i].load_weights(buf)

        return self
Example #9
    def __init__(self, partIn, maxThreadsIn=8):
        super(ValgrindAnalyzer, self).__init__()
        self.release = None
        self.plat = None
        self.part = partIn
        self.ignoreLibs = ['libm-2.5.so', 'libpthread-2.5.so', 'cmsRun']
        self.libList = []

        prodFileName = os.environ[
            'CMSSW_RELEASE_BASE'] + '/src/ReleaseProducts.list'
        self.rpr = RelProdReader()
        self.rpr.readProducts(prodFileName)

        vgCmdFileName = os.environ['CMSSW_RELEASE_BASE'] + '/qaLogs/vgCmds.pkl'
        from pickle import Unpickler
        vgCmdFile = open(vgCmdFileName, 'rb')  # binary mode for pickle data
        vgCmdPklr = Unpickler(vgCmdFile)
        self.vgCmds = vgCmdPklr.load()
        vgCmdFile.close()

        self.toDo = []

        self.threadList = []
        self.maxThreads = maxThreadsIn
        self.threadStatus = {}
        self.threadTiming = {}
Example #10
    def loadTrainExamples(self, iteration):
        """ Loads pre-generated examples from file(s) for the given iteration
            @param iteration: index of iteration from which examples to be loaded 
            
            Examples are loaded from args.numItersForTrainExamplesHistory iterations using the following filename convention 
            filePath is determined as getCheckpointFile(iteration)+".examples.mini"
        """
        
        print("Load trainExamples from MINI file(s)")
        trainExamplesHistory = []
        # check mini files
        for n in range(self.args.numItersForTrainExamplesHistory):
            examplesIteration = iteration - n
            examplesFile = os.path.join(self.args.load_folder_file[0], getCheckpointFile(examplesIteration)+".examples.mini")
            if not os.path.isfile(examplesFile):
                # look for file in checkpoint folder
                examplesFile = os.path.join(self.args.checkpoint, getCheckpointFile(examplesIteration)+".examples.mini")
                if not os.path.isfile(examplesFile):
                    if examplesIteration >= 0:
                        ask_for_continue("MINI file with trainExamples not found: " + examplesFile + ". Continue? [y|n]")
                        continue
                    break
            print("Load trainExamples from MINI file:", examplesFile)
            with open(examplesFile, "rb") as f:
                iterationExamples = Unpickler(f).load()
                print("...loaded ", len(iterationExamples), "examples")
                trainExamplesHistory.insert(0, iterationExamples)

        print("length of trainExamplesHistory:", len(trainExamplesHistory))
        for examples in trainExamplesHistory:
            print("Length of mini pack of examples:", len(examples))
        return trainExamplesHistory
Example #11
 def __init__(self, outFile, saveFile):
     computerName=platform.node().capitalize()
     userName=getpass.getuser().capitalize()
     self.outFile=outFile
     self.saveFile=saveFile
     self._kinds={"kind":kind, "adjective":adjective,"entity":entity,"name":name,"thing":thing,"person":person,
                  "computer":computer,"user":user,"infinitive":infinitive,"pronoun":pronoun,
                  "male":male,"female":female,"place":place,"location":location,"number":number,
                  "time":time}
     try:
         infile = open(saveFile,'rb')
         unpickle = Unpickler(infile)
         kinds=unpickle.load()
         self._loadKinds(kinds, "entity")
         self._loadKinds(kinds, "adjective")
         #globalsBak=globals().copy() #backup globals
         globals().update(self._kinds)   #inject dynamic classes into globals
         self._entities,self._antecedents,self._names,self._adjectives=unpickle.load()
         #globals().clear()   #clear globals
         #globals().update(globalsBak)    #restore backup
         infile.close()
     except Exception:  # e.g. IOError when no save file exists yet
         self._entities={computerName:computer(computerName,self,True),userName:user(userName,self,True),"Vibranium":thing("Vibranium",self)}
         self._antecedents={"I":self._entities[userName],"you":self._entities[computerName]}
         self._names={}
         self._adjectives={}
         for key,value in self._entities.items():
             if value in self._names:
                 self._names[value].add(key)
             else:
                 self._names[value]={key}
     self._temp={}   #stores 'a/an' objects, possessives, prepositional phrases, and numbers
Example #12
def get_menu():
    """Checks if menu is current, if yes, load menu from file,
        if not, downloads data from internet and saves them to file,
        and updates last_updated"""
    d = {}

    if check_current():
        with open("menu", "rb") as f:
            p = Unpickler(f)
            d = p.load()
    else:
        d = {
            "obed": get_menza_zizkov_obed(),
            "pizza": get_pizza(),
            "zdrava": get_zdrava(),
            "vecere": get_menza_zizkov_vecere(),
            "jarov": get_jarov(),
            "volha": get_volha()
        }

        # write new menu to file ("wb" already truncates on open)
        with open("menu", "wb") as f:
            p = Pickler(f, 0)
            p.dump(d)

        # update last_updated
        with open("last_updated", "wb") as f:
            f.truncate()
            p = Pickler(f, 0)
            p.dump(date.today().strftime("%w"))

    return d
Example #13
def loadCharacter(name, cPlayerName):
    '''
    takes the character name,
    returns the character object
    '''
    try:
        f = open("../data/characters/" + name + ".txt", "rb")
    except FileNotFoundError:
        print("Character file not found. Unable to load progress. \n")
        name = input(
            "Please enter a valid character name (if you want to create a new character, type \"new\"): \n"
        )
        if name == "new":
            return "new"
        else:
            return loadCharacter(name, cPlayerName)

    character = Unpickler(f).load()
    if character.player == cPlayerName:
        f.close()
        return character
    else:
        print(
            "Character does not belong to you. Please choose one of your own characters."
        )
        f.close()
        name = input(
            "Please enter a valid character name (if you want to create a new character, type \"new\"): \n"
        )
        if name == "new":
            return "new"
        else:
            return loadCharacter(name, cPlayerName)
Example #14
    def unserializeEmailFromFile(self, emailId):
        '''
            unserialize json from email cache to variable
            outputMessage = {
                'code':IMAPConnect.CODE_FAIL,
                'message':None
                'data':None
            }
        '''
        
        outputMessage = {
            'code':IMAPConnect.CODE_FAIL,
            'message':None,
            'data':None
        }

        try:
            dirPath = os.path.sep.join((self.__dataDir, emailId))
            os.makedirs(dirPath, exist_ok=True)
            filePath = os.path.sep.join((dirPath, emailId))
            with open(filePath, 'rb') as f:
                content = Unpickler(f).load()
            outputMessage['code'] = IMAPConnect.CODE_OK
            outputMessage['message'] = 'Successfully loaded {}'.format(filePath)
            outputMessage['data'] = content
                
        except Exception as ex:
            outputMessage['message'] = ex

        return outputMessage
Example #15
 def unpickle_data(data):
     un_p = []  # un-pickled list
     for value in data:
         file = BytesIO(value)
         un_pickled = Unpickler(file).load()
         un_p.append(un_pickled)
     return un_p
Example #16
def load_history():
    # ===== load history file =====
    # Description: contains the data collected in every iteration
    modelFile = os.path.join(DATAFILE, "trainhistory.pth.tar")
    examplesFile = modelFile + ".examples"
    trainhistory = []
    if not os.path.isfile(examplesFile):
        print(examplesFile)
    else:
        print("File with trainExamples found. Read it.")
        with open(examplesFile, "rb") as f:
            for i in Unpickler(f).load():
                trainhistory.append(i)

    print("The trainhistory containes {} iteration of data".format(
        len(trainhistory)))

    # ===== Extract data =====
    trainExamples = []
    for i, e in enumerate(trainhistory):
        trainExamples.extend(np.array(e))

    print("Number of all trainexamples: {}".format(len(trainExamples)))
    return trainExamples
Example #17
def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
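Example #17 clones an object by round-tripping it through an in-memory pickle; the StringIO buffer marks it as Python 2 code. A Python 3 sketch of the same idea, assuming obj is picklable:

from io import BytesIO
from pickle import Pickler, Unpickler

def deep_copy(obj):
    # Dump to an in-memory bytes buffer, rewind, and load a fresh copy.
    stream = BytesIO()
    Pickler(stream, protocol=-1).dump(obj)
    stream.seek(0)
    return Unpickler(stream).load()

copy.deepcopy is usually the faster choice; the pickle round-trip has the side benefit of verifying that the object is actually serializable.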
Example #18
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads object saved
    with older versions of Eelbrain, and allows using a system file dialog to
    select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog will be
        shown. If the user cancels the file dialog, a RuntimeError is raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            print(repr(file_path))
    else:
        file_path = os.path.expanduser(file_path)
        if not os.path.exists(file_path):
            new_path = os.extsep.join((file_path, 'pickled'))
            if os.path.exists(new_path):
                file_path = new_path

    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        unpickler.find_global = map_paths
        obj = unpickler.load()

    return obj
Example #19
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        database = DB(
            ZODB.blob.BlobStorage('blobs',
                                  ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Example #20
def applyConfiguration(index):
    file = configurations[index].value
    if not file:
        print 'no file selected'
        return

    index = 0
    for path in filter(None, cnfg_load.value.split(';')):
        if file == os.path.basename(path):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        print 'apply:', os.path.basename(path)
                        model.apply()
            finally:
                fh.close()
Example #21
def ResourceUnpickler(path, registry=None):
    warnings.warn("cyclone.tw.trp.ResourceUnpickler " + _msg,
                  DeprecationWarning,
                  stacklevel=2)
    fl = open(path, 'rb')  # binary mode for pickle data
    result = Unpickler(fl).load()
    return result
Example #22
    def refit(self, samples):
        df = Unpickler(
            open(
                pkg_resources.resource_filename('hate_cl', 'hate_cl/data.sav'),
                'rb')).load()
        aux_df = DataFrame(samples, columns=['hate', 'sentence'])
        df = df.append(aux_df, ignore_index=True)
        print(df)
        X = df['sentence'].tolist()
        y = df['hate'].tolist()
        cl = Pipeline([('tfidf', TfidfVectorizer(ngram_range=(1, 4))),
                       ('clf',
                        RandomForestClassifier(n_estimators=100,
                                               max_depth=None,
                                               min_samples_leaf=1,
                                               min_samples_split=2,
                                               min_weight_fraction_leaf=0))])
        cl.fit(X, y)
        self.classifier = cl
        cl_filename = pkg_resources.resource_filename(
            'hate_cl', 'hate_cl/randomforest.sav')
        df_filename = pkg_resources.resource_filename('hate_cl',
                                                      'hate_cl/data.sav')

        f = open(cl_filename, 'wb')
        Pickler(f).dump(cl)
        f.close()

        f = open(df_filename, 'wb')
        Pickler(f).dump(df)
        f.close()
Example #23
    def load_history(self, filename):
        """
        Loads a game history from a file. A file can optionally
        contain one or many History classes, and this method
        can be extended with optional arguments to specify how
        many histories to load.

        Parameters
        ----------
        filename : str
            File to load history from.

        Returns
        -------
        self
        """

        filepath = filename
        if not os.path.exists(filepath):
            filepath = os.path.join(self.args.checkpoint_dir, filename)
            if not os.path.exists(filepath):
                raise("No checkpoint in local file {} or path {}!".format(filename, filepath))

        with open(filepath, "rb") as f:
            log.info(f"Loading History from {filepath}")
            self.trainExamplesHistory = Unpickler(f).load()

        return self
Example #24
def _unpickle(pickled):
    """ Unpickles a string and catch all types of errors it can throw,
    to raise only NotReadableJobError in case of error.

    odoo stores the text fields as 'utf-8', so we specify the encoding.

    `loads()` may raises many types of exceptions (AttributeError,
    IndexError, TypeError, KeyError, ...). They are all catched and
    raised as `NotReadableJobError`).

    Pickle could be exploited by an attacker who would write a value in a job
    that would run arbitrary code when unpickled. This is why we set a custom
    ``find_global`` method on the ``Unpickler``, only jobs and a whitelist of
    classes/functions are allowed to be unpickled (plus the builtins types).
    """
    def restricted_find_global(mod_name, fn_name):
        __import__(mod_name)
        mod = sys.modules[mod_name]
        fn = getattr(mod, fn_name)
        if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST):
            raise UnpicklingError('{}.{} is not allowed in jobs'.format(
                mod_name, fn_name))
        return fn

    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = restricted_find_global
    try:
        unpickled = unpickler.load()
    except (Exception, UnpicklingError):
        raise NotReadableJobError('Could not unpickle.', pickled)
    return unpickled
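The find_global hook used in Example #24 exists only on Python 2's Unpickler; on Python 3 the documented equivalent is to subclass Unpickler and override find_class. A minimal sketch of the same whitelist idea, with a hypothetical ALLOWED set standing in for JOB_REGISTRY and _UNPICKLE_WHITELIST:

from io import BytesIO
from pickle import Unpickler, UnpicklingError

# Hypothetical whitelist of (module, name) pairs that may be unpickled.
ALLOWED = {('builtins', 'list'), ('builtins', 'dict')}

class RestrictedUnpickler(Unpickler):
    def find_class(self, module, name):
        # Refuse to resolve any global outside the whitelist.
        if (module, name) not in ALLOWED:
            raise UnpicklingError(
                '{}.{} is not allowed'.format(module, name))
        return super().find_class(module, name)

def restricted_loads(data):
    return RestrictedUnpickler(BytesIO(data)).load()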
Example #25
def runConfigurations():
    if cnfg_load.value:
        for path in filter(None, cnfg_load.value.split(';')):
            fh = open(path, 'r')
            try:
                p = Unpickler(fh)
                if p.load() != 'KKB':
                    print 'ERROR:', os.path.basename(path)
                else:
                    model = ConfigurationModel()
                    for att in dir(model):
                        att_value = getattr(model, att)
                        if (att.find('_') != 0) and ('instancemethod'
                                                     not in str(
                                                         type(att_value))):
                            if p.load() != att:
                                print 'FORMAT ERROR:', os.path.basename(path)
                                break

                            setattr(model, att, p.load())
                    else:
                        print 'run:', os.path.basename(path)
                        startScan(model)
            finally:
                fh.close()
Example #26
def run(args):
    # TODO: save the results of processing data for faster inference load
    if exists(args.data_reader_path):
        print 'Loading data reader...'
        with open(args.data_reader_path, 'rb') as f:
            data_reader = Unpickler(f).load()
            print 'Loaded'

            vocab = data_reader.get_vocab()
    else:
        print 'Creating data reader...'
        data_reader = DataReader(args.train_dir)

        vocab = data_reader.get_vocab()

        # Save the data reader
        with open(args.data_reader_path, 'wb') as f:
            Pickler(f).dump(data_reader)

    print 'Init model...'
    model = WordModel(args, vocab)

    if args.inference:
        model.generate(primer=args.primer)
    else:
        global_step = 0
        while global_step < args.max_steps:
            inputs, targets = data_reader.get_train_batch(
                args.batch_size, args.seq_len)
            global_step = model.train_step(inputs, targets)
Example #27
    def _testTracingOrProfileState(self, do_pickle=False, **kw):
        t = tasklet(self.Tasklet)
        t(**kw)
        t.run()

        self.foo()
        n = len(self.trace)
        self.foo()
        n2 = len(self.trace)
        self.assertEqual(n, n2)

        if do_pickle:
            io = StringIO()
            p = Pickler(io, -1)
            p.persistent_id = self.persistent_id
            p.dump(t)
            t.remove()
            t.bind(None)
            p = Unpickler(StringIO(io.getvalue()))
            p.persistent_load = self.persistent_load
            t = p.load()
            p = None
            io = None

        t.run()

        self.foo()
        n = len(self.trace)
        self.foo()
        n2 = len(self.trace)
        self.assertEqual(n, n2)
Example #28
 def loads(self, s):
     up = Unpickler(BytesIO(s))
     up.persistent_load = self._get_object
     try:
         return up.load()
     except KeyError as e:
         raise UnpicklingError("Could not find Node class for %s" % e)
Example #29
    def search(self, buscar=""):
        ruta = os.path.split(sys.argv[0])
        abs_path = os.path.join(ruta[0], "motor/db/matrix.mx")  # avoid shadowing builtin abs()
        with open(abs_path, 'rb') as filemx:  # binary mode for pickle data
            self._mx = Unpickler(filemx).load()

        cadena = buscar
        if not cadena:
            return ['vacio']
        else:
            #search here
            cadena = cadena.strip()
            cadena = cadena.lower()
            cad = self._prep.ngram_tokenizer(text=cadena)
            resultado = list()
            for doc in self._mx.docs:
                vector = list()
                for q in cad:
                    if q in self._mx.terms:
                        pos = self._mx.terms.index(q)
                        vector.append(doc['terms'][pos])
                resultado.append((doc['id'], vector))
            resultado.sort(lambda a, b: self.__Deuclidiana(a[1]) - self.
                           __Deuclidiana(b[1]),
                           reverse=True)
            return resultado
Example #30
    def charger_donnees(self):
        """
            Méthode appelée à la création de l'utilisateur, permettant de
            charger toutes les données sauvegardées, ou de les créer si le
            joueur n'a aucune données sauvegardées.
        """

        # LOAD SAVED DATA

        if "save.elh" in listdir(getcwd()):

            with open("save.elh", "rb") as fichier_sauvegarde:
                unpickler = Unpickler(fichier_sauvegarde)
                utilisateur_sauvegarde = unpickler.load()
                for key, value in utilisateur_sauvegarde.__dict__.items():
                    self.__setattr__(key, value)

        # CREATE DEFAULT DATA

        else:

            self.raccourcis["deplacer-haut"] = sf.Keyboard.Z
            self.raccourcis["deplacer-bas"] = sf.Keyboard.S
            self.raccourcis["deplacer-droite"] = sf.Keyboard.D
            self.raccourcis["deplacer-gauche"] = sf.Keyboard.Q
            self.raccourcis["menu"] = sf.Keyboard.ESCAPE
            self.raccourcis["interagir"] = sf.Keyboard.E
            self.raccourcis["inventaire"] = sf.Keyboard.SPACE
            self.raccourcis["map"] = sf.Keyboard.M

            self.sauvegarde_utilisable = False
            self.sauvegarde = dict()