Example #1
 def save(self, obj):
     desc = "object of type %s" % (describe_type(obj))
     # , describe_value(obj, 100))
     #  self.stack.append(describe_value(obj, 120))
     self.stack.append(desc)
     Pickler.save(self, obj)
     self.stack.pop()
Example #2
    def __init__(self, file, protocol=None):
        Pickler.__init__(self, file, protocol)
        self.lazywrites = deque()
        self.realwrite = file.write

        # Pickler.__init__ overwrites self.write, we do not want that
        del self.write
Example #3
def picklify(objs):
	from StringIO import StringIO
	from pickle import Pickler
	sio = StringIO()
	pickler = Pickler(sio)
	pickler.dump(objs)
	return sio.getvalue()
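
For the reverse direction, a minimal sketch under the same Python 2 assumptions (the helper name unpicklify is not part of the original snippet):

def unpicklify(data):
	from StringIO import StringIO
	from pickle import Unpickler
	# rebuild the object graph from the byte string produced by picklify
	return Unpickler(StringIO(data)).load()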
Example #4
class Environment(object):

    handle = None
    loader = None
    writer = None
    data = None


    def __init__(self):

        # open read/write in binary without truncating an existing file
        # ('w+' would wipe the pickled environment on every start-up)
        self.handle = open("environment.pickle", 'a+b')
        self.handle.seek(0)
        self.loader = Unpickler(self.handle)
        self.writer = Pickler(self.handle)

        try:
            self.data = self.loader.load()
        except EOFError:
            print "WARNING: Empty environment, creating environment file."
            self.data = {}
            self.write(self.data)


    def write(self, data):

        self.writer.dump(data)
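
A hedged usage sketch for this Environment class (note that every write() appends a further pickle record to the file, while the constructor only reads the first one):

env = Environment()
env.data["counter"] = env.data.get("counter", 0) + 1
env.write(env.data)
env.handle.close()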
Example #5
def save_type(pickler, obj):
    if obj in _typemap:

        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
    elif obj.__module__ == '__main__':
        if type(obj) == type:
            # we are pickling the interpreter
            if is_dill(pickler) and pickler._session:

                _dict = _dict_from_dictproxy(obj.__dict__)
            else: # otherwise punt to StockPickler

                StockPickler.save_global(pickler, obj)
                return
        else:

            _dict = obj.__dict__
       #print _dict
       #print "%s\n%s" % (type(obj), obj.__name__)
       #print "%s\n%s" % (obj.__bases__, obj.__dict__)
        pickler.save_reduce(_create_type, (type(obj), obj.__name__,
                                           obj.__bases__, _dict), obj=obj)
    else:

       #print obj.__dict__
       #print "%s\n%s" % (type(obj), obj.__name__)
       #print "%s\n%s" % (obj.__bases__, obj.__dict__)
        StockPickler.save_global(pickler, obj)
    return
Example #6
 def __setitem__(self, key, value):
     if self.writeback:
         self.cache[key] = value
     f = BytesIO()
     p = Pickler(f, self._protocol)
     p.dump(value)
     self.dict[key.encode(self.keyencoding)] = f.getvalue()
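
The matching read path, sketched after the stdlib shelve module (assuming the same self.dict and self.keyencoding, with BytesIO and Unpickler imported; the writeback cache lookup is omitted):

 def __getitem__(self, key):
     # look up the pickled bytes and rebuild the value
     f = BytesIO(self.dict[key.encode(self.keyencoding)])
     return Unpickler(f).load()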
Example #7
  def parseLog(self):
    logData = {}
    logRE = re.compile('^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step>max_steps: max_steps=step
      if not logData.has_key(wf):
        logData[wf] = {'steps': {}, 'events' : [], 'failed' : [], 'warning' : []}
      if not logData[wf]['steps'].has_key(step):
        logData[wf]['steps'][step]=logFile
    cache_read=0
    log_processed=0
    for wf in logData:
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index =0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile)+"/logcache_"+str(step)+".json"
        if (not os.path.exists(json_cache)) or (os.path.getmtime(logFile)>os.path.getmtime(json_cache)):
          try:
            es_parse_log(logFile)
          except Exception as e:
            print "Sending log information to elasticsearch failed" , str(e)
          inFile = open(logFile)
          for line in inFile:
            if '%MSG-w' in line: data[1]=data[1]+1
            if '%MSG-e' in line: data[2]=data[2]+1
            if 'Begin processing the ' in line: data[0]=data[0]+1
          inFile.close()
          jfile = open(json_cache,"w")
          json.dump(data,jfile)
          jfile.close()
          log_processed+=1
        else:
          jfile = open(json_cache,"r")
          data = json.load(jfile)
          jfile.close()
          cache_read+=1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index+=1
      del logData[wf]['steps']

    print "Log processed: ",log_processed
    print "Caches read:",cache_read
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir,'runTheMatrixMsgs.pkl'), 'wb')
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
Example #8
 def __init__(self, file, protocol=None):
     Pickler.__init__(self, file, protocol)
     # set of modules to unpickle
     self.modules = set()
     # map ids to dictionary. used to ensure that functions can share
     # global env
     self.globals_ref = {}
Example #9
def save_function(pickler, obj):
    if not _locate_function(obj):
        pickler.save_reduce(FunctionType, (obj.func_code, obj.func_globals,
                                           obj.func_name, obj.func_defaults,
                                           obj.func_closure), obj=obj)
    else:
        Pickler.save_global(pickler, obj)
Example #10
    def realsave(self, obj):
        def _name(obj):
            try:
                name = getattr(obj, '__name__', None)
                if name is not None:
                    return ': %s' % name
            except Exception:
                pass

            return ''

        def _loc(obj):
            try:
                fn = getattr(obj, '__file__', None)
                if fn is not None:
                    return ' @%s' % (fn,)

                obj = getattr(obj, 'im_func', obj)
                code = getattr(obj, '__code__', None)
                if code is not None:
                    return ' @%s:%s' % (code.co_filename, code.co_firstlineno)
            except Exception:
                pass

            return ''

        try:
            Pickler.save(self, obj)
        except TypeError:
            logger.error('Failed to serialize %s%s%s',
                         type(obj), _name(obj), _loc(obj))
            raise
Example #11
    def parseLog(self):
        logData = {}
        logRE = re.compile('^([1-9][0-9]*\.[0-9]+)[^/]+/step([1-9])_.*\.log$')
        for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
            m = logRE.match(logFile)
            if not m: continue
            wf = m.group(1)
            step = int(m.group(2)) - 1
            if step >= self.StepsPerJob: continue
            if not logData.has_key(wf):
                logData[wf] = {'events' : [], 'failed' : [], 'warning' : []}
                for s in range(0, self.StepsPerJob):
                    for k in logData[wf].keys(): logData[wf][k].append(-1)
            warn = 0
            err = 0
            rd = 0
            inFile = open(logFile)
            for line in inFile:
                if '%MSG-w' in line: warn += 1
                if '%MSG-e' in line: err += 1
                if 'Begin processing the ' in line: rd += 1
            inFile.close()
            logData[wf]['events'][step] = rd
            logData[wf]['failed'][step] = err
            logData[wf]['warning'][step] = warn

        from pickle import Pickler
        outFile = open(os.path.join(self.basedir,'runTheMatrixMsgs.pkl'), 'wb')
        pklFile = Pickler(outFile)
        pklFile.dump(logData)
        outFile.close()
        return
                
Example #12
 def __init__(self, writer, reducers=None, protocol=HIGHEST_PROTOCOL):
     _Pickler.__init__(self, writer, protocol=protocol)
     # Make the dispatch registry an instance level attribute instead of a
     # reference to the class dictionary
     self.dispatch = _Pickler.dispatch.copy()
     for type, reduce_func in reducers.items():
         self.register(type, reduce_func)
Example #13
    def testDeepCopyCanInvalidate(self):
        """
        Tests regression for invalidation problems related to missing
        readers and writers values in cloned objects (see
        http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html)
        """
        import ZODB.MappingStorage
        database = DB(ZODB.blob.BlobStorage(
            'blobs', ZODB.MappingStorage.MappingStorage()))
        connection = database.open()
        root = connection.root()
        transaction.begin()
        root['blob'] = Blob()
        transaction.commit()

        stream = StringIO()
        p = Pickler(stream, 1)
        p.dump(root['blob'])
        u = Unpickler(stream)
        stream.seek(0)
        clone = u.load()
        clone._p_invalidate()

        # it should also be possible to open the cloned blob
        # (even though it won't contain the original data)
        clone.open()

        # tearDown
        database.close()
Example #14
 def take_snapshot(self, object):
     file = open(fu.next_snapshot_file(self.basedir), "wb")
     logger.debug("Taking snapshot on: " + file.name)
     pickler = Pickler(file, pickle.HIGHEST_PROTOCOL)
     pickler.dump(object)
     file.flush()
     file.close()
Example #15
def save_builtin_method(pickler, obj):
    if obj.__self__ is not None:

        pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
    else:

        StockPickler.save_global(pickler, obj)
    return
Example #16
def save_builtin_method(pickler, obj):
    if obj.__self__ is not None:
        log.info("B1: %s" % obj)
        pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
    else:
        log.info("B2: %s" % obj)
        StockPickler.save_global(pickler, obj)
    return
Example #17
 def __init__(self, file, protocol=None):
     if protocol is None:
         protocol = DEFAULT_PROTOCOL
     Pickler.__init__(self, file, protocol=protocol)
     # set of modules to unpickle
     self.modules = set()
     # map ids to dictionary. used to ensure that functions can share global env
     self.globals_ref = {}
Example #18
def pickle_outcomes(outcomes, fn):
    fh = open(fn, 'wb')
    p = Pickler(fh)
    p.dump(outcomes)
    fh.close()
    if KEEP_TIMING:
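        # start_t and time_map are assumed to be timing globals defined elsewhere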
        end_t = time()
        time_map["pickling"] = end_t - start_t
        start_t = time()
Example #19
    def testPickleUnpickle(self):
        s = BytesIO()
        p = Pickler(s)
        p.dump(Allow)
        s.seek(0)
        u = Unpickler(s)
        newAllow = u.load()

        self.assertTrue(newAllow is Allow)
Example #20
def save_type(pickler, obj):
    if obj in _typemap:
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
    elif obj.__module__ == '__main__':
        pickler.save_reduce(_create_type, (type(obj), obj.__name__,
                                           obj.__bases__, obj.__dict__),
                                           obj=obj)
    else:
        Pickler.save_global(pickler, obj)
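
Reducers with this (pickler, obj) signature are normally installed in a pickler's dispatch table keyed by type. A minimal sketch with the pure-Python stdlib pickler (TypePickler is a hypothetical name; on Python 2 the plain pickle.Pickler plays the role of pickle._Pickler):

import pickle

class TypePickler(pickle._Pickler):
    # copy the class-level table so the stdlib default stays untouched
    dispatch = pickle._Pickler.dispatch.copy()

TypePickler.dispatch[type] = save_type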
Example #21
def save_function(pickler, obj):
    if not _locate_function(obj): #, pickler._session):
        log.info("F1: %s" % obj)
        pickler.save_reduce(FunctionType, (obj.func_code, obj.func_globals,
                                           obj.func_name, obj.func_defaults,
                                           obj.func_closure), obj=obj)
    else:
        log.info("F2: %s" % obj)
        StockPickler.save_global(pickler, obj)
    return
Example #22
def save_classobj(pickler, obj):
    if obj.__module__ == '__main__':

        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
                                       #XXX: or obj.__dict__.copy()), obj=obj) ?
    else:

        StockPickler.save_global(pickler, obj)
    return
Example #23
def save_module_dict(pickler, obj):
    if is_dill(pickler) and obj is pickler._main_module.__dict__:
        log.info("D1: %s" % "<dict ...>") # obj
        pickler.write('c__builtin__\n__main__\n')
    elif not is_dill(pickler) and obj is _main_module.__dict__:
        log.info("D3: %s" % "<dict ...>") # obj
        pickler.write('c__main__\n__dict__\n')   #XXX: works in general?
    else:
        log.info("D2: %s" % "<dict ...>") #obj
        StockPickler.save_dict(pickler, obj)
    return
Example #24
def save_module_dict(pickler, obj):
    if is_dill(pickler) and obj is pickler._main_module.__dict__:

        pickler.write('c__builtin__\n__main__\n')
    elif not is_dill(pickler) and obj is _main_module.__dict__:

        pickler.write('c__main__\n__dict__\n')   #XXX: works in general?
    else:

        StockPickler.save_dict(pickler, obj)
    return
Example #25
    def _sauvegarder(self):
        """
            Sauvegarde la map.
        """

        try:
            with open(adresse_fichier_sauvegarde, 'wb') as fichier_sauvegarde:
                pick = Pickler(fichier_sauvegarde)
                pick.dump(self._map)
        except:
            print("Erreur lors de l'enregistrement du fichier")
Example #26
 def sync(self):
     res = {}
     with dbm.open(self.db, self.flag) as db:
         for k, v in self.dict.items():
             f = io.BytesIO()
             p = Pickler(f, protocol=self._protocol)
             p.dump(v)
             db[k] = f.getvalue()
         try:
             db.sync()
         except AttributeError:
             pass
Example #27
def pickle(obj, filename, protocol=0):
    f = None
    try:
        f = open(filename, "wb")
        p = Pickler(f, protocol)
        p.dump(obj)
        f.close()
        f = None
        # print "Pickled", filename
    finally:
        if f:
            f.close()
Example #28
    def _tuer_sauvegarde(self):
        """
            Efface la sauvegarde.

            Devient inefficace si le mode de sauvegarde change.
        """

        try:
            with open(adresse_fichier_sauvegarde, 'wb') as fichier_sauvegarde:
                pick = Pickler(fichier_sauvegarde)
                pick.dump(None)
        except:
            print("Erreur lors de l'enregistrement du fichier")
Example #29
 def _dump_blocked(self):
     dump = os.path.join(
         self.get_cache_dir(), "autoqueue_block_cache")
     if not self._blocked_artists:
         try:
             os.remove(dump)
         except OSError:
             pass
         return
     with open(dump, 'wb') as pickle_file:
         pickler = Pickler(pickle_file, -1)
         to_dump = (self._blocked_artists, self._blocked_artists_times)
         pickler.dump(to_dump)
Example #30
 def __init__(self, writer, reducers=None, protocol=HIGHEST_PROTOCOL):
     Pickler.__init__(self, writer, protocol=protocol)
     if reducers is None:
         reducers = {}
     if hasattr(Pickler, 'dispatch'):
         # Make the dispatch registry an instance level attribute instead of
         # a reference to the class dictionary under Python 2
         self.dispatch = Pickler.dispatch.copy()
     else:
         # Under Python 3 initialize the dispatch table with a copy of the
         # default registry
         self.dispatch_table = copyreg.dispatch_table.copy()
     for type, reduce_func in reducers.items():
         self.register(type, reduce_func)
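
A hedged usage sketch for the reducers mechanism above (CustomizablePickler stands in for whatever class this __init__ belongs to; reducers follow the (callable, args) convention that pickle expects):

import io

def reduce_set_sorted(s):
    # pickle sets as sorted lists for deterministic output
    return (set, (sorted(s),))

buf = io.BytesIO()
CustomizablePickler(buf, reducers={set: reduce_set_sorted}).dump({3, 1, 2})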
Example #31
 def save_memoryview(self, obj):
     """Fallback to save_string"""
     Pickler.save_string(self, str(obj))
Example #32
def EA_Main(locator,
            building_names,
            extraCosts,
            extraCO2,
            extraPrim,
            solarFeat,
            ntwFeat,
            gv,
            genCP=0):
    """
    Evolutionary algorithm to optimize the district energy system's design
    
    Parameters
    ----------
    locator : string
        paths to folders
    finances / CO2 / Prim : float
        costs [CHF] / emissions [kg CO2-eq] / primary energy needs [MJ oil] 
        previously calculated
    solarFeat : class solarFeatures
        includes data from solar files
    ntwFeat : class ntwFeatures
        includes data from the ntw optimization
    genCP : int
        generation to start the EA from (eg if there was a crash of the code)
    
    Returns
    -------
    
    """
    t0 = time.clock()

    # get number of buildings
    nBuildings = len(building_names)

    # set up the toolbox of the DEAP library (containing the evolutionary algorithm)
    creator, toolbox = calc_ea_setup(nBuildings, gv)

    # define objective function and register into toolbox
    def evalConfig(ind):
        (costs, CO2, prim) = eI.evalInd(ind, building_names, locator,
                                        extraCosts, extraCO2, extraPrim,
                                        solarFeat, ntwFeat, gv)
        return (costs, CO2, prim)

    toolbox.register("evaluate", evalConfig)

    ntwList = ["1" * nBuildings]
    epsInd = []
    invalid_ind = []

    # Evolutionary strategy
    if genCP == 0:
        # create population
        pop = toolbox.population(n=gv.initialInd)

        # Check network
        for ind in pop:
            eI.checkNtw(ind, ntwList, locator, gv)

        # Evaluate the initial population
        print "Evaluate initial population"
        fitnesses = map(toolbox.evaluate, pop)

        for ind, fit in zip(pop, fitnesses):
            ind.fitness.values = fit
            print ind.fitness.values, "fit"

        # Save initial population
        print "Save Initial population \n"
        os.chdir(locator.pathMasterRes)
        with open("CheckPointInitial", "wb") as CPwrite:
            CPpickle = Pickler(CPwrite)
            cp = dict(population=pop,
                      generation=0,
                      networkList=ntwList,
                      epsIndicator=[],
                      testedPop=[])
            CPpickle.dump(cp)
    else:
        print "Recover from CP " + str(genCP) + "\n"
        os.chdir(locator.pathMasterRes)

        with open("CheckPoint" + str(genCP), "rb") as CPread:
            CPunpick = Unpickler(CPread)
            cp = CPunpick.load()
            pop = cp["population"]
            ntwList = cp["networkList"]
            epsInd = cp["epsIndicator"]

    PROBA, SIGMAP = gv.PROBA, gv.SIGMAP

    # Evolution starts !
    g = genCP
    stopCrit = False  # threshold for the epsilon indicator, not used

    while g < gv.NGEN and not stopCrit and (time.clock() - t0) < gv.maxTime:

        g += 1
        print "Generation", g

        offspring = list(pop)

        # Apply crossover and mutation on the pop
        print "CrossOver"
        for ind1, ind2 in zip(pop[::2], pop[1::2]):
            child1, child2 = cx.cxUniform(ind1, ind2, PROBA, gv)
            offspring += [child1, child2]

        # First half of the EA: create new uncorrelated configurations
        if g < gv.NGEN / 2:
            for mutant in pop:
                print "Mutation Flip"
                offspring.append(mut.mutFlip(mutant, PROBA, gv))
                print "Mutation Shuffle"
                offspring.append(mut.mutShuffle(mutant, PROBA, gv))
                print "Mutation GU \n"
                offspring.append(mut.mutGU(mutant, PROBA, gv))

        # Third quarter of the EA: keep the good individuals but modify the shares uniformly
        elif g < gv.NGEN * 3 / 4:
            for mutant in pop:
                print "Mutation Uniform"
                offspring.append(mut.mutUniformCap(mutant, gv))

        # Last quarter: keep the very good individuals and modify the shares with Gauss distribution
        else:
            for mutant in pop:
                print "Mutation Gauss"
                offspring.append(mut.mutGaussCap(mutant, SIGMAP, gv))

        # Evaluate the individuals with an invalid fitness
        # NB: every generation leads to the reevaluation of 4n / 2n / 2n individuals
        # (n being the number of individuals in the previous generation)
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]

        print "Update Network list \n"
        for ind in invalid_ind:
            eI.checkNtw(ind, ntwList, locator, gv)

        print "Re-evaluate the population"
        fitnesses = map(toolbox.evaluate, invalid_ind)

        print "......................................."
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit
            print ind.fitness.values, "new fit"
        print "....................................... \n"

        # Select the Pareto Optimal individuals
        print "Pareto Selection"
        selection = sel.selectPareto(offspring)

        # Compute the epsilon criteria [and check the stopping criteria]
        epsInd.append(eI.epsIndicator(pop, selection))
        #if len(epsInd) >1:
        #    eta = (epsInd[-1] - epsInd[-2]) / epsInd[-2]
        #    if eta < gv.epsMargin:
        #        stopCrit = True

        # The population is entirely replaced by the best individuals
        print "Replace the population \n"
        pop[:] = selection

        print "....................................... \n GENERATION ", g
        for ind in pop:
            print ind.fitness.values, "selected fit"
        print "....................................... \n"

        # Create Checkpoint if necessary
        if g % gv.fCheckPoint == 0:
            os.chdir(locator.pathMasterRes)

            print "Create CheckPoint", g, "\n"
            with open("CheckPoint" + str(g), "wb") as CPwrite:
                CPpickle = Pickler(CPwrite)
                cp = dict(population=pop,
                          generation=g,
                          networkList=ntwList,
                          epsIndicator=epsInd,
                          testedPop=invalid_ind)
                CPpickle.dump(cp)

    if g == gv.NGEN:
        print "Final Generation reached"
    else:
        print "Stopping criteria reached"

    # Saving the final results
    print "Save final results. " + str(
        len(pop)) + " individuals in final population"
    print "Epsilon indicator", epsInd, "\n"
    os.chdir(locator.pathMasterRes)

    with open("CheckPointFinal", "wb") as CPwrite:
        CPpickle = Pickler(CPwrite)
        cp = dict(population=pop,
                  generation=g,
                  networkList=ntwList,
                  epsIndicator=epsInd,
                  testedPop=invalid_ind)
        CPpickle.dump(cp)

    print "Master Work Complete \n"

    return pop, epsInd
Example #33
 def save(self):
     logging.info('Save event database')
     data = [ev.to_dict() for ev in self.events.values()]
     with open(DATA_PATH, 'wb') as file:
         pick = Pickler(file)
         pick.dump(data)
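
A matching loader sketch (DATA_PATH as above; the file holds the list of event dicts written by save):

 def load(self):
     logging.info('Load event database')
     with open(DATA_PATH, 'rb') as file:
         return Unpickler(file).load()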
Example #34
 def __setitem__(self, key, value):
     """..."""
     f = BytesIO()
     p = Pickler(f, self._protocol)
     p.dump(value)
     self.dict[key.encode(self.keyencoding)] = bz2.compress(f.getvalue())
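
The decompressing counterpart, sketched to mirror __setitem__ (same assumptions about self.dict and self.keyencoding):

 def __getitem__(self, key):
     """Decompress the stored bytes, then unpickle them."""
     data = bz2.decompress(self.dict[key.encode(self.keyencoding)])
     return Unpickler(BytesIO(data)).load()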
Example #35
def dumps(obj, protocol=None):
    file = StringIO()
    Pickler(file, protocol).dump(obj)
    return file.getvalue()
Example #36
 def save(self, filepath):
     with open(filepath, 'wb') as handle:
         saver = Pickler(handle, protocol=HIGHEST_PROTOCOL)
         saver.dump(self.fm)  # dump, not save: writes a complete pickle stream
Example #37
 def saveMX(self, mx):
     filemx = open('%s/%s' % ('db', 'matrix.mx'), 'wb')
     serializer = Pickler(filemx)
     serializer.dump(mx)
     filemx.close()
Example #38
    def split(self, logFile):

        self.outFile.write( "going to check "+ logFile+'\n')

        subsysRe = re.compile('^>> Tests for package ([A-Z].*/[A-Z].*) ran.')

        pkgTestStartRe  = re.compile('^===== Test \"(.*)\" ====')
        pkgTestEndRe    = re.compile('^\^\^\^\^ End Test (.*) \^\^\^\^')
        pkgTestResultRe = re.compile('.*---> test ([^ ]+) (had ERRORS|succeeded)')

        pkgStartRe = re.compile("^>> Entering Package (.*)")
        # pkgEndRe   = re.compile("^>> Leaving Package (.*)")
        pkgEndRe   = re.compile("^>> Tests for package (.*) ran.")
        
        infoPkg = {}
        pkgSubsysMap = {}
        subsysPkgMap = {}
        
        baseDir = os.path.split(logFile)[0]
        logDirs = os.path.join( baseDir,'unitTestLogs')
        print "logDirs ", logDirs
        if not os.path.exists(logDirs):
            os.makedirs(logDirs)

        lf = open(logFile,'r')
        lines = lf.xreadlines()

        startTime = time.time()
        nLines = 0
        testNames = {}
        testLines = {}
        pkgLines  = {}
        results   = {}
        pkgTests  = {}
        
        actPkg   = "None"
        actTest  = "None"
        actTstLines = 0
        actPkgLines = 0

        actLogLines = []
        startFound = False
        for line in lines:

            # write out log to individual log file ...
            if startFound and ">> Leaving Package " not in line:
                actLogLines.append(line)
                
            nLines += 1
            actTstLines += 1
            actPkgLines += 1
            subsysMatch = subsysRe.match(line)
            if subsysMatch:
                subsys, pkg = subsysMatch.group(1).split('/')
                if not pkgSubsysMap.has_key(pkg) : 
                    pkgSubsysMap[pkg] = subsys
                if subsysPkgMap.has_key(subsys) :
                    subsysPkgMap[subsys].append(pkg)
                else:
                    subsysPkgMap[subsys] = [pkg]
                
            pkgStartMatch = pkgStartRe.match(line)
            if pkgStartMatch:
                pkg = pkgStartMatch.group(1)
                actPkg = pkg
                pkgTests[pkg] = 0
                actPkgLines = 0
                startFound = True
                
            pkgEndMatch   = pkgEndRe.match(line)
            if pkgEndMatch:
                pkg = pkgEndMatch.group(1)
                if actPkg != pkg :
                    self.outFile.write( "pkgEndMatch> package mismatch: pkg found "+pkg+' actPkg='+actPkg+'\n')
                pkgLines[pkg] = actPkgLines

                if len(actLogLines) > 2 :
                    actLogDir = os.path.join(logDirs, pkg)
                    os.makedirs(actLogDir)
                    actLogFile = open(os.path.join(actLogDir, 'unitTest.log'), 'w')
                    actLogFile.write("".join(actLogLines))
                    actLogFile.close()
                    actLogLines = []
                startFound = False
                
            pkgTestResultMatch= pkgTestResultRe.match(line)
            if pkgTestResultMatch :  # this seems to only appear if there is an ERROR
                tstName = pkgTestResultMatch.group(1)
                results[tstName] = pkgTestResultMatch.group(2)
            
            pkgTestStartMatch = pkgTestStartRe.match(line)
            if pkgTestStartMatch:
                tst = pkgTestStartMatch.group(1)
                actTest = tst
                actTstLines = 0
                pkgTests[actPkg] += 1
                if testNames.has_key(actPkg):
                    testNames[actPkg].append(actTest)
                else:
                    testNames[actPkg] = [actTest]
                if actTest not in results:
                    results[actTest] = "succeeded" # set the default, no error seen yet
            
            pkgTestEndMatch   = pkgTestEndRe.match(line)
            if pkgTestEndMatch:
                tst = pkgTestEndMatch.group(1)
                if actTest != tst :
                    self.outFile.write( "pkgTestEndMatch> package mismatch: pkg found "+pkg+' actPkg='+actPkg+'\n')
                testLines[tst] = actTstLines

        stopTime = time.time()
        lf.close()
    
        self.outFile.write( "found a total of "+ str(nLines)+ ' lines in logfile.\n')
        self.outFile.write( "analysis took "+str(stopTime-startTime)+ ' sec.\n')

        self.outFile.write( "total number of tests: " +str( len(results.keys()) ) + '\n')
        nMax = 1000
        self.outFile.write( "tests with more than " +str(nMax) + " lines of logs:\n")
        for pkg, lines in testLines.items():
            if lines > nMax : self.outFile.write( "  "+ pkg+ ' : ' + str(lines) +'\n')

        self.outFile.write( "Number of tests for packages: \n" )
        noTests = 0
        nrTests = 0
        indent = '    '
        totalOK = 0
        totalFail = 0
        unitTestResults = {}
        for pkg, nTst in pkgTests.items():
            if nTst == 0:
                noTests += 1
            else:
                nrTests += 1
                if self.verbose: self.outFile.write( '-'*80 +'\n' )
                self.outFile.write( indent+pkg+' : ' )
                nOK = 0
                if self.verbose: self.outFile.write( "\n" )
                for tNam in testNames[pkg]:
                    if results[tNam] == 'succeeded':
                        nOK += 1
                        totalOK += 1
                    else:
                        totalFail += 1
                    if self.verbose :
                        self.outFile.write( indent*2 + tNam +' '+ results[tNam] + '\n')
                if self.verbose: self.outFile.write( indent+ pkg+" : ")
                self.outFile.write( indent + str(len(testNames[pkg]) ) + ' tests in total,  OK:'+str(nOK)+ ' fail:'+str(len(testNames[pkg])-nOK) +'\n')
                unitTestResults[pkg] = [testNames[pkg], nOK, len(testNames[pkg])-nOK ]
                
        self.outFile.write( indent+str(nrTests)+" packages  with   tests ("+str(float(nrTests)/float(len(pkgTests.keys())) )+")\n")
        self.outFile.write( indent+str(noTests)+" packages without tests ("+str(float(noTests)/float(len(pkgTests.keys())) )+")\n")
        self.outFile.write( indent+"in total:  tests OK : "+str(totalOK)+' tests FAIL : '+str(totalFail)+'\n')

        try:
            from pickle import Pickler
            resFile = open(baseDir+'/unitTestResults.pkl', 'wb')
            pklr = Pickler(resFile)
            pklr.dump(unitTestResults)
            pklr.dump(results)
            resFile.close()
            print "Successfully pickled results for unit tests ! "
        except Exception, e:
            print "ERROR during pickling results for unit tests:", str(e)
Example #39
# Game start
while not (game_over):
    print(word_masked)
    player_input = get_player_input()
    if player_input in letters_guessed:
        print("Vous avez déjà essayé la lettre " + player_input)
    elif player_input in word_random:
        letters_guessed.append(player_input)
    else:
        letters_guessed.append(player_input)
        lives_left -= 1
        print("{} ne se trouve pas dans le mot, il vous reste {} essai(s)".
              format(player_input, lives_left))

    word_masked = gen_masked_word(word_random, letters_guessed)

    if word_masked == word_random:
        print("Bien joué !  Le mot était {}.".format(word_random))
        game_over = True

        player_score = lives_left + len(word_random)
        scores = get_player_score(player_pseudo, player_score, scores)

        with open("scores", "wb") as scores_file:
            Pickler(scores_file).dump(scores)
    elif lives_left <= 0:
        print("Vous avez perdu :/ Le mot était {}.".format(word_random))
        game_over = True

print(scores)
Example #40
    def save(self, param):
        # saves the shell with all the subdirs.
        with open('syncdata.bin', 'wb') as f:
            Pickler(f, 3).dump(self.path)

        print('Data saved...')
Example #41
 def __init__(self, *args, **kwargs):
     Pickler.__init__(self, *args, **kwargs)
     self.dispatch_table = dispatch_table.copy()
     self.dispatch_table.update(self._extra_reducers)
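
dispatch_table is the per-instance reducer mapping documented in the pickle module; a minimal self-contained sketch of the same pattern (Python 3 names):

import copyreg, io, pickle

def reduce_complex(c):
    # a reducer returns (callable, args), as pickle expects
    return (complex, (c.real, c.imag))

buf = io.BytesIO()
p = pickle.Pickler(buf)
p.dispatch_table = copyreg.dispatch_table.copy()
p.dispatch_table[complex] = reduce_complex
p.dump(3 + 4j)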
Example #42
 def __init__(self, file, protocol=None):
     if protocol is None:
         protocol = DEFAULT_PROTOCOL
     Pickler.__init__(self, file, protocol=protocol)
     # map ids to dictionary. used to ensure that functions can share global env
     self.globals_ref = {}
Example #43
def get_trivago_datasets(columns,
                         percentage=1,
                         seed=1,
                         uitems_min=2,
                         lt_drop=0,
                         time=0):
    """Load train and test datasets
        - percentage - how much of train could be dropped
        - uitem_min - minimum number of interaction for user
        - lt_drop - how many % should be dropped from long tail
        - time - how many seconds from last action to keep - 0 = infinite
    """
    debug_print("Loading trivago datasets", level=0)
    columns = set(columns + COLUMNS)
    # Compute the cache file name; sort the columns because set iteration order is not deterministic
    file_name = str(
        hash_params(sorted(columns, reverse=True), percentage, seed,
                    uitems_min, lt_drop, time))
    # Check for existence
    os.makedirs(_script_relative(CACHE_FOLDER), exist_ok=True)
    dataset_path = _script_relative(os.path.join(CACHE_FOLDER, file_name))

    debug_print("Trying {}".format(dataset_path), level=2)
    # Check cached
    if not os.path.exists(dataset_path):
        debug_print("Not found", level=2)
        # Create dataset
        train_path = _script_relative(REL_TRAIN_PATH)
        test_path = _script_relative(REL_TEST_PATH)

        __pandas_modify_datasets(train_path, test_path)

        train = __pandas_get_dataset(train_path)
        debug_print("Train shape before {}".format(train.shape))

        train = __pandas_strip_columns(train, columns | {ACTION_TYPE})
        train = __pandas_trivago_invalid_rows(train)
        train = __pandas_strip_columns(train, columns)

        train = __pandas_trivago_drop_unique(train,
                                             USER_ID,
                                             percentage=percentage)
        train = __pandas_drop_top(train,
                                  USER_ID,
                                  percentage=lt_drop,
                                  min_items=uitems_min)
        train = __pandas_drop_time(train, time=time)
        debug_print("Train shape after {}".format(train.shape))

        test = __pandas_get_dataset(test_path)
        test = __pandas_strip_columns(test, columns | {ACTION_TYPE})
        test = __pandas_trivago_invalid_rows(test)
        test = __pandas_strip_columns(test, columns)
        debug_print("Dropping non train {}".format(test.shape), level=2)
        test = test[test[USER_ID].isin(train[USER_ID].unique())]
        debug_print("After non train {}".format(test.shape), level=2)
        # __pandas_reindex_values(train, test, column=USER_ID)
        # __pandas_reindex_values(train, test, column=REFERENCE)

        # Save dataset
        debug_print("Saving dataset {}".format(dataset_path))
        with open(dataset_path, "wb") as f:
            Pickler(f).dump((train, test))
    else:  # Load dataset
        with open(dataset_path, "rb") as f:
            debug_print("Found", level=2)
            train, test = Unpickler(f).load()
    print(len(set(test[USER_ID])))
    print(len(set(train[USER_ID]) & set(test[USER_ID])))
    __pandas_trivago_plot_density(train, USER_ID)
    return __pandas_to_coo(train, test)
Example #44
#!/usr/bin/env python
from __future__ import print_function
import sys
from _py2with3compatibility import run_cmd
from pickle import Pickler

def doDu(what):
  error, out = run_cmd('du -k -s %s' % what)
  if error:
    print("Error while getting directory size.")
    sys.exit(1)
  results = [l.split() for l in out.split("\n") if l.strip()]
  return dict([(pkg.strip().replace("src/", ''), int(sz.strip()) * 1024)
               for (sz, pkg) in results])

if __name__ == '__main__':
  try:
    f = open('dirSizeInfo.pkl', 'wb')
    pklr = Pickler(f, protocol=2)
    pklr.dump(doDu("src lib bin"))
    pklr.dump(doDu("src/*/*"))
    f.close()
  except Exception as e:
    print("ERROR during pickling results for dir size:", str(e))
    sys.exit(1)
  print("Successfully pickled results for dir size !")
Example #45
 def save_buffer(self, obj):
     """Fallback to save_string"""
     Pickler.save_string(self, str(obj))
Example #46
 def write(self, data):
     with open(self.chemin, 'wb') as f:
         Pickler(f).dump(data)
Example #47
 def save(self, path):
     grille = np.zeros((9, 9), dtype=int)
     modif = np.zeros((9, 9), dtype=bool)
     possibilites = []
     for i in range(9):
         possibilites.append([])
         for j in range(9):
             grille[i, j] = self.blocs[i, j].get_val()
             modif[i, j] = self.blocs[i, j].is_modifiable()
             possibilites[i].append(self.blocs[i, j].get_possibilites())
     with open(path, "wb") as fich:
         p = Pickler(fich)
         p.dump(grille)
         p.dump(modif)
         p.dump(possibilites)
         p.dump(self.chrono)
         p.dump(self.level)
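
A matching loader sketch, assuming the values come back in exactly the order save() dumped them:

 def load(self, path):
     with open(path, "rb") as fich:
         u = Unpickler(fich)
         grille = u.load()
         modif = u.load()
         possibilites = u.load()
         self.chrono = u.load()
         self.level = u.load()
     return grille, modif, possibilites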
Example #48
 def ecrire(self, donnee):
     with open(self.chemin, 'wb') as f:
         Pickler(f).dump(donnee)
Example #49
 def __init__(self, file, protocol, bin=None):
     Pickler.__init__(self, file, protocol)
     self.bin = bin
Example #50
                comentario.polarity = getS(tempo2)
            comentario.confidence = test.get('confidence')
            comentario.subjetivity = test.get('subjectivity')
            comentario.agreement = test.get('agreement')
            comentario.irony = test.get('irony')
            comentario.category = getCategoria(comentario.time, textfile, tags)
            trama = comentario.to_String2()
            file4.write(trama)
            comentarios.append(comentario)
            control[i] -= 1
            if control[i] == 0:
                i += 1

        file4.close()
        dbfile = open(ruta + sub + textfile, 'ab')
        Pickler(dbfile).dump(comentarios)
        dbfile.close()
        comments.append(comentarios)

arch3 = open(ruta + "usuarios" + extension, "w", encoding="utf8")
arch3.write(users)
arch3.close()
arch2 = open(ruta + "exa" + sub + extension, "w", encoding="utf8")
mess = ""
users = sorted(usuarios.items(), key=lambda kv: (kv[1], kv[0]), reverse=True)
i = 0
for p in users:
    if i < 10:
        mess += "Usuario " + str(p) + "\n"
        i += 1
    else:
Example #51
from collections import namedtuple

a = [1, 2, 3]
b = ('a', 'b', 'c')

""" pic.dump(a,  open('arquivo_a.txt', 'wb'))
c = pic.dumps(b, 2)
print(c)

d = pic.loads(c)
print(d)

e = pic.load(open('arquivo_a.txt', 'rb'))
print(e) """

""" Pickler(open('arquivo_a.sav', 'wb')).dump(a)
Pickler(file=open('arquivo_txt', 'wb')).dump(b)

c = Unpickler(open('arquivo_a.sav', 'rb')).load()
print(c) """

""" user = namedtuple('User', ['nome', 'sobrenome', 'idade'])
user_1 = user('Felipe', 'Corrêa', 33)
user_2 = user('Samanta', 'Kido', 33)
user_3 = user_1 + user_2
print(user_3.count(33)) """

""" from sklearn.datasets import make_regression
a, b = make_regression(n_samples=10, n_features=5, n_targets=3)
print(a)
print(b) """
Example #52
 def __init__(self, ip = "", port = 8081) -> None:
     
     version = "0.1 Beta"
     
     try:
         with open("connexion.login", "rb") as login:
             unpick = Unpickler(login)
             logers = unpick.load()
             logers : dict
             logers["test"]["pwd"]
             
     except IOError:
         
         with open("connexion.login", "wb") as file:
             tok = TokenTime(taille=20, time_out=1)
             
             dict_base = {
                 "uadmin": {
                     "pwd": "padmin",
                     "admin": True,
                     "token": tok.getToken()
                 },
                 
                 "test": {
                     "pwd": "test",
                     "admin": False,
                     "token": "ABCD0000000000000000"
                 }
             }
             
             picklefile = Pickler(file)
             picklefile.dump(dict_base)
     
     except:
         print("Une erreur a été relevée.")
     
     # ------------------------------------
     
     self.accounts = None
     self.accounts: dict
     
     with open("connexion.login", "rb") as file:
         pic2 = Unpickler(file)
         self.accounts = pic2.load()
         
     self.host = soc(AF_INET, SOCK_STREAM)
     self.host.bind((ip, port))
     self.host.listen(5)
     self.connected = []
     self.clients = []
     
     print(f"Version {version}")
     
     self.looper = True
     self._thread_loop(self.demand_start)
     
     self.open = True
     self.true = True
     while self.true:
         if self.open:
             # start() must be called on a Thread instance, not on the class
             thread = Thread(None, self._thread_loop, None, (self.demand_start,))
             thread.start()
             self.open = False
Example #53
    def parseLog(self):
        logData = {}
        logRE = re.compile(
            '^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
        max_steps = 0
        for logFile in glob.glob(self.basedir + '/[1-9]*/step[0-9]*.log'):
            m = logRE.match(logFile)
            if not m: continue
            wf = m.group(1)
            step = int(m.group(3))
            if step > max_steps: max_steps = step
            if not logData.has_key(wf):
                logData[wf] = {
                    'steps': {},
                    'events': [],
                    'failed': [],
                    'warning': []
                }
            if not logData[wf]['steps'].has_key(step):
                logData[wf]['steps'][step] = logFile
        cache_read = 0
        log_processed = 0
        for wf in logData:
            for k in logData[wf]:
                if k == 'steps': continue
                for s in range(0, max_steps):
                    logData[wf][k].append(-1)
            index = 0
            for step in sorted(logData[wf]['steps']):
                data = [0, 0, 0]
                logFile = logData[wf]['steps'][step]
                json_cache = os.path.dirname(logFile) + "/logcache_" + str(
                    step) + ".json"
                cache_ok = False
                if (os.path.exists(json_cache)) and (
                        os.path.getmtime(logFile) <=
                        os.path.getmtime(json_cache)):
                    try:
                        jfile = open(json_cache, "r")
                        data = json.load(jfile)
                        jfile.close()
                        cache_read += 1
                        cache_ok = True
                    except:
                        os.remove(json_cache)
                if not cache_ok:
                    try:
                        es_parse_log(logFile)
                    except Exception as e:
                        print "Sending log information to elasticsearch failed", str(
                            e)
                    inFile = open(logFile)
                    for line in inFile:
                        if '%MSG-w' in line: data[1] = data[1] + 1
                        if '%MSG-e' in line: data[2] = data[2] + 1
                        if 'Begin processing the ' in line:
                            data[0] = data[0] + 1
                    inFile.close()
                    jfile = open(json_cache, "w")
                    json.dump(data, jfile)
                    jfile.close()
                    log_processed += 1
                logData[wf]['events'][index] = data[0]
                logData[wf]['failed'][index] = data[2]
                logData[wf]['warning'][index] = data[1]
                index += 1
            del logData[wf]['steps']

        print "Log processed: ", log_processed
        print "Caches read:", cache_read
        from pickle import Pickler
        outFile = open(os.path.join(self.outdir, 'runTheMatrixMsgs.pkl'), 'wb')
        pklFile = Pickler(outFile)
        pklFile.dump(logData)
        outFile.close()
        return
Example #54
 def __delattr__(self, name: str) -> None:
     
     if name == "accounts":
         with open("connexion.login", "wb") as file:
             picklefile = Pickler(file)
             picklefile.dump(self.accounts)
Example #55
def dump(obj, file, protocol=None):
    Pickler(file, protocol).dump(obj)
Example #56
 def __init__(self, *args, **kargs):
     Pickler.__init__(self, *args, **kargs)
     self.stack = []
Example #57
 def dumps(self, graph, protocol):
     src = StringIO()
     pickler = Pickler(src)
     pickler.persistent_id = self.persistent_id
     pickler.dump(graph)
     return src.getvalue()
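
The load side of this pattern installs the documented persistent_load hook on an Unpickler; a minimal sketch (self.persistent_load is assumed to be defined on the same class):

 def loads(self, data):
     unpickler = Unpickler(StringIO(data))
     # resolve the ids emitted by persistent_id back into objects
     unpickler.persistent_load = self.persistent_load
     return unpickler.load()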
Example #58
    def save_global(self, obj, name=None, pack=struct.pack):
        if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
            if obj in _BUILTIN_TYPE_NAMES:
                return self.save_reduce(_builtin_type,
                                        (_BUILTIN_TYPE_NAMES[obj], ),
                                        obj=obj)

        if name is None:
            name = obj.__name__

        modname = getattr(obj, "__module__", None)
        if modname is None:
            try:
                # whichmodule() could fail, see
                # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
                modname = pickle.whichmodule(obj, name)
            except Exception:
                modname = '__main__'

        if modname == '__main__':
            themodule = None
        else:
            __import__(modname)
            themodule = sys.modules[modname]
            self.modules.add(themodule)

        if hasattr(themodule, name) and getattr(themodule, name) is obj:
            return Pickler.save_global(self, obj, name)

        typ = type(obj)
        if typ is not obj and isinstance(obj, (type, types.ClassType)):
            d = dict(obj.__dict__)  # copy dict proxy to a dict
            if not isinstance(d.get('__dict__', None), property):
                # don't extract dict that are properties
                d.pop('__dict__', None)
            d.pop('__weakref__', None)

            # hack as __new__ is stored differently in the __dict__
            new_override = d.get('__new__', None)
            if new_override:
                d['__new__'] = obj.__new__

            # workaround for namedtuple (hijacked by PySpark)
            if getattr(obj, '_is_namedtuple_', False):
                self.save_reduce(_load_namedtuple, (obj.__name__, obj._fields))
                return

            self.save(_load_class)
            self.save_reduce(typ, (obj.__name__, obj.__bases__, {
                "__doc__": obj.__doc__
            }),
                             obj=obj)
            d.pop('__doc__', None)
            # handle property and staticmethod
            dd = {}
            for k, v in d.items():
                if isinstance(v, property):
                    k = ('property', k)
                    v = (v.fget, v.fset, v.fdel, v.__doc__)
                elif isinstance(v, staticmethod) and hasattr(v, '__func__'):
                    k = ('staticmethod', k)
                    v = v.__func__
                elif isinstance(v, classmethod) and hasattr(v, '__func__'):
                    k = ('classmethod', k)
                    v = v.__func__
                dd[k] = v
            self.save(dd)
            self.write(pickle.TUPLE2)
            self.write(pickle.REDUCE)

        else:
            raise pickle.PicklingError("Can't pickle %r" % obj)
Example #59
 def __init__(self, file, protocol=None):
     Pickler.__init__(self, file, protocol)
     # set of modules to unpickle
     self.modules = set()
     # map ids to dictionary. used to ensure that functions can share global env
     self.globals_ref = {}
Example #60
    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.

        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.
        """
        write = self.write

        if name is None:
            name = obj.__name__
        modname = pickle.whichmodule(obj, name)
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        if themodule:
            self.modules.add(themodule)
            if getattr(themodule, name, None) is obj:
                return self.save_global(obj, name)

        # a builtin_function_or_method which comes in as an attribute of some
        # object (e.g., object.__new__, itertools.chain.from_iterable) will end
        # up with modname "__main__" and so end up here. But these functions
        # have no __code__ attribute in CPython, so the handling for 
        # user-defined functions below will fail.
        # So we pickle them here using save_reduce; have to do it differently
        # for different python versions.
        if not hasattr(obj, '__code__'):
            if PY3:
                if sys.version_info < (3, 4):
                    raise pickle.PicklingError("Can't pickle %r" % obj)
                else:
                    rv = obj.__reduce_ex__(self.proto)
            else:
                if hasattr(obj, '__self__'):
                    rv = (getattr, (obj.__self__, name))
                else:
                    raise pickle.PicklingError("Can't pickle %r" % obj)
            return Pickler.save_reduce(self, obj=obj, *rv)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
                or themodule is None):
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            klass = getattr(themodule, name, None)
            if klass is None or klass is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)