Example #1
    def _get_unpickler(self, pickle):
        file = cStringIO.StringIO(pickle)
        unpickler = cPickle.Unpickler(file)
        unpickler.persistent_load = self._persistent_load

        def find_global(modulename, name):
            self.identity = "%s.%s" % (modulename, name)
            return None

        unpickler.find_global = find_global

        return unpickler
Example #2
def read_pickle(filename):
    try:
        fp = open(filename, "rb")  # binary mode for pickle data
    except IOError:
        print "Could not read file:", filename
        return None

    u = pickle.Unpickler(fp)
    parsed_data = u.load()
    fp.close()
    #print "read", parsed_data['summary']
    return parsed_data
Example #3
def load_object(fname, zip=0, nofind=0, verbose=0):
    """Loads an object from disk. By default, this handles zipped files
    and searches in the usual places for OCRopus. It also handles some
    class names that have changed."""
    if not nofind:
        fname = ocropus_find_file(fname)
    if verbose:
        print "# loading object",fname
    if zip == 0 and fname.endswith(".gz"):
        zip = 1
    if zip > 0:
        # with gzip.GzipFile(fname,"rb") as stream:
        with os.popen("gunzip < '%s'" % fname, "rb") as stream:
            unpickler = cPickle.Unpickler(stream)
            unpickler.find_global = unpickle_find_global
            return unpickler.load()
    else:
        with open(fname,"rb") as stream:
            unpickler = cPickle.Unpickler(stream)
            unpickler.find_global = unpickle_find_global
            return unpickler.load()
Example #4
def _makeUnpickler(file):
    up = cPickle.Unpickler(file)
    # see http://docs.python.org/2/library/pickle.html#subclassing-unpicklers

    def find_global(modname, clsname):
        try:
            return substituteClasses[(modname, clsname)]
        except KeyError:
            mod = reflect.namedModule(modname)
            return getattr(mod, clsname)
    up.find_global = find_global
    return up
Example #5
    def __init__(self, filename, unwrap=False, **append_columns):
        # set the file
        self._file = gzip.open(filename, "rb")

        # save whether we should unwrap when reading
        self._unwrap = unwrap

        # save additional columns to append
        self._append_columns = append_columns

        # set up the unpickler
        self._unpickler = pickle.Unpickler(self._file)
Example #6
    def classifier(self):
        if self.__classifier is None:
            mtime = os.stat(self.path).st_mtime
            if self.path in self.cache:
                classifier, old_mtime = self.cache[self.path]
                if mtime == old_mtime:
                    return classifier

            unpickler = cPickle.Unpickler(self.open())
            self.__classifier = unpickler.load()
            self.cache[self.path] = (self.__classifier, mtime)
        return self.__classifier
Example #7
def dump_sigfile(a):
    output = []

    p1 = pickle.Unpickler(open(a, "rb"))
    a_data = p1.load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))

    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))

    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))

    output.append("basehash: %s" % (a_data['basehash']))

    for dep in a_data['gendeps']:
        output.append("List of dependencies for variable %s is %s" %
                      (dep, a_data['gendeps'][dep]))

    for dep in a_data['varvals']:
        output.append("Variable %s value is %s" %
                      (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" %
                      (a_data['runtaskdeps']))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" %
                      (a_data['file_checksum_values']))

    if 'runtaskhashes' in a_data:
        for dep in a_data['runtaskhashes']:
            output.append("Hash for dependent task %s is %s" %
                          (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" %
                      a_data['taint'])

    data = a_data['basehash']
    for dep in a_data['runtaskdeps']:
        data = data + a_data['runtaskhashes'][dep]

    for c in a_data['file_checksum_values']:
        data = data + c[1]

    if 'taint' in a_data:
        data = data + a_data['taint']

    h = hashlib.md5(data).hexdigest()
    output.append("Computed Hash is %s" % h)

    return output
Example #8
    def unpickle(self, throw=True, throw_dead=True):
        """Deserialize `data` into an object."""
        _vv and IOLOG.debug('%r.unpickle()', self)
        fp = cStringIO.StringIO(self.data)
        unpickler = cPickle.Unpickler(fp)
        unpickler.find_global = self._find_global

        try:
            # Must occur off the broker thread.
            obj = unpickler.load()
        except (TypeError, ValueError), ex:
            raise StreamError('invalid message: %s', ex)
Example #9
def test_pickler_unpickler():
    t = {'name': ['v1', 'v2']}

    f = file('test.bin', 'wb')
    pick = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
    pick.dump(t)
    f.close()

    f = file('test.bin', 'rb')
    unpick = pickle.Unpickler(f)
    p = unpick.load()
    print p
    f.close()
Example #10
	def cache_restore(self):
		# type: () -> None
		# check if there is a cache file
		cache_file_name = self.cache_file_name()
		if os.path.isfile(cache_file_name):
			with open(cache_file_name, 'rb') as cache_file:
				unpickler = pickle.Unpickler(cache_file)
				self._instances = unpickler.load()
				for instance in self._instances:
					logger.debug("loaded cached instance %s" % instance.name)
					instance.available = False
					instance.state = 4  # state UNKNOWN
Example #11
def test_find_global():
    import time, cStringIO
    entry = time.strptime('Fri Mar 27 22:20:42 2017')
    f = cStringIO.StringIO()
    cPickle.Pickler(f).dump(entry)

    f = cStringIO.StringIO(f.getvalue())
    e = cPickle.Unpickler(f).load()
    assert e == entry

    f = cStringIO.StringIO(f.getvalue())
    up = cPickle.Unpickler(f)
    up.find_global = None
    with pytest.raises(cPickle.UnpicklingError) as e:
        up.load()
    assert str(e.value) == "Global and instance pickles are not supported."

    f = cStringIO.StringIO(f.getvalue())
    up = cPickle.Unpickler(f)
    up.find_global = lambda module, name: lambda a, b: (name, a, b)
    e = up.load()
    assert e == ('struct_time', (2017, 3, 27, 22, 20, 42, 4, 86, -1), {})
Example #12
    def __init__(self):

        membuf = io.BytesIO()

        pickler = pickle.Pickler(membuf, protocol=pickle.HIGHEST_PROTOCOL)
        unpickler = pickle.Unpickler(membuf)

        pickler.persistent_id = self.persistent_id
        unpickler.persistent_load = self.persistent_load

        self.pickler = pickler
        self.unpickler = unpickler
        self.buffer = membuf
Example #13
def get_samples():
  label_samples = collections.defaultdict(list)
  label_scores = collections.defaultdict(list)
  with open(FLAGS.load_samples, 'rb') as f_in:
    u = cPickle.Unpickler(f_in)
    while True:
      try:
        entry = u.load()
        label_samples[entry['label']].append(entry['states'])
        label_scores[entry['label']].append(entry['score'])
      except EOFError:
        break
  return label_samples, label_scores
Example #14
def NewObjectReader_get_unpickler(self, pickle):
    file = cStringIO.StringIO(pickle)
    unpickler = cPickle.Unpickler(file)
    unpickler.persistent_load = self._persistent_load
    factory = classfactory.ClassFactory
    conn = self._conn

    def find_global(modulename, name):
        return factory(conn, modulename, name)

    unpickler.find_global = find_global

    return unpickler
Example #15
def readGrooves():
    global groovedict
    global groovelib_lookup
    try:
        f = open(common.groovesFile, "rb")
        p = cPickle.Unpickler(f)
        groovedict = p.load()
        groovelib_lookup = p.load()
        f.close()
    except IOError:
        common.printOutput(
            "Error opening " + common.groovesFile +
            ". Please check your settings, then refresh the grooves.", True)
Example #16
def unpickle_hidden(config, out, max_sentences=0):
    hidden_list = []
    with open(config['decode_hidden'], 'rb') as f_in:
        unpickler = cPickle.Unpickler(f_in)
        while (max_sentences == 0
               or len(hidden_list) < max_sentences):
            try:
                hidden_list.append(np.array(unpickler.load()['states']))
            except EOFError:
                break
    with tf.Session() as session:
        model = get_inference_model(config, session, hidden=True)
        decode_hidden(session, model, config, out, hidden_list)
Example #17
def load(fileName) :

    import cPickle
    
    if not os.path.exists(fileName) :
        raise ValueError, "file does not exist at %s" % fileName

    fileHandle = open(fileName, "rb")

    u = cPickle.Unpickler(fileHandle)
    obj = u.load()
    fileHandle.close()
    return obj
Example #18
    def __init__(self, file, use_proxy=True, use_hash=False):
        """Create a ROOT unpickler.
        `file` should be a ROOT TFile.
        """
        global xserial
        xserial += 1
        self.__use_proxy = use_proxy
        self.__file = file
        self.__io = IO_Wrapper()
        self.__unpickle = cPickle.Unpickler(self.__io)
        self.__unpickle.persistent_load = self._persistent_load
        self.__unpickle.find_global = self._find_class
        self.__n = 0
        self.__serial = '{0:d}-'.format(xserial)
        xdict[self.__serial] = file

        if use_hash:
            htab = {}
            ctab = {}
            for k in file.GetListOfKeys():
                nm = k.GetName()
                cy = k.GetCycle()
                htab[(nm, cy)] = k
                if cy > ctab.get(nm, 0):
                    ctab[nm] = cy
                    htab[(nm, 9999)] = k
            file._htab = htab
            oget = file.Get

            def xget(nm0):
                nm = nm0
                ipos = nm.find(';')
                if ipos >= 0:
                    cy = nm[ipos + 1:]  # cycle may have more than one digit
                    if cy == '*':
                        cy = 10000
                    else:
                        cy = int(cy)
                    nm = nm[:ipos]  # strip the ';cycle' suffix
                else:
                    cy = 9999
                ret = htab.get((nm, cy), None)
                if not ret:
                    log.warning(
                        "didn't find {0} {1} {2}".format(nm, cy, len(htab)))
                    return oget(nm0)
                #ctx = ROOT.TDirectory.TContext (file)
                ret = ret.ReadObj()
                #del ctx
                return ret
            file.Get = xget
Example #19
    def __init__(self, cooker):


        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        self.clean = {}
        self.checked = {}
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True

        if self.cachedir in [None, '']:
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
        else:
            self.has_cache = True
            self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
            
            bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
            try:
                os.stat( self.cachedir )
            except OSError:
                bb.mkdirhier( self.cachedir )

        if not self.has_cache:
            return            

        # If any of configuration.data's dependencies are newer than the
        # cache there isn't even any point in loading it...
        newest_mtime = 0
        deps = bb.data.getVar("__depends", cooker.configuration.data, True)
        for f,old_mtime in deps:
            if old_mtime > newest_mtime:
                newest_mtime = old_mtime

        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
            try:
                p = pickle.Unpickler(file(self.cachefile, "rb"))
                self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError, 'Cache Version Mismatch'
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError, 'Bitbake Version Mismatch'
            except EOFError:
                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except:
                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                self.depends_cache = {}
        else:
            bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
Example #20
def load_omnical_metrics(filename):
    """
    load omnical metrics file

    Input:
    ------
    filename : str
        path to omnical metrics file

    Output:
    -------
    metrics : dictionary
        dictionary containing omnical metrics
    """
    # get filetype
    filetype = filename.split('.')[-1]

    # load json
    if filetype == 'json':
        with open(filename, 'r') as f:
            metrics = json.load(f, object_pairs_hook=odict)

        # ensure keys of ant_dicts are not strings
        # loop over pols
        for h, p in enumerate(metrics.keys()):
            # loop over items in each pol metric dict
            for k in metrics[p].keys():
                if isinstance(metrics[p][k], (dict, odict)):
                    if isinstance(list(metrics[p][k].values())[0], list):
                        metrics[p][k] = odict([(int(i),
                                                np.array(metrics[p][k][i]))
                                               for i in metrics[p][k]])
                    elif isinstance(
                            list(metrics[p][k].values())[0],
                        (np.unicode, np.unicode_)):
                        metrics[p][k] = odict([
                            (int(i), metrics[p][k][i].astype(np.complex128))
                            for i in metrics[p][k]
                        ])

                elif isinstance(metrics[p][k], list):
                    metrics[p][k] = np.array(metrics[p][k])

    # load pickle
    elif filetype == 'pkl':
        with open(filename, 'rb') as f:
            inp = pkl.Unpickler(f)
            metrics = inp.load()
    else:
        raise IOError("Filetype not recognized, try a json or pkl file")

    return metrics
Example #21
File: cache.py  Project: gosborne/yp
    def save_merge(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            p = pickle.Unpickler(file(self.cachefile, "rb"))
            data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        for f in [
                y for y in os.listdir(os.path.dirname(self.cachefile))
                if y.startswith(os.path.basename(self.cachefile) + '-')
        ]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                p = pickle.Unpickler(file(f, "rb"))
                extradata, version = p.load()
            except (IOError, EOFError):
                extradata, version = self.create_cachedata(), None

            if version != self.__class__.CACHE_VERSION:
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
        p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
Example #22
def test_environment_is_unchanged(l):
    s = b''.join(l)
    #print('%r' % s)
    f.write('%r,\n' % s)
    unpickler = cPickle.Unpickler(cStringIO.StringIO(s))
    unpickler.find_global = None
    lcls = locals()
    gbls = globals()
    try:
        unpickler.load()
    except Exception:
        pass
    assert lcls == locals()
    assert gbls == globals()
Example #23
def mergeOTGSResults(nbrProc, directory, algoName, printIO=False):
    results = np.zeros(shape=(nbrProc, nbrProc))
    for p1 in xrange(nbrProc):
        for p2 in xrange(p1):
            res = directory + str(p1) + '-' + str(
                p2) + '/output_' + algoName + '/result.bin'
            f = open(res, 'rb')
            p = pck.Unpickler(f)
            results[p1, p2] = p.load()
            results[p2, p1] = results[p1, p2]
            f.close()
    saveSymMatrixEig(directory + '/result_' + algoName, results)
    if printIO:
        print('Results merged in ' + directory + ' ...')
Example #24
def uncpickle(fPath):  # Polemos: Compatibility fix
    """Same as cPickle.loads(f) but does module name translation"""
    if isinstance(fPath, (unicode, str)):
        try:
            with open(fPath, 'rb') as f:
                pickleObj = cPickle.Unpickler(f)
                pickleObj.find_global = findClass
                return pickleObj.load()
        except:
            with open(fPath, 'r') as f:
                # Polemos: The Python 2.x version of pickle has a
                # bug when in binary (slightly more efficient but
                # also needed for newer protocols), thus besides
                # compatibility with older saved Wrye Mash settings
                # it is needed as a failover. More info:
                # https://bugs.python.org/issue11564
                pickleObj = cPickle.Unpickler(f)
                pickleObj.find_global = findClass
                return pickleObj.load()
    else:
        pickleObj = cPickle.Unpickler(fPath)
        pickleObj.find_global = findClass
        return pickleObj.load()
Example #25
    def set_session_dir(self, session_dir):
        self._storage_file = os.path.join(session_dir, "user_download_choice.pickle")
        if DEBUG:
            print >> sys.stderr, "UserDownloadChoice: Using file:", self._storage_file

        try:
            self._choices = cPickle.Unpickler(open(self._storage_file, "rb")).load()
        except:
            self._choices = {}

        # Ensure that there is a "download_state" dictionary. It
        # should contain infohash/state tuples.
        if not "download_state" in self._choices:
            self._choices["download_state"] = {}
Example #26
    def load_from_file(model_file):
        """\
        Load the model from a pickle file or stream
        (supports GZip compression).
        """
        log_info('Loading model from file ' + str(model_file))
        fh = file_stream(model_file, mode='rb', encoding=None)
        unpickler = pickle.Unpickler(fh)
        model = unpickler.load()
        if not hasattr(model, 'attr_mask'):
            model.attr_mask = model.get_attr_mask()
        fh.close()
        log_info('Model loaded successfully.')
        return model
Example #27
    def get(self, key, defValue=None):
        filename = self.dir + self.prefix + key + self.postfix
        if not os.path.exists(filename):
            return None
        create = os.stat(filename)[8]  # st_mtime
        now = time.time()
        if now - create > self.timeout:
            print "FileCache: Timeout"
            print self.timeout + create, now
            return defValue
        f = open(filename, "rb")
        unpickle = cPickle.Unpickler(f)
        value = unpickle.load()
        return value
Example #28
def leer():
    file3 = open('students.db', 'rb')
    unpickler = pickle.Unpickler(file3)
    v = estudiante.numero_de_estudiantes
    for i in range(v):
        read_alumno = unpickler.load()
        print(f' NUA: {read_alumno.nua}')
        print(f' Nombre: {read_alumno.nombre}')
        print(f' Correo: {read_alumno.correo}')
        print(f' Contrasena: {read_alumno.contrasena}')
    file3.close()
    print('El numero de estudiantes en la clase es: ' +
          str(estudiante.numero_de_estudiantes))
    print('Alumnos mostrados')
Example #29
def Unpickler(fileobj):
    """Like cPickle.Unpickler, but with our symbol-renamer.

    Note that like cPickle.Unpickler, this is not actually a class and
    you therefore can't subclass it.  It also doesn't allow us to load
    classes that changed from new- to old-style, so if you need that,
    see StyleChangeUnpickler below.
    """
    # With cPickle, to override how global-lookup is done, you just define
    # find_global.  See the docs for details:
    # https://docs.python.org/2/library/pickle.html#subclassing-unpicklers
    unpickler = cPickle.Unpickler(fileobj)
    unpickler.find_global = _renamed_symbol_loader
    return unpickler
Example #30
    def __init__(self, filename=None):
        """If the filename parameter is passed it will create or use the given file, otherwise it uses a temporary file"""

        if filename is None:
            self.fd = tempfile.TemporaryFile()
            self.wpos = 0
        else:
            self.fd = open(filename, 'a+b')
            self.fd.seek(0, os.SEEK_END)
            self.wpos = self.fd.tell()
        self.rpos = 0
        self.pickler = cPickle.Pickler(self.fd)
        self.unpickler = cPickle.Unpickler(self.fd)
        self.size = 0