def validate_result(result, filter_plugins=True, print_files=False):
    '''Validate cached.filelist for the given result'''
    path_to_report_dir = result.get_report_dir()
    path_to_file = os.path.join(path_to_report_dir, 'cached.filelist')
    print "validating %s" % path_to_file
    if result.isThumbnail:
        print "cannot validate: thumbnail"
        return None
    # for this_type in dmtypes.FILESET_TYPES:
    #     dmfs = result.get_filestat(this_type)
    #     if dmfs.action_state != 'L':
    #         print "cannot validate: not local"
    #         return None
    dmfs = result.get_filestat(dmtypes.SIG)
    # Get the cached filelist from the cached.filelist file
    try:
        with open(path_to_file, 'rb') as fhandle:
            pickle = Unpickler(fhandle)
            cached_filelist = pickle.load()
    except IOError as ioerror:
        print "%s" % ioerror
        return None
    # Get a list of files currently on the filesystem
    dirs = [dmfs.result.get_report_dir(), dmfs.result.experiment.expDir]
    current_fs_filelist = get_walk_filelist(dirs)
    # Ignore plugin_out directories
    if filter_plugins:
        current_fs_filelist = [filename for filename in current_fs_filelist if '/plugin_out' not in filename]
    # Ignore the cached.filelist file itself
    current_fs_filelist = [filename for filename in current_fs_filelist if 'cached.filelist' not in filename]
    # Ignore the status.txt file
    current_fs_filelist = [filename for filename in current_fs_filelist if 'status.txt' not in filename]
    # Ignore the serialized_*.json files
    current_fs_filelist = [filename for filename in current_fs_filelist if 'serialized_' not in filename]
    # See if there are differences
    #leftovers = list(set(cached_filelist) - set(current_fs_filelist))
    # N.B. The difference below tells us, "Of the files on the filesystem
    # right now, how many are NOT in the cached.filelist file?" This holds
    # even if cached.filelist contains more files than are currently on the
    # filesystem, so we do not care whether any action_state is not 'L':
    # we are only looking for a deficient cached.filelist.
    leftovers = list(set(current_fs_filelist) - set(cached_filelist))
    if print_files:
        for i, item in enumerate(leftovers):
            if not i:
                print "FILES MISSING FROM CACHED.FILELIST:"
            print item
    else:
        if len(leftovers) > 0:
            print "FILES MISSING FROM CACHED.FILELIST: %d" % len(leftovers)
    print "- %s\n" % ("Not valid" if len(leftovers) > 0 else "Valid")
    return None
def test_config_and_collector_pickling(self):
    from cPickle import Pickler, Unpickler
    dir1 = self.tmpdir.ensure("somedir", dir=1)
    config = py.test.config._reparse([self.tmpdir])
    col = config.getfsnode(config.topdir)
    col1 = col.join(dir1.basename)
    assert col1.parent is col
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(config)
    pickler.dump(col)
    pickler.dump(col1)
    pickler.dump(col)
    io.seek(0)
    unpickler = Unpickler(io)
    newconfig = unpickler.load()
    topdir = self.tmpdir.ensure("newtopdir", dir=1)
    newconfig._initafterpickle(topdir)
    topdir.ensure("somedir", dir=1)
    newcol = unpickler.load()
    newcol2 = unpickler.load()
    newcol3 = unpickler.load()
    assert newcol2._config is newconfig
    assert newcol2.parent == newcol
    assert newcol._config is newconfig
    assert newconfig.topdir == topdir
    assert newcol3 is newcol
    assert newcol.fspath == topdir
    assert newcol2.fspath.basename == dir1.basename
    assert newcol2.fspath.relto(topdir)
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads objects
    saved with older versions of Eelbrain, and allows using a system file
    dialog to select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None, a system file dialog will be used.
        If the user cancels the file dialog, a RuntimeError is raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")

    if not os.path.exists(file_path):
        new_path = os.extsep.join((file_path, 'pickled'))
        if os.path.exists(new_path):
            file_path = new_path

    # open in binary mode: pickle data is not text
    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        unpickler.find_global = map_paths
        obj = unpickler.load()
    return obj
def load_weights(self):
    '''Loads the stored data from previous sessions, if possible.'''
    valid = False
    try:
        fp = open(self.filename, 'r')
    except IOError:
        self.log_debug(11, "Couldn't read stats file '%s'", self.filename)
    else:
        self.log_debug(11, "Loading stats file '%s'", self.filename)
        try:
            pickler = Unpickler(fp)
            self.input_headers = pickler.load()
            wi = pickler.load()
            self.output_headers = pickler.load()
            wo = pickler.load()
            #self.seasons = pickler.load()
            #self.powers = pickler.load()
            #self.locs = pickler.load()
            #self.provinces = pickler.load()
            #self.centers = pickler.load()
            #self.coastals = pickler.load()
            #self.coastlines = pickler.load()
            #self.borders = pickler.load()
        finally:
            fp.close()
        ni = len(self.input_headers)
        no = len(self.output_headers)
        nh = len(wo)
        self.log_debug(7, "%d inputs => %d hidden => %d outputs", ni, nh, no)
        self.net = NN(ni, nh, no, wi, wo)
        valid = True
    return valid
def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
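A minimal usage sketch (not from any of the sources above) of the dump/seek/load round trip that deepCopy relies on; the sample data is invented for illustration:

from cStringIO import StringIO
from cPickle import Pickler, Unpickler

def demo_deep_copy():
    original = {'a': [1, 2, 3], 'b': {'nested': True}}
    stream = StringIO()
    Pickler(stream, 1).dump(original)  # protocol 1, as in deepCopy above
    stream.seek(0)                     # rewind before reading back
    copy = Unpickler(stream).load()
    assert copy == original and copy is not original
    assert copy['a'] is not original['a']  # nested containers are copied too
    return copy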
def setklassstate(self, object):
    try:
        oid = object._p_oid
        __traceback_info__ = oid
        p, serial = self._storage.load(oid, self._version)
        file = StringIO(p)
        unpickler = Unpickler(file)
        unpickler.persistent_load = self._persistent_load
        copy = unpickler.load()
        klass, args = copy
        if klass is not ExtensionKlass:
            LOG('ZODB', ERROR,
                "Unexpected klass when setting class state on %s"
                % getattr(object, '__name__', '(?)'))
            return
        copy = apply(klass, args)
        object.__dict__.clear()
        object.__dict__.update(copy.__dict__)
        object._p_oid = oid
        object._p_jar = self
        object._p_changed = 0
        object._p_serial = serial
    except:
        LOG('ZODB', ERROR, 'setklassstate failed', error=sys.exc_info())
        raise
def load_state(self, state):
    """Load an image_set_list's state from the string returned from save_state"""
    self.__image_sets = []
    self.__image_sets_by_key = {}

    # Make a safe unpickler
    p = Unpickler(StringIO(state))

    def find_global(module_name, class_name):
        logger.debug("Pickler wants %s:%s", module_name, class_name)
        if module_name not in ("numpy", "numpy.core.multiarray"):
            logger.critical(
                "WARNING WARNING WARNING - your batch file has asked to load %s.%s."
                " If this looks in any way suspicious please contact us at www.cellprofiler.org",
                module_name, class_name,
            )
            raise ValueError("Illegal attempt to unpickle class %s.%s"
                             % (module_name, class_name))
        __import__(module_name)
        mod = sys.modules[module_name]
        return getattr(mod, class_name)

    p.find_global = find_global
    count = p.load()
    all_keys = [p.load() for i in range(count)]
    self.__legacy_fields = p.load()
    #
    # Have to do in this order in order for the image set's
    # legacy_fields property to hook to the right legacy_fields
    #
    for i in range(count):
        self.get_image_set(all_keys[i])
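A self-contained sketch of the same whitelisting idea with the CellProfiler specifics stripped out; the Harmless class and the allowed-modules tuple are invented for illustration:

import sys
from cStringIO import StringIO
from cPickle import Pickler, Unpickler

class Harmless(object):
    pass

def restricted_load(data, allowed_modules=("numpy", "numpy.core.multiarray")):
    # find_global is consulted for every GLOBAL opcode, so any class outside
    # the whitelist is rejected before it can be instantiated
    def find_global(module_name, class_name):
        if module_name not in allowed_modules:
            raise ValueError("Illegal attempt to unpickle class %s.%s"
                             % (module_name, class_name))
        __import__(module_name)
        return getattr(sys.modules[module_name], class_name)
    unpickler = Unpickler(StringIO(data))
    unpickler.find_global = find_global
    return unpickler.load()

stream = StringIO()
Pickler(stream, 1).dump(Harmless())
try:
    restricted_load(stream.getvalue())  # __main__.Harmless is not whitelisted
except ValueError, e:
    print "blocked: %s" % e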
def network_setup(self):
    dict = {}
    self.net_mtime = self.network_connect()
    if self.net_mtime != None:
        if os.path.exists(self.user_data_file):
            self.network_update()
            local_mtime = int(os.stat(self.user_data_file).st_mtime)
            if local_mtime > self.net_mtime:
                self.network_upload()
            elif local_mtime < self.net_mtime:
                self.network_download()
        else:
            self.network_download()
    if os.path.exists(self.user_data_file):
        ph = open(self.user_data_file)
        dict = Unpickler(ph).load()[-1]
    if not os.path.exists(self.user_data_file) and self.net_mtime == None:
        ph = open(self.user_data_file, 'w+')
        Pickler(ph).dump(dict)
        ph.close()
        os.utime(self.user_data_file, (0, 0))
    last_month = dateDelta(date.today()).get_last_month()
    keys = dict.keys()
    keys.sort()
    for key in keys:
        if key[:7] < '%0.4d-%0.2d' % (last_month.year, last_month.month):
            dict.pop(key)
        else:
            break
    self.freetime.update(dict)
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads objects
    saved with older versions of Eelbrain, and allows using a system file
    dialog to select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog
        will be shown. If the user cancels the file dialog, a RuntimeError
        is raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            print repr(file_path)
    else:
        file_path = os.path.expanduser(file_path)

    if not os.path.exists(file_path):
        new_path = os.extsep.join((file_path, 'pickled'))
        if os.path.exists(new_path):
            file_path = new_path

    # open in binary mode: pickle data is not text
    with open(file_path, 'rb') as fid:
        unpickler = Unpickler(fid)
        unpickler.find_global = map_paths
        obj = unpickler.load()
    return obj
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, types.TupleType):
        if isinstance(klass_info[0], type):
            # Unclear: what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
            inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
def __init__(self, inp_filename):
    self.filename = inp_filename
    with open(self.filename, 'rb') as inpfile:
        reader = Unpickler(inpfile)
        self.numgroups = reader.load()
        self.tot_msgs = reader.load()
        self.num_msgs = reader.load()
def getNewState(cls, file):
    # Would like to do load(file) here... but it doesn't work with
    # universal line endings, see Python bug 1724366
    from cStringIO import StringIO
    unpickler = Unpickler(StringIO(file.read()))
    # Magic to keep us backward compatible in the face of packages changing...
    unpickler.find_global = cls.findGlobal
    return unpickler.load()
def setstate(self, object):
    """
    Unlike the 'stock' Connection class' setstate, this method doesn't
    raise ConflictErrors.  This is potentially dangerous for applications
    that need absolute consistency, but sessioning is not one of those.
    """
    oid = object._p_oid
    invalid = self._invalid
    if invalid(None):
        # only raise a conflict if there was a mass invalidation, but not
        # if we see this object's oid as invalid
        raise ConflictError, `oid`
    p, serial = self._storage.load(oid, self._version)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = self._persistent_load
    unpickler.load()  # skip the class record; the state record follows
    state = unpickler.load()
    if hasattr(object, '__setstate__'):
        object.__setstate__(state)
    else:
        d = object.__dict__
        for k, v in state.items():
            d[k] = v
    object._p_serial = serial
def got_data(self, port_agent_packet):
    """
    Called by the instrument connection when data is available.

    Also add data to the chunker and when received call got_chunk
    to publish results.
    """
    data_length = port_agent_packet.get_data_length()
    data = port_agent_packet.get_data()
    timestamp = port_agent_packet.get_timestamp()

    log.debug("Got Data: %s" % data)
    log.debug("Add Port Agent Timestamp: %s" % timestamp)

    unpickler = Unpickler(StringIO(data))
    # Disable class unpickling, for security; the record should consist of
    # built-in types only. Note this only works with cPickle.
    unpickler.find_global = None

    # pkt is an antelope.Pkt.Packet object converted to a dict. Refer to
    # the documentation for the Antelope Python bindings for complete
    # details.
    pkt = unpickler.load()

    for particle in self._particle_factory(pkt, timestamp):
        self._publish_particle(particle)
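A short demonstration (separate from the driver code above) of what `find_global = None` buys with cPickle: pickles built purely from built-in types load normally, while any pickle that references a class fails. The sample payloads are invented:

from cStringIO import StringIO
from cPickle import Pickler, Unpickler, UnpicklingError

def load_builtins_only(data):
    unpickler = Unpickler(StringIO(data))
    unpickler.find_global = None  # cPickle-only: refuse to resolve any class
    return unpickler.load()

class Evil(object):
    pass

ok = StringIO()
Pickler(ok, 1).dump({'chan': 'BHZ', 'nsamp': 100})  # built-in types only
print load_builtins_only(ok.getvalue())

bad = StringIO()
Pickler(bad, 1).dump(Evil())
try:
    load_builtins_only(bad.getvalue())
except UnpicklingError, e:
    print "rejected: %s" % e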
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed."""
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()

    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        p.persistent_id = pers_id
    cmf_uid = getattr(obj, 'cmf_uid', None)
    if IUniqueIdAnnotation.providedBy(cmf_uid):
        setattr(obj, 'cmf_uid', cmf_uid())
    try:
        p.dump(aq_base(obj))
    except TypeError:
        # just try again, this then seems to work
        # WTF?
        p.dump(aq_base(obj))

    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Ignores specified objects along the way, replacing them with
    placeholders in the copy.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        if ignore_dict.has_key(id(ob)):
            return 'ignored'
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
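The persistent_id/persistent_load pairing above is the general mechanism for excluding objects from a pickle. Here is a self-contained sketch with a stand-in Placeholder class (the real code uses Zope's SimpleItem), showing that even an unpicklable object can be skipped this way:

from cStringIO import StringIO
from cPickle import Pickler, Unpickler

class Placeholder(object):
    id = "ignored_subobject"

def clone_ignoring(obj, ignore_list=()):
    ignored = dict((id(o), o) for o in ignore_list)

    def persistent_id(ob):
        # Returning a string tells the pickler to emit a persistent
        # reference instead of pickling the object itself.
        return 'ignored' if id(ob) in ignored else None

    def persistent_load(ref):
        assert ref == 'ignored'
        return Placeholder()

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()

func = lambda: None                  # normally not picklable at all
copy = clone_ignoring({'keep': [1, 2], 'skip': func}, ignore_list=[func])
print copy['keep']                           # [1, 2]
print isinstance(copy['skip'], Placeholder)  # True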
def test_config_and_collector_pickling(self, testdir):
    from cPickle import Pickler, Unpickler
    tmpdir = testdir.tmpdir
    dir1 = tmpdir.ensure("somedir", dir=1)
    config = testdir.parseconfig()
    col = config.getfsnode(config.topdir)
    col1 = col.join(dir1.basename)
    assert col1.parent is col
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(col)
    pickler.dump(col1)
    pickler.dump(col)
    io.seek(0)
    unpickler = Unpickler(io)
    topdir = tmpdir.ensure("newtopdir", dir=1)
    topdir.ensure("somedir", dir=1)
    old = topdir.chdir()
    try:
        newcol = unpickler.load()
        newcol2 = unpickler.load()
        newcol3 = unpickler.load()
        assert newcol2.config is newcol.config
        assert newcol2.parent == newcol
        assert newcol2.config.topdir.realpath() == topdir.realpath()
        assert newcol.fspath.realpath() == topdir.realpath()
        assert newcol2.fspath.basename == dir1.basename
        assert newcol2.fspath.relto(newcol2.config.topdir)
    finally:
        old.chdir()
def network_update(self):
    #
    # We have network, we have local file, we may not have
    # remote file.
    #
    if self.userkey in self.sftp.listdir(self.category):
        local_mtime = os.stat(self.user_data_file).st_mtime
        if int(local_mtime) == 0:
            self.sftp.get(self.category + '/' + self.userkey,
                          self.user_data_file + '.tmp')
            ph = open(self.user_data_file + '.tmp')
            net_data = Unpickler(ph).load()[-1]
            ph.close()
            os.unlink(self.user_data_file + '.tmp')
            ph = open(self.user_data_file)
            local_data = Unpickler(ph).load()[-1]
            ph.close()
            local_data.update(net_data)
            self.freetime = freeTime(dict=local_data)
            ph = open(self.user_data_file, 'w+')
            Pickler(ph).dump(self.bundle_data())
            ph.close()
    else:
        self.sftp.put(self.user_data_file, self.category + '/' + self.userkey)
    mtime = self.sftp.stat(self.category + '/' + self.userkey).st_mtime
    os.utime(self.user_data_file, (mtime, mtime))
def replay(self, name):
    replay_file_path = os.path.join(PROJECTS, self.name, "replays", name)
    replay = None
    # binary mode: pickle data is not text
    with open(replay_file_path, "rb") as fp:
        upkl = Unpickler(fp)
        replay = upkl.load()
    return replay
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, tuple):
        if isinstance(klass_info[0], type):
            # Unclear: what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
            inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
def loads(self, s):
    up = Unpickler(BytesIO(s))
    up.persistent_load = self._get_object
    try:
        return up.load()
    except KeyError, e:
        raise UnpicklingError, "Could not find Node class for %s" % e
def deserialize(self, event, state):
    assert IFullDeserializationEvent.isImplementedBy(event)
    assert isinstance(event.obj, Persistent)

    # Set up to resolve cyclic references to the object.
    event.deserialized('self', event.obj)

    state = state.strip()
    if state:
        if state.startswith('#'):
            # Text-encoded pickles start with a pound sign.
            # (A pound sign is not a valid pickle opcode.)
            data = decode_from_text(state)
        else:
            data = state
        infile = StringIO(data)
        u = Unpickler(infile)
        u.persistent_load = event.resolve_internal
        s = u.load()
        if not hasattr(s, 'items'):
            # Turn the list back into a dictionary
            s_list = s
            s = {}
            for key, value in s_list:
                s[key] = value
        event.obj.__dict__.update(s)
        try:
            unmanaged = u.load()
        except EOFError:
            # old pickle with no list of unmanaged objects
            pass
        else:
            event.upos.extend(unmanaged)
def _unpickle(pickled):
    """Unpickle a string, catching every error it can raise so that only
    NotReadableJobError is raised on failure.

    OpenERP stores the text fields as 'utf-8', so we specify the encoding.

    `loads()` may raise many types of exceptions (AttributeError,
    IndexError, TypeError, KeyError, ...). They are all caught and
    re-raised as `NotReadableJobError`.

    Pickle could be exploited by an attacker who would write a value in a
    job that would run arbitrary code when unpickled. This is why we set a
    custom ``find_global`` method on the ``Unpickler``: only jobs and a
    whitelist of classes/functions are allowed to be unpickled (plus the
    built-in types).
    """
    def restricted_find_global(mod_name, fn_name):
        __import__(mod_name)
        mod = sys.modules[mod_name]
        fn = getattr(mod, fn_name)
        if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST):
            raise UnpicklingError(
                '{}.{} is not allowed in jobs'.format(mod_name, fn_name)
            )
        return fn

    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = restricted_find_global
    try:
        unpickled = unpickler.load()
    except (StandardError, UnpicklingError):
        raise NotReadableJobError('Could not unpickle.', pickled)
    return unpickled
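JOB_REGISTRY and _UNPICKLE_WHITELIST come from the surrounding connector module. A self-contained sketch of the same pattern, with datetime standing in for the whitelist, might look like this:

import sys
from cStringIO import StringIO
from cPickle import Pickler, Unpickler, UnpicklingError
from datetime import datetime

_WHITELIST = frozenset([datetime])

def restricted_loads(pickled):
    def find_global(mod_name, fn_name):
        __import__(mod_name)
        obj = getattr(sys.modules[mod_name], fn_name)
        if obj not in _WHITELIST:
            raise UnpicklingError('{}.{} is not allowed'
                                  .format(mod_name, fn_name))
        return obj
    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = find_global
    return unpickler.load()

stream = StringIO()
Pickler(stream, 1).dump(datetime(2020, 1, 1))
print restricted_loads(stream.getvalue())  # datetime is whitelisted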
def __getitem__(self, oid, tt=type(())):
    obj = self._cache.get(oid, None)
    if obj is not None:
        return obj

    __traceback_info__ = (oid)
    self.before_load()
    p, serial = self._storage.load(oid, self._version)
    __traceback_info__ = (oid, p)
    file = StringIO(p)
    unpickler = Unpickler(file)
    # unpickler.persistent_load = self._persistent_load
    try:
        classification = unpickler.load()
    except:
        # raising a bare string is invalid; wrap the message in an exception
        raise ValueError(
            "Could not load oid %s. Pickled data in traceback info may "
            "contain clues." % (oid,))
    osio = self._get_osio()
    obj = osio.new_instance(oid, classification)
    assert obj is not None
    obj._p_oid = oid
    obj._p_jar = self
    obj._p_changed = None
    self._set_serial(obj, serial)
    self._cache[oid] = obj
    if oid == osio.conf.oid_gen.root_oid:
        self._root_ = obj  # keep a ref
    return obj
def loads(self, s):
    up = Unpickler(BytesIO(s))
    up.persistent_load = self._get_object
    try:
        return up.load()
    except KeyError, e:
        raise UnpicklingError("Could not find Node class for %s" % e)
def deserialize(self, message, task_id=None):
    """Deserialize an object

    :param message: A serialized object (string).
    :param task_id: When given, resolve persistent references using the
        queue's stored arguments for that task. When None, raise an error
        if the message contains persistent references.
    """
    fail = []
    if task_id is None:
        def persistent_load(task_id):
            raise UnpicklingError('message contained references to '
                                  'external objects: %s' % task_id)
    else:
        args = self._queue.get_arguments(task_id)
        args = {k: loads(v) for k, v in args.items()}

        def persistent_load(arg_id):
            value = args[arg_id]
            if isinstance(value, TaskFailure):
                fail.append(value)
            return value

    data = StringIO(message)
    pickle = Unpickler(data)
    pickle.persistent_load = persistent_load
    obj = pickle.load()
    if fail and not obj.on_error_pass:
        # TODO detect errors earlier, fail earlier, cancel enqueued tasks
        self.set_result(obj, fail[0])
        obj = None
    return obj
def getNewState(self, file):
    # Would like to do load(file) here... but it doesn't work with
    # universal line endings, see Python bug 1724366
    from cStringIO import StringIO
    unpickler = Unpickler(StringIO(file.read()))
    # Magic to keep us backward compatible in the face of packages changing...
    unpickler.find_global = self.findGlobal
    return unpickler.load()
def validateSource(self):
    skip_to_pos = 0
    if os.path.exists(self.pickle):
        upfh = open(self.pickle, "rb")
        unpickler = Unpickler(upfh)
        old_opt, old_pos = unpickler.load()
        if self.opt == old_opt:
            skip_to_pos = old_pos
    for i in range(0, skip_to_pos, 1):
        sys.stdout.write(".")
    pos = -1
    files = self.files['humans'].keys()
    files.sort()
    cp = ConfigParser()
    cp.read(os.path.join(path("config"), "test.cnf"))
    validator_path = cp.get("validation", "validator")
    csl_schema_path = cp.get("validation", "schema")
    cslm_schema_path = cp.get("validation", "schema-m")
    for filename in files:
        pos += 1
        if pos < skip_to_pos:
            continue
        p = self.files['humans'][filename]
        test = CslTest(opt, p, filename, pos=pos)
        test.parse()
        test.validate(validator_path, csl_schema_path, cslm_schema_path)
    if os.path.exists(self.pickle):
        os.unlink(self.pickle)
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    klass_info = u.load()
    if isinstance(klass_info, types.TupleType):
        if isinstance(klass_info[0], types.TupleType):
            modname, klassname = klass_info[0]
            args = klass_info[1]
        else:
            modname, klassname = klass_info
            args = None
        if modname == "__main__":
            ns = globals()
        else:
            mod = import_helper(modname)
            ns = mod.__dict__
        try:
            klass = ns[klassname]
        except KeyError:
            sys.stderr.write("can't find %s in %s" % (klassname, repr(ns)))
        inst = klass()
    else:
        raise ValueError, "expected class info: %s" % repr(klass_info)
    state = u.load()
    inst.__setstate__(state)
    return inst
def load_configuration(cls, file_name):
    """Unpickle lattice configuration from file"""
    # binary mode: pickle data is not text
    with open(file_name, 'rb') as f:
        upkl = Unpickler(f)
        lattice = upkl.load()
    if lattice is None:
        print("failed to unpickle or something else happened")
    return lattice
def state(self, oid, serial, prfactory, p=''):
    p = p or self.loadSerial(oid, serial)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = prfactory.persistent_load
    class_tuple = unpickler.load()  # skip the class record
    state = unpickler.load()
    return state
def oldstate(self, object, serial):
    oid = object._p_oid
    p = self._storage.loadSerial(oid, serial)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = self._persistent_load
    unpickler.load()  # skip the class record; the state record follows
    return unpickler.load()
def pickle_load(file_name):
    f = open(file_name, "rb")
    obj = None
    if f != None:
        u = Unpickler(f)
        obj = u.load()
        f.close()
    return obj
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism."""
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(aq_base(obj))
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
def testPickleUnpickle(self):
    s = StringIO()
    p = Pickler(s)
    p.dump(Allow)
    s.seek(0)
    u = Unpickler(s)
    newAllow = u.load()
    self.failUnless(newAllow is Allow)
def get_action_param_file(pfilename):
    '''Argument is the name of a file to unpickle. The return value is the
    dictionary that was pickled into it.'''
    from cPickle import Unpickler
    fileh = open(pfilename, 'rb')
    pickle = Unpickler(fileh)
    list_of_dict_files = pickle.load()
    fileh.close()
    return list_of_dict_files
def get_action_param_var(pfilename):
    '''Argument is the name of a file to unpickle. The return value is the
    dictionary that was pickled into it.'''
    from cPickle import Unpickler
    with open(pfilename, 'rb') as fileh:
        pickle = Unpickler(fileh)
        list_of_dict_files = pickle.load()
    return list_of_dict_files
def server_decode(msg):
    """Decodes msg and returns its parts"""
    unpickler = Unpickler(StringIO(msg))
    unpickler.find_global = server_find_global

    try:
        return unpickler.load()  # msgid, flags, name, args
    except:
        log("can't decode message: %s" % short_repr(msg),
            level=logging.ERROR)
        raise
class IndexFile(object):
    '''Open an index file for reading.

    filename - the file containing the index.

    Use the get and get_all methods to return objects in the index.
    '''

    def __init__(self, filename):
        if not os.path.exists(filename):
            raise IndexFileMissingError(filename)
        self.filename = filename
        self.handle = open(self.filename)
        self.unpickler = Unpickler(self.handle)
        magic = self.handle.read(8)
        expected_magic = 'pdko\x00\x00\x00\x01'
        if magic != expected_magic:
            message = ('Magic bytes incorrect. Is %s really a pdko file?'
                       % self.filename)
            raise IndexFormatError, message
        table_offset = read_offset(self.handle)
        self.handle.seek(table_offset)
        self.key_dict = self.unpickler.load()

    def iter_addresses(self, key):
        '''Get a list of pickle addresses for the given key.'''
        try:
            list_offset = self.key_dict[key]
            self.handle.seek(list_offset)
            address_list = self.unpickler.load()
            for addresses in address_list:
                yield addresses
        except KeyError:
            return

    def get(self, key, column):
        '''Yield the columnth object from every object group under the key.'''
        for addresses in self.iter_addresses(key):
            offset = addresses[column]
            self.handle.seek(offset)
            yield self.unpickler.load()

    def get_all(self, key):
        '''Yield every object group under the key as a tuple.'''
        for addresses in self.iter_addresses(key):
            objects = []
            for offset in addresses:
                self.handle.seek(offset)
                objects.append(self.unpickler.load())
            yield tuple(objects)

    def count(self, key):
        '''Get the object group count for the given key.'''
        return len(list(self.iter_addresses(key)))
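Hypothetical usage of the class above; the file name and key are invented, and assume an index produced by the matching writer:

index = IndexFile('observations.pdko')
print index.count('sample-key')           # number of object groups under the key
for group in index.get_all('sample-key'):
    print group                           # each group as a tuple
for first in index.get('sample-key', column=0):
    print first                           # just the first column of each group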
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        # keep only the last (name, ast) record; initialise in case the
        # file contains no records at all
        name = ast = None
        while True:
            try:
                name = pk.load()
                ast = pk.load()
            except EOFError:
                break
    return opcode, name, ast
def getCounter(self, key):
    # initialise counter
    try:
        f = open(self.counter_file, 'r+')
        self._counters = Unpickler(f).load()
    except:
        f = open(self.counter_file, 'w+')
        self._counters = {}
    f.close()
    if not self._counters.has_key(key):
        self._counters[key] = 0
    return self._counters[key]
def retrieve(filename):
    '''Retrieve a pickled object (e.g. state list or networkx graph) from file'''
    filename = os.path.normcase(filename)
    try:
        f = open(filename, 'rb')
        u = Unpickler(f)
        stuff = u.load()
        f.close()
        return stuff
    except IOError:
        raise LogOpeningError("No file found for %s" % filename, filename)
def unpickle(filename):
    f, obj = None, None
    try:
        f = open(filename, "rb")
        p = Unpickler(f)
        x = p.load()
        f.close()
        f = None
        obj = x
    finally:
        if f:
            f.close()
    return obj
def _cpickle_loads(pkl_str):
    """A replacement for cPickle.loads that overrides the find_global
    attribute (or find_class in Python 3).
    """
    pk = Unpickler(BytesIO(pkl_str))
    if hasattr(pk, 'find_class'):
        # This is the method name in Python 3
        pk.find_class = _find_global
    else:
        # hasattr does not really work on the cPickle objects in Python 2,
        # so we just assume we're in Python 2 if hasattr returned False
        pk.find_global = _find_global
    return pk.load()
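`_find_global` is defined elsewhere in the module this snippet came from. A minimal sketch of what such a hook typically looks like (the allowed set here is invented for illustration):

import sys
from cPickle import UnpicklingError

def _find_global(module, name,
                 _allowed=frozenset([('collections', 'OrderedDict')])):
    # Resolve a global only if (module, name) is explicitly allowed.
    if (module, name) not in _allowed:
        raise UnpicklingError('global %s.%s is forbidden' % (module, name))
    __import__(module)
    return getattr(sys.modules[module], name)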
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        asts = {}
        while True:
            try:
                name = pk.load()
                ast = pk.load()
                asts[(name, opcode)] = ast
            except EOFError:
                break
    return asts
def unpickle_values(self):
    pi = Unpickler(open(self.idata))
    self.centerpos = pi.load()
    self.maxhalfwidth = pi.load()
    self.leftmost = pi.load()
    self.rightmost = pi.load()
    self.conv_left = pi.load()
    self.conv_right = pi.load()
    self.middleypos = pi.load()
    self.radius = pi.load()
    self.miny = pi.load()
    self.maxy = pi.load()
    self.extend = pi.load()
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        print 'opcode: %02x' % opcode
        ast = None  # in case the file contains no records
        while True:
            try:
                name = pk.load()
                ast = pk.load()
                print '%s\n%s\n' % (name, str(ast))
            except EOFError:
                break
    return opcode, ast
def load_game(self, path):
    """Restores a saved game from the specified path."""
    f = open(path, "rb")
    check_file_magic(f, self.app_magic, "saved game")
    check_file_magic(f, self.save_file_magic, "saved game")
    p = Unpickler(f)
    file_version = p.load()
    app_version = p.load()
    check_file_version(file_version, app_version, self.save_min_version,
                       self.save_file_version, self.save_version_ranges)
    data = p.load()
    f.close()
    self.restore_save_data(data)
    self.set_save_path(path)
    self.unsaved_progress = False
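The matching writer is not shown in this snippet. A hedged sketch that simply mirrors the read order above (magic, magic, file version, app version, data) could look like this; the protocol number, the app_version attribute, and the get_save_data accessor are assumptions:

from cPickle import Pickler

def save_game_sketch(self, path):
    """Hypothetical counterpart to load_game; mirrors its read order."""
    f = open(path, "wb")
    try:
        f.write(self.app_magic)        # raw magic bytes, checked on load
        f.write(self.save_file_magic)
        p = Pickler(f, 2)              # protocol is an assumption
        p.dump(self.save_file_version)
        p.dump(self.app_version)       # assumed attribute
        p.dump(self.get_save_data())   # hypothetical accessor
    finally:
        f.close()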
def test_collector_implicit_config_pickling(self, testdir):
    from cPickle import Pickler, Unpickler
    tmpdir = testdir.tmpdir
    testdir.chdir()
    testdir.makepyfile(hello="def test_x(): pass")
    config = testdir.parseconfig(tmpdir)
    col = config.getfsnode(config.topdir)
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(col)
    io.seek(0)
    unpickler = Unpickler(io)
    col2 = unpickler.load()
    assert col2.name == col.name
    assert col2.listnames() == col.listnames()