def test_config_and_collector_pickling(self, testdir):
    from cPickle import Pickler, Unpickler
    tmpdir = testdir.tmpdir
    dir1 = tmpdir.ensure("somedir", dir=1)
    config = testdir.parseconfig()
    col = config.getfsnode(config.topdir)
    col1 = col.join(dir1.basename)
    assert col1.parent is col
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(col)
    pickler.dump(col1)
    pickler.dump(col)
    io.seek(0)
    unpickler = Unpickler(io)
    topdir = tmpdir.ensure("newtopdir", dir=1)
    topdir.ensure("somedir", dir=1)
    old = topdir.chdir()
    try:
        newcol = unpickler.load()
        newcol2 = unpickler.load()
        newcol3 = unpickler.load()
        assert newcol2.config is newcol.config
        assert newcol2.parent == newcol
        assert newcol2.config.topdir.realpath() == topdir.realpath()
        assert newcol.fspath.realpath() == topdir.realpath()
        assert newcol2.fspath.basename == dir1.basename
        assert newcol2.fspath.relto(newcol2.config.topdir)
    finally:
        old.chdir()
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, types.TupleType):
        if isinstance(klass_info[0], type):
            # Unclear: what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
def load_weights(self):
    '''Loads the stored data from previous sessions, if possible.'''
    valid = False
    try:
        fp = open(self.filename, 'r')
    except IOError:
        self.log_debug(11, "Couldn't read stats file '%s'", self.filename)
    else:
        self.log_debug(11, "Loading stats file '%s'", self.filename)
        try:
            pickler = Unpickler(fp)
            self.input_headers = pickler.load()
            wi = pickler.load()
            self.output_headers = pickler.load()
            wo = pickler.load()
            #self.seasons = pickler.load()
            #self.powers = pickler.load()
            #self.locs = pickler.load()
            #self.provinces = pickler.load()
            #self.centers = pickler.load()
            #self.coastals = pickler.load()
            #self.coastlines = pickler.load()
            #self.borders = pickler.load()
        finally:
            fp.close()
        ni = len(self.input_headers)
        no = len(self.output_headers)
        nh = len(wo)
        self.log_debug(7, "%d inputs => %d hidden => %d outputs", ni, nh, no)
        self.net = NN(ni, nh, no, wi, wo)
        valid = True
    return valid
def setstate(self, object):
    """Unlike the 'stock' Connection class' setstate, this method
    doesn't raise ConflictErrors.  This is potentially dangerous for
    applications that need absolute consistency, but sessioning is
    not one of those."""
    oid = object._p_oid
    invalid = self._invalid
    if invalid(None):
        # Only raise a conflict if there was a mass invalidation,
        # but not if we see this object's oid as invalid.
        raise ConflictError, `oid`
    p, serial = self._storage.load(oid, self._version)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = self._persistent_load
    unpickler.load()  # skip the class metadata
    state = unpickler.load()
    if hasattr(object, '__setstate__'):
        object.__setstate__(state)
    else:
        d = object.__dict__
        for k, v in state.items():
            d[k] = v
    object._p_serial = serial
def test_config_and_collector_pickling(self):
    from cPickle import Pickler, Unpickler
    dir1 = self.tmpdir.ensure("somedir", dir=1)
    config = py.test.config._reparse([self.tmpdir])
    col = config.getfsnode(config.topdir)
    col1 = col.join(dir1.basename)
    assert col1.parent is col
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(config)
    pickler.dump(col)
    pickler.dump(col1)
    pickler.dump(col)
    io.seek(0)
    unpickler = Unpickler(io)
    newconfig = unpickler.load()
    topdir = self.tmpdir.ensure("newtopdir", dir=1)
    newconfig._initafterpickle(topdir)
    topdir.ensure("somedir", dir=1)
    newcol = unpickler.load()
    newcol2 = unpickler.load()
    newcol3 = unpickler.load()
    assert newcol2._config is newconfig
    assert newcol2.parent == newcol
    assert newcol._config is newconfig
    assert newconfig.topdir == topdir
    assert newcol3 is newcol
    assert newcol.fspath == topdir
    assert newcol2.fspath.basename == dir1.basename
    assert newcol2.fspath.relto(topdir)
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        newpickle = self._crs_untransform_record_data(newpickle)
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        unpickler.persistent_load = prfactory.persistent_load
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()
        if klass in _unresolvable:
            raise ConflictError
        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)
        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            raise ConflictError
        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory, committedData)
        resolved = resolve(old, committed, newstate)
        file = StringIO()
        pickler = Pickler(file, 1)
        pickler.inst_persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        return self._crs_transform_record_data(file.getvalue(1))
    except (ConflictError, BadClassName):
        pass
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client.  The error here will mask
        # the original ConflictError.  A client can recover from a
        # ConflictError, but not necessarily from other errors.  But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)
    raise ConflictError(oid=oid, serials=(committedSerial, oldSerial),
                        data=newpickle)
def load_state(self, state):
    """Load an image_set_list's state from the string returned from
    save_state"""
    self.__image_sets = []
    self.__image_sets_by_key = {}

    # Make a safe unpickler
    p = Unpickler(StringIO(state))

    def find_global(module_name, class_name):
        logger.debug("Pickler wants %s:%s", module_name, class_name)
        if module_name not in ("numpy", "numpy.core.multiarray"):
            logger.critical(
                "WARNING WARNING WARNING - your batch file has asked to load %s.%s."
                " If this looks in any way suspicious please contact us at www.cellprofiler.org",
                module_name, class_name)
            raise ValueError("Illegal attempt to unpickle class %s.%s" %
                             (module_name, class_name))
        __import__(module_name)
        mod = sys.modules[module_name]
        return getattr(mod, class_name)

    p.find_global = find_global
    count = p.load()
    all_keys = [p.load() for i in range(count)]
    self.__legacy_fields = p.load()
    #
    # Have to do in this order in order for the image set's
    # legacy_fields property to hook to the right legacy_fields
    #
    for i in range(count):
        self.get_image_set(all_keys[i])
def __init__(self, inp_filename):
    self.filename = inp_filename
    with open(self.filename, 'rb') as inpfile:
        reader = Unpickler(inpfile)
        self.numgroups = reader.load()
        self.tot_msgs = reader.load()
        self.num_msgs = reader.load()
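# A minimal sketch of the writer side that the reader above implies: one
# dump() per load(), in the same order. The function name and the protocol
# choice are assumptions, not taken from the original source.
from cPickle import Pickler

def write_counts(out_filename, numgroups, tot_msgs, num_msgs):
    with open(out_filename, 'wb') as outfile:
        writer = Pickler(outfile, 2)  # binary protocol, matching the 'rb' reader
        writer.dump(numgroups)
        writer.dump(tot_msgs)
        writer.dump(num_msgs)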
def deserialize(self, event, state):
    assert IFullDeserializationEvent.isImplementedBy(event)
    assert isinstance(event.obj, Persistent)

    # Set up to resolve cyclic references to the object.
    event.deserialized('self', event.obj)

    state = state.strip()
    if state:
        if state.startswith('#'):
            # Text-encoded pickles start with a pound sign.
            # (A pound sign is not a valid pickle opcode.)
            data = decode_from_text(state)
        else:
            data = state
        infile = StringIO(data)
        u = Unpickler(infile)
        u.persistent_load = event.resolve_internal
        s = u.load()
        if not hasattr(s, 'items'):
            # Turn the list back into a dictionary
            s_list = s
            s = {}
            for key, value in s_list:
                s[key] = value
        event.obj.__dict__.update(s)
        try:
            unmanaged = u.load()
        except EOFError:
            # old pickle with no list of unmanaged objects
            pass
        else:
            event.upos.extend(unmanaged)
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    u.persistent_load = persistent_load
    klass_info = u.load()
    if isinstance(klass_info, tuple):
        if isinstance(klass_info[0], type):
            # Unclear: what is the second part of klass_info?
            klass, xxx = klass_info
            assert not xxx
        else:
            if isinstance(klass_info[0], tuple):
                modname, klassname = klass_info[0]
            else:
                modname, klassname = klass_info
            if modname == "__main__":
                ns = globals()
            else:
                mod = import_helper(modname)
                ns = mod.__dict__
            try:
                klass = ns[klassname]
            except KeyError:
                print >> sys.stderr, "can't find %s in %r" % (klassname, ns)
        inst = klass()
    else:
        raise ValueError("expected class info: %s" % repr(klass_info))
    state = u.load()
    inst.__setstate__(state)
    return inst
def zodb_unpickle(data):
    """Unpickle an object stored using the format expected by ZODB."""
    f = StringIO(data)
    u = Unpickler(f)
    klass_info = u.load()
    if isinstance(klass_info, types.TupleType):
        if isinstance(klass_info[0], types.TupleType):
            modname, klassname = klass_info[0]
            args = klass_info[1]
        else:
            modname, klassname = klass_info
            args = None
        if modname == "__main__":
            ns = globals()
        else:
            mod = import_helper(modname)
            ns = mod.__dict__
        try:
            klass = ns[klassname]
        except KeyError:
            sys.stderr.write("can't find %s in %s" % (klassname, repr(ns)))
        inst = klass()
    else:
        raise ValueError, "expected class info: %s" % repr(klass_info)
    state = u.load()
    inst.__setstate__(state)
    return inst
def state(self, oid, serial, prfactory, p=''):
    p = p or self.loadSerial(oid, serial)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = prfactory.persistent_load
    class_tuple = unpickler.load()
    state = unpickler.load()
    return state
def state(self, oid, serial, prfactory, p=''):
    p = p or self.loadSerial(oid, serial)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.find_global = find_global
    unpickler.persistent_load = prfactory.persistent_load
    unpickler.load()  # skip the class tuple
    return unpickler.load()
def oldstate(self, object, serial):
    oid = object._p_oid
    p = self._storage.loadSerial(oid, serial)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.persistent_load = self._persistent_load
    unpickler.load()  # skip the class metadata
    return unpickler.load()
def state(self, oid, serial, prfactory, p=""):
    p = p or self.loadSerial(oid, serial)
    p = self._crs_untransform_record_data(p)
    file = StringIO(p)
    unpickler = Unpickler(file)
    unpickler.find_global = find_global
    unpickler.persistent_load = prfactory.persistent_load
    unpickler.load()  # skip the class tuple
    return unpickler.load()
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        while True:
            try:
                name = pk.load()
                ast = pk.load()
            except EOFError:
                break
    # Note: only the last (name, ast) pair read from the stream is returned.
    return opcode, name, ast
class IndexFile(object):
    '''Open an index file for reading.

    filename - the file containing the index.

    Use the get and get_all methods to return objects in the index.
    '''

    def __init__(self, filename):
        if not os.path.exists(filename):
            raise IndexFileMissingError(filename)
        self.filename = filename
        self.handle = open(self.filename)
        self.unpickler = Unpickler(self.handle)
        magic = self.handle.read(8)
        expected_magic = 'pdko\x00\x00\x00\x01'
        if magic != expected_magic:
            message = 'Magic bytes incorrect. Is %s really a pdko file?' \
                      % self.filename
            raise IndexFormatError, message
        table_offset = read_offset(self.handle)
        self.handle.seek(table_offset)
        self.key_dict = self.unpickler.load()

    def iter_addresses(self, key):
        '''Get a list of pickle addresses for the given key.'''
        try:
            list_offset = self.key_dict[key]
            self.handle.seek(list_offset)
            address_list = self.unpickler.load()
            for addresses in address_list:
                yield addresses
        except KeyError:
            return

    def get(self, key, column):
        '''Yield the column-th object from each object group under the key.'''
        for addresses in self.iter_addresses(key):
            offset = addresses[column]
            self.handle.seek(offset)
            yield self.unpickler.load()

    def get_all(self, key):
        '''Yield the full object group for each entry under the key.'''
        for addresses in self.iter_addresses(key):
            objects = []
            for offset in addresses:
                self.handle.seek(offset)
                objects.append(self.unpickler.load())
            yield tuple(objects)

    def count(self, key):
        '''Get the object group count for the given key.'''
        return len(list(self.iter_addresses(key)))
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        asts = {}
        while True:
            try:
                name = pk.load()
                ast = pk.load()
                asts[(name, opcode)] = ast
            except EOFError:
                break
    return asts
def readAST(filename):
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        print 'opcode: %02x' % opcode
        while True:
            try:
                name = pk.load()
                ast = pk.load()
                print '%s\n%s\n' % (name, str(ast))
            except EOFError:
                break
    return opcode, ast
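# Hypothetical writer matching the three readAST variants above (not part of
# the original source): one opcode, then alternating name/ast dumps; the
# readers rely on EOFError from load() to detect the end of the stream.
from cPickle import Pickler

def writeAST(filename, opcode, asts):
    with open(filename, 'wb') as f:
        pk = Pickler(f, 2)
        pk.dump(opcode)
        for name, ast in asts.items():
            pk.dump(name)
            pk.dump(ast)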
def read_level(self, path):
    f = open(path, "rb")
    try:
        check_file_magic(f, self.app_magic, "level")
        check_file_magic(f, self.level_file_magic, "level")
        p = Unpickler(f)
        file_version = p.load()
        app_version = p.load()
        check_file_version(file_version, app_version,
                           self.level_min_version, self.level_file_version,
                           self.level_version_ranges)
        level = self.unpickle_level(p)
    finally:
        f.close()
    return level
def load_game(self, path):
    """Restores a saved game from the specified path."""
    f = open(path, "rb")
    check_file_magic(f, self.app_magic, "saved game")
    check_file_magic(f, self.save_file_magic, "saved game")
    p = Unpickler(f)
    file_version = p.load()
    app_version = p.load()
    check_file_version(file_version, app_version,
                       self.save_min_version, self.save_file_version,
                       self.save_version_ranges)
    data = p.load()
    f.close()
    self.restore_save_data(data)
    self.set_save_path(path)
    self.unsaved_progress = False
def cloneByPickle(obj, ignore_list=()):
    """Makes a copy of a ZODB object, loading ghosts as needed.

    Ignores specified objects along the way, replacing them with None
    in the copy.
    """
    ignore_dict = {}
    for o in ignore_list:
        ignore_dict[id(o)] = o

    def persistent_id(ob, ignore_dict=ignore_dict):
        if ignore_dict.has_key(id(ob)):
            return 'ignored'
        if getattr(ob, '_p_changed', 0) is None:
            ob._p_changed = 0
        return None

    def persistent_load(ref):
        assert ref == 'ignored'
        # Return a placeholder object that will be replaced by
        # removeNonVersionedData().
        placeholder = SimpleItem()
        placeholder.id = "ignored_subobject"
        return placeholder

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
def loads(self, s):
    up = Unpickler(BytesIO(s))
    up.persistent_load = self._get_object
    try:
        return up.load()
    except KeyError, e:
        raise UnpicklingError, "Could not find Node class for %s" % e
def validate_result(result, filter_plugins=True, print_files=False):
    '''Validate cached.filelist for the given result.'''
    path_to_report_dir = result.get_report_dir()
    path_to_file = os.path.join(path_to_report_dir, 'cached.filelist')
    print "validating %s" % path_to_file

    if result.isThumbnail:
        print "cannot validate: thumbnail"
        return None

    # for this_type in dmtypes.FILESET_TYPES:
    #     dmfs = result.get_filestat(this_type)
    #     if dmfs.action_state != 'L':
    #         print "cannot validate: not local"
    #         return None
    dmfs = result.get_filestat(dmtypes.SIG)

    # Get the cached filelist from the cached.filelist file
    try:
        with open(path_to_file, 'rb') as fhandle:
            pickle = Unpickler(fhandle)
            cached_filelist = pickle.load()
    except IOError as ioerror:
        print "%s" % ioerror
        return None

    # Get a list of files currently on the filesystem
    dirs = [dmfs.result.get_report_dir(), dmfs.result.experiment.expDir]
    current_fs_filelist = get_walk_filelist(dirs)

    # Ignore plugin_out directories
    if filter_plugins:
        current_fs_filelist = [filename for filename in current_fs_filelist
                               if not '/plugin_out' in filename]
    # Ignore the cached.filelist file
    current_fs_filelist = [filename for filename in current_fs_filelist
                           if not 'cached.filelist' in filename]
    # Ignore the status.txt file
    current_fs_filelist = [filename for filename in current_fs_filelist
                           if not 'status.txt' in filename]
    # Ignore the serialized_*.json file
    current_fs_filelist = [filename for filename in current_fs_filelist
                           if not 'serialized_' in filename]

    # See if there are differences.
    # leftovers = list(set(cached_filelist) - set(current_fs_filelist))
    # N.B. The difference below tells us, "Of the files on the filesystem
    # right now, how many are NOT in the cached.filelist file?", even if
    # cached.filelist contains more files than are currently on disk.  This
    # means we do not care whether any action_state is not 'L'; we are only
    # looking for a deficient cached.filelist.
    leftovers = list(set(current_fs_filelist) - set(cached_filelist))
    if print_files:
        for i, item in enumerate(leftovers):
            if not i:
                print "FILES MISSING FROM CACHED.FILELIST:"
            print item
    else:
        if len(leftovers) > 0:
            print "FILES MISSING FROM CACHED.FILELIST: %d" % len(leftovers)
    print "- %s\n" % ("Not valid" if len(leftovers) > 0 else "Valid")
    return None
def __getitem__(self, oid, tt=type(())):
    obj = self._cache.get(oid, None)
    if obj is not None:
        return obj

    __traceback_info__ = (oid)
    self.before_load()
    p, serial = self._storage.load(oid, self._version)
    __traceback_info__ = (oid, p)
    file = StringIO(p)
    unpickler = Unpickler(file)
    # unpickler.persistent_load=self._persistent_load
    try:
        classification = unpickler.load()
    except:
        # Raise a real exception (the original raised a bare string, which
        # is a TypeError on modern Python 2).
        raise ValueError(
            "Could not load oid %s. Pickled data in traceback info may "
            "contain clues." % oid)
    osio = self._get_osio()
    obj = osio.new_instance(oid, classification)
    assert obj is not None
    obj._p_oid = oid
    obj._p_jar = self
    obj._p_changed = None
    self._set_serial(obj, serial)
    self._cache[oid] = obj
    if oid == osio.conf.oid_gen.root_oid:
        self._root_ = obj  # keep a ref
    return obj
def got_data(self, port_agent_packet):
    """
    Called by the instrument connection when data is available.

    Also add data to the chunker and when received call got_chunk
    to publish results.
    """
    data_length = port_agent_packet.get_data_length()
    data = port_agent_packet.get_data()
    timestamp = port_agent_packet.get_timestamp()

    log.debug("Got Data: %s" % data)
    log.debug("Add Port Agent Timestamp: %s" % timestamp)

    unpickler = Unpickler(StringIO(data))
    # Disable class unpickling, for security; record should be all
    # built-in types.  Note this only works with cPickle.
    unpickler.find_global = None

    # pkt is an antelope.Pkt.Packet object converted to a dict.  Refer to
    # the documentation for the Antelope Python bindings for complete
    # details.
    pkt = unpickler.load()

    for particle in self._particle_factory(pkt, timestamp):
        self._publish_particle(particle)
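# Standalone sketch (not from the original source) of the find_global = None
# trick used above: with importing disabled, cPickle still loads pickles made
# of built-in types, but raises UnpicklingError for anything that references
# a class or function. Only cPickle supports this attribute.
from cPickle import Pickler, Unpickler, UnpicklingError
from cStringIO import StringIO

buf = StringIO()
Pickler(buf).dump({'chan': 'BHZ', 'samples': [1, 2, 3]})  # built-ins only
safe = Unpickler(StringIO(buf.getvalue()))
safe.find_global = None
print safe.load()  # loads fine: no globals involved

class Sample(object):
    pass

buf = StringIO()
Pickler(buf).dump(Sample())  # this pickle references a class
blocked = Unpickler(StringIO(buf.getvalue()))
blocked.find_global = None
try:
    blocked.load()
except UnpicklingError:
    print "blocked: class unpickling disabled"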
def validateSource(self):
    skip_to_pos = 0
    if os.path.exists(self.pickle):
        upfh = open(self.pickle, "rb")
        unpickler = Unpickler(upfh)
        old_opt, old_pos = unpickler.load()
        if self.opt == old_opt:
            skip_to_pos = old_pos
    for i in range(0, skip_to_pos, 1):
        sys.stdout.write(".")
    pos = -1
    files = self.files['humans'].keys()
    files.sort()
    cp = ConfigParser()
    cp.read(os.path.join(path("config"), "test.cnf"))
    validator_path = cp.get("validation", "validator")
    csl_schema_path = cp.get("validation", "schema")
    cslm_schema_path = cp.get("validation", "schema-m")
    for filename in files:
        pos += 1
        if pos < skip_to_pos:
            continue
        p = self.files['humans'][filename]
        test = CslTest(opt, p, filename, pos=pos)
        test.parse()
        test.validate(validator_path, csl_schema_path, cslm_schema_path)
    if os.path.exists(self.pickle):
        os.unlink(self.pickle)
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed.
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()

    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        p.persistent_id = pers_id
    cmf_uid = getattr(obj, 'cmf_uid', None)
    if IUniqueIdAnnotation.providedBy(cmf_uid):
        setattr(obj, 'cmf_uid', cmf_uid())
    try:
        p.dump(aq_base(obj))
    except TypeError:
        # just try again, this then seems to work
        # WTF?
        p.dump(aq_base(obj))
    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
def setklassstate(self, object):
    try:
        oid = object._p_oid
        __traceback_info__ = oid
        p, serial = self._storage.load(oid, self._version)
        file = StringIO(p)
        unpickler = Unpickler(file)
        unpickler.persistent_load = self._persistent_load
        copy = unpickler.load()
        klass, args = copy
        if klass is not ExtensionKlass:
            LOG('ZODB', ERROR,
                "Unexpected klass when setting class state on %s"
                % getattr(object, '__name__', '(?)'))
            return
        copy = apply(klass, args)
        object.__dict__.clear()
        object.__dict__.update(copy.__dict__)
        object._p_oid = oid
        object._p_jar = self
        object._p_changed = 0
        object._p_serial = serial
    except:
        LOG('ZODB', ERROR, 'setklassstate failed', error=sys.exc_info())
        raise
def _unpickle(pickled):
    """Unpickle a string, catching every type of error it can raise so that
    only NotReadableJobError is raised on failure.

    OpenERP stores text fields as 'utf-8', so we specify the encoding.

    `loads()` may raise many types of exceptions (AttributeError,
    IndexError, TypeError, KeyError, ...). They are all caught and
    re-raised as `NotReadableJobError`.

    Pickle could be exploited by an attacker who writes a value into a job
    that runs arbitrary code when unpickled. This is why we set a custom
    ``find_global`` method on the ``Unpickler``: only jobs and a whitelist
    of classes/functions (plus the built-in types) are allowed to be
    unpickled.
    """
    def restricted_find_global(mod_name, fn_name):
        __import__(mod_name)
        mod = sys.modules[mod_name]
        fn = getattr(mod, fn_name)
        if not (fn in JOB_REGISTRY or fn in _UNPICKLE_WHITELIST):
            raise UnpicklingError(
                '{}.{} is not allowed in jobs'.format(mod_name, fn_name)
            )
        return fn

    unpickler = Unpickler(StringIO(pickled))
    unpickler.find_global = restricted_find_global
    try:
        unpickled = unpickler.load()
    except (StandardError, UnpicklingError):
        raise NotReadableJobError('Could not unpickle.', pickled)
    return unpickled
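# Hedged usage sketch for the guard above (assumes the surrounding module's
# _unpickle, JOB_REGISTRY and _UNPICKLE_WHITELIST): a pickle that references
# a callable outside the whitelist, e.g. os.system, is refused.
import os
from cPickle import dumps

malicious = dumps(os.system)  # pickles a *reference* to os.system
_unpickle(malicious)          # raises NotReadableJobError instead of importing it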
def replay(self, name):
    replay_file_path = os.path.join(PROJECTS, self.name, "replays", name)
    replay = None
    with open(replay_file_path, "r") as fp:
        upkl = Unpickler(fp)
        replay = upkl.load()
    return replay
def getNewState(self, file):
    # Would like to do load(file) here... but it doesn't work with
    # universal line endings, see Python bug 1724366
    from cStringIO import StringIO
    unpickler = Unpickler(StringIO(file.read()))
    # Magic to keep us backward compatible in the face of packages changing...
    unpickler.find_global = self.findGlobal
    return unpickler.load()
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads objects
    saved with older versions of Eelbrain, and allows using a system file
    dialog to select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None, a system file dialog will be used.
        If the user cancels the file dialog, a RuntimeError is raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")

    if not os.path.exists(file_path):
        new_path = os.extsep.join((file_path, 'pickled'))
        if os.path.exists(new_path):
            file_path = new_path

    with open(file_path, 'r') as fid:
        unpickler = Unpickler(fid)
        unpickler.find_global = map_paths
        obj = unpickler.load()

    return obj
def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
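# Hedged usage sketch (values are illustrative): deepCopy round-trips any
# picklable object graph and returns an equal but fully independent copy.
original = {'answer': 42, 'nested': [1, 2, 3]}
clone = deepCopy(original)
assert clone == original and clone is not original
assert clone['nested'] is not original['nested']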
def getNewState(cls, file):
    # Would like to do load(file) here... but it doesn't work with
    # universal line endings, see Python bug 1724366
    from cStringIO import StringIO
    unpickler = Unpickler(StringIO(file.read()))
    # Magic to keep us backward compatible in the face of packages changing...
    unpickler.find_global = cls.findGlobal
    return unpickler.load()
def unpickle(file_path=None):
    """Load pickled Python objects from a file.

    Almost like ``cPickle.load(open(file_path))``, but also loads objects
    saved with older versions of Eelbrain, and allows using a system file
    dialog to select a file.

    Parameters
    ----------
    file_path : None | str
        Path to a pickled file. If None (default), a system file dialog will
        be shown. If the user cancels the file dialog, a RuntimeError is
        raised.
    """
    if file_path is None:
        filetypes = [("Pickles (*.pickled)", '*.pickled'), ("All files", '*')]
        file_path = ui.ask_file("Select File to Unpickle", "Select a pickled "
                                "file to unpickle", filetypes)
        if file_path is False:
            raise RuntimeError("User canceled")
        else:
            print repr(file_path)
    else:
        file_path = os.path.expanduser(file_path)

    if not os.path.exists(file_path):
        new_path = os.extsep.join((file_path, 'pickled'))
        if os.path.exists(new_path):
            file_path = new_path

    with open(file_path, 'r') as fid:
        unpickler = Unpickler(fid)
        unpickler.find_global = map_paths
        obj = unpickler.load()

    return obj
def loads(self, s):
    up = Unpickler(BytesIO(s))
    up.persistent_load = self._get_object
    try:
        return up.load()
    except KeyError, e:
        raise UnpicklingError("Could not find Node class for %s" % e)
def deserialize(self, message, task_id=None):
    """Deserialize an object.

    :param message: A serialized object (string).
    :param task_id: When given, resolve persistent references using the
        arguments stored for that task; when None, raise an error if the
        message contains references to external objects.
    """
    fail = []
    if task_id is None:
        def persistent_load(task_id):
            raise UnpicklingError('message contained references to '
                                  'external objects: %s' % task_id)
    else:
        args = self._queue.get_arguments(task_id)
        args = {k: loads(v) for k, v in args.items()}

        def persistent_load(arg_id):
            value = args[arg_id]
            if isinstance(value, TaskFailure):
                fail.append(value)
            return value

    data = StringIO(message)
    pickle = Unpickler(data)
    pickle.persistent_load = persistent_load
    obj = pickle.load()
    if fail and not obj.on_error_pass:
        # TODO detect errors earlier, fail earlier, cancel enqueued tasks
        self.set_result(obj, fail[0])
        obj = None
    return obj
def pickle_load(file_name):
    f = open(file_name, "rb")
    obj = None
    if f is not None:
        u = Unpickler(f)
        obj = u.load()
        f.close()
    return obj
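# A hypothetical save counterpart (not in the original source) mirroring
# pickle_load's single load() with a single dump().
from cPickle import Pickler

def pickle_save(file_name, obj):
    f = open(file_name, "wb")
    try:
        Pickler(f, 2).dump(obj)  # protocol 2: compact binary format
    finally:
        f.close()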
def load(self, model_path):
    """
    Load the pickled classifier model from disk

    :param model_path: path to the model
    :type model_path: str
    """
    try:
        with open(model_path) as f:
            pickle = Unpickler(f)
            self.classifiers = pickle.load()
        with open(model_path.replace('.m', '.vec')) as f:
            pickle = Unpickler(f)
            self.vectorizer = pickle.load()
    except IOError:
        self.logger.info('Could not load model: {}'.format(model_path))
def deepcopy(obj):
    """Makes a deep copy of the object using the pickle mechanism."""
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(aq_base(obj))
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
def server_decode(msg):
    """Decodes msg and returns its parts"""
    unpickler = Unpickler(StringIO(msg))
    unpickler.find_global = server_find_global

    try:
        return unpickler.load()  # msgid, flags, name, args
    except:
        log("can't decode message: %s" % short_repr(msg),
            level=logging.ERROR)
        raise
def get_action_param_var(pfilename):
    '''
    Argument is name of file to unpickle.
    Return value is dictionary value.
    '''
    from cPickle import Unpickler
    with open(pfilename, 'rb') as fileh:
        pickle = Unpickler(fileh)
        list_of_dict_files = pickle.load()
    return list_of_dict_files
def get_action_param_file(pfilename):
    '''
    Argument is name of file to unpickle.
    Return value is dictionary value.
    '''
    from cPickle import Unpickler
    fileh = open(pfilename, 'rb')
    pickle = Unpickler(fileh)
    list_of_dict_files = pickle.load()
    fileh.close()
    return list_of_dict_files