def server_decode(msg):
    """Decode a pickled wire message and return its parts
    (msgid, flags, name, args)."""
    loader = Unpickler(StringIO(msg))
    # Restrict which globals may be resolved during unpickling.
    loader.find_global = server_find_global
    try:
        parts = loader.load()
    except:
        # Log the offending payload before letting the error propagate.
        log("can't decode message: %s" % short_repr(msg), level=logging.ERROR)
        raise
    return parts
def _zip_getitem(self, key):
    """Return the object stored under *key*, decompressing and unpickling
    it from the backing store.

    The in-memory ``self.cache`` is consulted first; on a miss the
    zlib-compressed pickle is fetched from ``self.dict`` and decoded.
    When ``self.writeback`` is true the decoded value is also kept in the
    cache (so in-place mutations can later be written back).
    """
    try:
        # Fast path: value was already decoded earlier.
        value = self.cache[key]
    except KeyError:
        # NOTE(review): StringIO over zlib output implies Python 2
        # (decompress yields bytes); under Python 3 this would need BytesIO.
        f = StringIO(zlib.decompress(self.dict[key]))
        value = Unpickler(f).load()
        if self.writeback:
            self.cache[key] = value
    return value
def send_command(self, *args): verb_string = 'to %s %s ' % (self.role, args,) # verbose('sendcommand args: '+`args`) Pickler(self.input, 1).dump(args) self.input.flush() res = Unpickler(self.output).load() verb_string = verb_string + '= %s' % (`res`,) #print(verb_string) >> sys.stderr if type(res) == types.StringType and res[:5] == 'Error': raise TestRunException(res) return res
def _zip_getitem(self, key): """ ``get-and-uncompress-item'' from dbase """ GET_ITEM = 'SELECT value FROM %s WHERE key = ?' % self.tablename item = self.conn.select_one(GET_ITEM, (key, )) if item is None: raise KeyError(key) f = BytesIO(zlib.decompress(item[0])) value = Unpickler(f).load() return value
def retrieve(filename):
    """Retrieve a pickled object (e.g. state list or networkx graph) from file.

    Parameters
    ----------
    filename : str
        Path of the pickle file (normalised with ``os.path.normcase``).

    Returns
    -------
    The unpickled object.

    Raises
    ------
    LogOpeningError
        When the file cannot be opened or read (wraps the IOError).
    """
    filename = os.path.normcase(filename)
    try:
        # ``with`` guarantees the handle is closed even when unpickling
        # raises — the original leaked the open file in that case.
        with open(filename, 'rb') as f:
            return Unpickler(f).load()
    except IOError:
        raise LogOpeningError("No file found for %s" % filename, filename)
def unpickle(filename):
    """Open *filename* in binary mode, unpickle one object from it and
    return that object; the file is always closed before returning."""
    with open(filename, "rb") as handle:
        return Unpickler(handle).load()
def getCounter(self, key):
    """Return the persisted counter value for *key*, defaulting to 0.

    Loads the counter mapping from ``self.counter_file``; when the file is
    missing or unreadable, an empty mapping is used and the file is
    (re)created so a later save has somewhere to go.  The loaded mapping is
    kept on ``self._counters``.
    """
    try:
        # Binary mode: pickle data is bytes (the original opened 'r+',
        # which breaks on Python 3 and never worked for binary protocols).
        with open(self.counter_file, 'rb') as f:
            self._counters = Unpickler(f).load()
    except Exception:
        # Missing/corrupt counter file: start over with an empty mapping
        # and make sure the file exists, mirroring the original's 'w+'.
        with open(self.counter_file, 'wb'):
            pass
        self._counters = {}
    # setdefault replaces the Python-2-only has_key() check.
    return self._counters.setdefault(key, 0)
def readAST(filename):
    """Read a pickled AST stream and return ``(opcode, name, ast)``.

    The file layout is one pickled opcode followed by repeated
    (name, ast) pickle pairs.  The returned *name*/*ast* are those of the
    last COMPLETE pair, or ``(None, None)`` when the stream holds no pair
    (the original raised UnboundLocalError in that case, and could return
    a name paired with the previous ast if EOF hit mid-pair).
    """
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        name = ast = None
        while True:
            try:
                pair_name = pk.load()
                pair_ast = pk.load()
            except EOFError:
                break
            # Commit only complete pairs so name/ast never get out of step.
            name, ast = pair_name, pair_ast
    return opcode, name, ast
def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle,
                         committedData=''):
    """Try to resolve a ZODB write conflict for object *oid*.

    Unpickles the new state from *newpickle*, locates the object's class,
    and delegates to the class's ``_p_resolveConflict`` hook with the old,
    committed and new states.  Returns the re-pickled resolved state, or
    None when resolution is impossible (unresolvable class, missing hook,
    ConflictError/BadClassName, or any unexpected error).
    """
    # class_tuple, old, committed, newstate = ('',''), 0, 0, 0
    try:
        prfactory = PersistentReferenceFactory()
        file = StringIO(newpickle)
        unpickler = Unpickler(file)
        unpickler.find_global = find_global
        # Persistent references are materialised lazily via the factory.
        unpickler.persistent_load = prfactory.persistent_load
        # First pickle record: class metadata (class or (class, newargs)).
        meta = unpickler.load()
        if isinstance(meta, tuple):
            klass = meta[0]
            newargs = meta[1] or ()
            if isinstance(klass, tuple):
                # (module, name) pair — resolve it to the real class.
                klass = find_global(*klass)
        else:
            klass = meta
            newargs = ()
        # Classes that previously lacked _p_resolveConflict are cached
        # in _unresolvable so we fail fast next time.
        if klass in _unresolvable:
            return None
        # Second pickle record: the object's new state.
        newstate = unpickler.load()
        inst = klass.__new__(klass, *newargs)
        try:
            resolve = inst._p_resolveConflict
        except AttributeError:
            _unresolvable[klass] = 1
            return None
        old = state(self, oid, oldSerial, prfactory)
        committed = state(self, oid, committedSerial, prfactory,
                          committedData)
        resolved = resolve(old, committed, newstate)
        # Re-pickle meta + resolved state in the same two-record layout.
        file = StringIO()
        pickler = Pickler(file,1)
        pickler.persistent_id = persistent_id
        pickler.dump(meta)
        pickler.dump(resolved)
        # NOTE(review): getvalue(1) suggests a ZODB-specific StringIO
        # variant — the stdlib getvalue() takes no argument; confirm.
        return file.getvalue(1)
    except (ConflictError, BadClassName):
        return None
    except:
        # If anything else went wrong, catch it here and avoid passing an
        # arbitrary exception back to the client. The error here will mask
        # the original ConflictError. A client can recover from a
        # ConflictError, but not necessarily from other errors. But log
        # the error so that any problems can be fixed.
        logger.error("Unexpected error", exc_info=True)
        return None
def _cpickle_loads(pkl_str):
    """A replacement for cPickle.loads that overrides the find_global
    attribute (or find_class in Python 3) with ``_find_global``, so class
    lookups during unpickling go through our resolver.
    """
    pk = Unpickler(BytesIO(pkl_str))
    if hasattr(pk, 'find_class'):
        # This is the method name in Python3
        pk.find_class = _find_global
    else:
        # hasattr does not really work on the cPickle objects in Python2,
        # so we just assume we're in python2 if hasattr returned false
        pk.find_global = _find_global
    return pk.load()
def unpickle_values(self):
    """Load the eleven geometry values from ``self.idata`` into attributes.

    The attributes are read back in exactly the order they were dumped:
    centerpos, maxhalfwidth, leftmost, rightmost, conv_left, conv_right,
    middleypos, radius, miny, maxy, extend.
    """
    # Binary mode (pickle data is bytes) plus a with-block so the handle
    # is closed; the original opened in text mode and leaked the file.
    with open(self.idata, 'rb') as stream:
        pi = Unpickler(stream)
        self.centerpos = pi.load()
        self.maxhalfwidth = pi.load()
        self.leftmost = pi.load()
        self.rightmost = pi.load()
        self.conv_left = pi.load()
        self.conv_right = pi.load()
        self.middleypos = pi.load()
        self.radius = pi.load()
        self.miny = pi.load()
        self.maxy = pi.load()
        self.extend = pi.load()
def readAST(filename):
    """Read a pickled AST file: one opcode followed by (name, ast) pairs.

    Returns a dict mapping ``(name, opcode) -> ast``.
    """
    collected = {}
    with open(filename, 'rb') as stream:
        loader = Unpickler(stream)
        opcode = loader.load()
        try:
            while True:
                entry_name = loader.load()
                collected[(entry_name, opcode)] = loader.load()
        except EOFError:
            pass
    return collected
def __getitem__(self, key):
    """Return the value stored under *key*, with a local dict cache in
    front of the Redis hash at ``self._hash_key``.
    """
    try:
        # Fast path: already unpickled earlier.
        value = self._cache[key]
    except KeyError:
        # Membership is checked first so a missing field raises KeyError
        # instead of handing Unpickler the None that hget would return.
        if key not in self:
            raise KeyError(key)
        # NOTE(review): StringIO over hget() implies Python 2 (str
        # payload); under Python 3 this would need BytesIO.
        f = StringIO(self._storage.redis.hget(self._hash_key, key))
        value = Unpickler(f).load()
        self._cache[key] = value
    return value
def readAST(filename):
    """Read a pickled AST stream, printing each (name, ast) pair as it is
    loaded, and return ``(opcode, ast)`` for the last ast read.

    NOTE(review): Python 2 (print statements).  `ast` is unbound when the
    stream holds only the opcode, and only the last pair's ast survives —
    compare the sibling readAST variants in this file.
    """
    with open(filename, 'rb') as f:
        pk = Unpickler(f)
        opcode = pk.load()
        print 'opcode: %02x' % opcode
        # Pairs are read until the pickle stream is exhausted.
        while True:
            try:
                name = pk.load()
                ast = pk.load()
                print '%s\n%s\n' % (name, str(ast))
            except EOFError:
                break
    return opcode, ast
def read_level(self, path):
    """Open the level file at *path*, verify its magic numbers and version
    compatibility, and return the level object unpickled from it."""
    with open(path, "rb") as stream:
        # Both application and level-file magic must match.
        check_file_magic(stream, self.app_magic, "level")
        check_file_magic(stream, self.level_file_magic, "level")
        loader = Unpickler(stream)
        file_version = loader.load()
        app_version = loader.load()
        check_file_version(file_version, app_version,
                           self.level_min_version, self.level_file_version,
                           self.level_version_ranges)
        return self.unpickle_level(loader)
def __init__(self, filename):
    """Open a pdko index file and load its key table.

    File layout: 8 magic bytes, then an offset (via ``read_offset``)
    pointing at a pickled key dictionary.  The handle is deliberately kept
    open on ``self.handle`` for the Unpickler's later use.

    Raises IndexFileMissingError when the file is absent and
    IndexFormatError when the magic bytes do not match.
    NOTE(review): Python 2 (``raise Cls, msg`` syntax, text-mode open).
    """
    if not os.path.exists(filename):
        raise IndexFileMissingError(filename)
    self.filename = filename
    self.handle = open(self.filename)
    self.unpickler = Unpickler(self.handle)
    magic = self.handle.read(8)
    expected_magic = 'pdko\x00\x00\x00\x01'
    if magic != expected_magic:
        message = 'Magic bytes incorrect. Is %s really a pdko file?' \
            % self.filename
        raise IndexFormatError, message
    # Jump to the key table and unpickle it.
    table_offset = read_offset(self.handle)
    self.handle.seek(table_offset)
    self.key_dict = self.unpickler.load()
def cloneByPickle(obj):
    """Makes a copy of a ZODB object, loading ghosts as needed."""
    def persistent_id(o):
        # A ghost has _p_changed is None; setting it to 0 activates the
        # object so its state is available for pickling.
        if getattr(o, '_p_changed', 0) is None:
            o._p_changed = 0
        # Returning None tells the pickler to serialise by value.
        return None
    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
def test_collector_implicit_config_pickling(self, testdir):
    """A collector node built from an implicit config must survive a
    cPickle round trip with its name and listnames() intact.
    (Python 2: cPickle / py.std.cStringIO.)
    """
    from cPickle import Pickler, Unpickler
    tmpdir = testdir.tmpdir
    testdir.chdir()
    testdir.makepyfile(hello="def test_x(): pass")
    config = testdir.parseconfig(tmpdir)
    col = config.getfsnode(config.topdir)
    io = py.std.cStringIO.StringIO()
    pickler = Pickler(io)
    pickler.dump(col)
    # Rewind before unpickling from the same in-memory buffer.
    io.seek(0)
    unpickler = Unpickler(io)
    col2 = unpickler.load()
    assert col2.name == col.name
    assert col2.listnames() == col.listnames()
def load_game(self, path):
    """Restores a saved game from the specified path.

    Verifies the application and save-file magic numbers and the file
    version before unpickling the save data, then hands the data to
    ``restore_save_data``, records the save path and clears the
    unsaved-progress flag.
    """
    # with-block closes the handle even when a magic/version check raises
    # (the original leaked the open file in that case).
    with open(path, "rb") as f:
        check_file_magic(f, self.app_magic, "saved game")
        check_file_magic(f, self.save_file_magic, "saved game")
        p = Unpickler(f)
        file_version = p.load()
        app_version = p.load()
        check_file_version(file_version, app_version,
                          self.save_min_version, self.save_file_version,
                          self.save_version_ranges)
        data = p.load()
    self.restore_save_data(data)
    self.set_save_path(path)
    self.unsaved_progress = False
def loadBreakpoints(self, fn):
    """Merge breakpoints pickled in file *fn* into ``self.lines``.

    Returns 1 on success, 0 when the file does not exist or cannot be
    read/unpickled — in the failure case ``self.lines`` is reset to an
    empty dict, matching the original best-effort behaviour.
    """
    try:
        if not os.path.exists(fn):
            return 0
        # with-block closes the handle (the original never closed it).
        with open(fn, 'rb') as f:
            newlines = Unpickler(f).load()
        # The following line isn't quite correct when multiple breakpoints
        # are set on a single line.
        self.lines.update(newlines)
        return 1
    except Exception:
        # Narrowed from a bare except so Ctrl-C still interrupts;
        # a corrupt/unreadable file resets the breakpoint map.
        self.lines = {}
        return 0
def unpickle_old_pyfits(fn):
    """
    This function unpickles everything in the specified filename, and
    correctly adapts pyfits files pre 2.3 to the new module structure.

    *fn* may be a file path or an already-open file object; only handles
    this function opened itself are closed.  Returns the single object if
    exactly one was pickled, otherwise the list of all objects.
    """
    # Prefer the C pickler on Python 2; fall back to the pure-Python module.
    try:
        import cPickle as pickle
        from cPickle import Unpickler
    except:
        import pickle
        from pickle import Unpickler
    import imp, sys

    def fg(modname, classname):
        # Map the pre-2.3 'NP_pyfits' module path onto the modern 'core'.
        if 'NP_pyfits' in modname:
            modname = modname.replace('NP_pyfits', 'core')
        if '.' in modname:
            mod = __import__(modname, fromlist=[0])
        else:
            mod = __import__(modname)
        return getattr(mod, classname)

    objs = []
    if isinstance(fn, str):
        # NOTE(review): text-mode open — pickle data normally needs 'rb';
        # presumably this ran on Python 2.  find_global is also a
        # Python-2-only Unpickler hook.
        f = open(fn)
    else:
        f = fn
    try:
        u = Unpickler(f)
        u.find_global = fg
        # Read pickled objects until the stream is exhausted.
        while True:
            objs.append(u.load())
    except EOFError:
        pass
    finally:
        if isinstance(fn, str):
            f.close()
    if len(objs) == 1:
        return objs[0]
    else:
        return objs
def get_blocked_artists_pickle(self):
    """Load the blocked-artists cache from the player's user directory.

    Reads a pickled ``(artists, times)`` pair from the file
    ``autoqueue_block_cache``, converts plain lists (older cache format)
    into deques, and stores them on ``self._blocked_artists`` /
    ``self._blocked_artists_times``.  A missing file is silently ignored.
    """
    dump = os.path.join(self.player_get_userdir(), "autoqueue_block_cache")
    try:
        # 'rb': pickle data is binary; the original's text-mode 'r' breaks
        # under Python 3 and corrupts data on Windows.  Local also renamed
        # so it no longer shadows the `pickle` module.
        cache_file = open(dump, 'rb')
        try:
            unpickler = Unpickler(cache_file)
            artists, times = unpickler.load()
            if isinstance(artists, list):
                artists = deque(artists)
            if isinstance(times, list):
                times = deque(times)
            self._blocked_artists = artists
            self._blocked_artists_times = times
        finally:
            cache_file.close()
    except IOError:
        pass
def copyOf(source):
    """Copies a ZODB object, loading subobjects as needed.

    Re-ghostifies objects along the way to save memory.
    """
    # Objects we temporarily activated; they are re-ghosted after dump().
    former_ghosts = []
    # Maps (module, name) refs to ZClass objects for the unpickler side.
    zclass_refs = {}

    def persistent_id(ob, former_ghosts=former_ghosts,
                      zclass_refs=zclass_refs):
        if getattr(ob, '_p_changed', 0) is None:
            # Load temporarily.
            former_ghosts.append(ob)
            ob._p_changed = 0
        if hasattr(ob, '__bases__'):
            m = getattr(ob, '__module__', None)
            if (m is not None
                    and isinstance(m, StringType)
                    and m.startswith('*')):
                n = getattr(ob, '__name__', None)
                if n is not None:
                    # Pickling a ZClass instance.  Store the reference to
                    # the ZClass class separately, so that the pickler
                    # and unpickler don't trip over the apparently
                    # missing module.
                    ref = (m, n)
                    zclass_refs[ref] = ob
                    return ref
        return None

    def persistent_load(ref, zclass_refs=zclass_refs):
        return zclass_refs[ref]

    stream = StringIO()
    p = Pickler(stream, 1)
    p.persistent_id = persistent_id
    p.dump(source)
    if former_ghosts:
        # Deleting _p_changed turns the objects back into ghosts.
        for g in former_ghosts:
            del g._p_changed
        del former_ghosts[:]
    stream.seek(0)
    u = Unpickler(stream)
    u.persistent_load = persistent_load
    return u.load()
def load_pickle(self):
    """Load and return the package object stored at ``self.pickle_path``.

    Exits the process (``sys.exit(1)``) when the file is missing or when
    unpickling fails; the file pointer is always closed.
    NOTE(review): Python 2 (``print err_msg`` statement).
    """
    if not self._check_pickle_path_exists():
        logging.error("Pickle does not exist at {}".format(self.pickle_path))
        sys.exit(1)
    logging.info("Unpacking pickle at {0}".format(self.pickle_path))
    pickle_fp = self._get_pickle_file_pointer('rb')
    unpickler = Unpickler(pickle_fp)
    try:
        package = unpickler.load()
    except (UnpicklingError, AttributeError, EOFError, ImportError,
            IndexError) as err:
        # The broad tuple mirrors the errors pickle.load is documented
        # to raise on damaged or incompatible data.
        err_msg = "Unpickling failed. Error message: {}".format(repr(err))
        logging.critical(err_msg)
        print err_msg
        sys.exit(1)
    finally:
        pickle_fp.close()
    logging.info("Pickle at {0} unpacked.".format(self.pickle_path))
    return(package)
def validateSource(self):
    """Parse and validate every human test file, resuming from the
    position recorded in ``self.pickle`` when the stored options match
    the current ones; the resume pickle is removed afterwards.
    """
    skip_to_pos = 0
    if os.path.exists(self.pickle):
        # NOTE(review): upfh is never closed — leaked file handle.
        upfh = open(self.pickle, 'rb')
        unpickler = Unpickler(upfh)
        # Resume file stores (options, position) from the previous run.
        old_opt, old_pos = unpickler.load()
        if self.opt == old_opt:
            skip_to_pos = old_pos
    pos = -1
    for filename in sorted(self.files['humans']):
        pos += 1
        if pos < skip_to_pos:
            continue
        p = self.files['humans'][filename]
        # NOTE(review): `opt` here is a global/outer name while the check
        # above compared `self.opt` — confirm this is intentional.
        test = CslTest(opt, self.cp, p, filename, pos=pos)
        test.parse()
        test.validate()
    if os.path.exists(self.pickle):
        os.unlink(self.pickle)
def test_210_index(self):
    """Every host found by search_host must appear in both the stashed and
    the current profiles-info, and its rebuilt profile mtime must be
    strictly newer than the stashed one.
    """
    new_mtimes = self.get_profile_mtimes()
    # The pre-rebuild mtimes were stashed as a pickle in scratch storage.
    buffer = StringIO(self.readscratch("stashed_mtimes"))
    unpickler = Unpickler(buffer)
    stashed_mtimes = unpickler.load()
    command = ["search_host", "--domain=unittest",
               "--service=utsvc", "--instance=utsi1"]
    hosts = self.commandtest(command).splitlines()
    for host in hosts:
        self.assertTrue(host in stashed_mtimes,
                        "host %s missing from old profiles-info" % host)
        self.assertTrue(host in new_mtimes,
                        "host %s missing from new profiles-info" % host)
        self.assertTrue(
            new_mtimes[host] > stashed_mtimes[host],
            "host %s mtime %s not greater than original %s" %
            (host, new_mtimes[host], stashed_mtimes[host]))
def _cloneByPickle(self, obj):
    """Returns a deep copy of a ZODB object, loading ghosts as needed.

    Returns a 4-tuple ``(approxSize, clone, inside_orefs, outside_orefs)``
    where *approxSize* is the length of the intermediate pickle stream and
    the oref tuples come from the portal_modifier's on-clone callbacks.
    """
    modifier = getToolByName(self, 'portal_modifier')
    callbacks = modifier.getOnCloneModifiers(obj)
    if callbacks is not None:
        pers_id, pers_load, inside_orefs, outside_orefs = callbacks[0:4]
    else:
        inside_orefs, outside_orefs = (), ()
    stream = StringIO()
    p = Pickler(stream, 1)
    if callbacks is not None:
        # Custom persistent_id lets the modifier decide what is pickled
        # by reference rather than by value.
        p.persistent_id = pers_id
    # aq_base strips acquisition wrappers before pickling.
    p.dump(aq_base(obj))
    approxSize = stream.tell()
    stream.seek(0)
    u = Unpickler(stream)
    if callbacks is not None:
        u.persistent_load = pers_load
    return approxSize, u.load(), inside_orefs, outside_orefs
def testRepickler(self):
    """Round-trip a nested state built from Reduce/Obj test doubles
    through DummyRepickler and verify that both the pickled class and the
    state dict survive unchanged.
    """
    r2 = Obj("foo")
    r2.__setstate__("bar")
    r2 = Reduce(r2)
    r3 = Reduce(1, 2)
    r3.__setstate__(NewObj())
    r4 = Reduce()
    r4.args = r2.args
    r4.__setstate__("bar")
    r4.extend("!!!")
    r5 = Reduce()
    r5.append("!!!")
    r5["foo"] = "bar"
    state = {r2: r3, r4: r5}
    p = StringIO()
    # Two records in one stream: the class, then the state dict.
    Pickler(p, 1).dump(Obj).dump(state)
    p = p.getvalue()
    r = DummyRepickler()(p)
    load = Unpickler(StringIO(r)).load
    self.assertIs(Obj, load())
    self.assertDictEqual(state, load())
def dispatch_command(self):
    """Read one pickled command tuple from ``self.input`` and dispatch it
    to the bound method named by ``cmd[0]`` with ``cmd[1:]`` as arguments.

    NOTE(review): Python 2 code (backticks, ``apply``, ``except E, e``).
    When the command name is unknown, None is pickled back but execution
    still falls through to getattr — a ``return`` after the flush looks
    missing.  The function may also continue past this excerpt (no reply
    for *res* is visible here); confirm against the full source.
    """
    cmd = Unpickler(self.input).load()
    self.log("command get: " + cmd[0])
    if not hasattr(self, cmd[0]) or type(getattr(
            self, cmd[0])) != types.MethodType:
        # Unknown command: acknowledge with a pickled None.
        Pickler(self.output, 1).dump(None)
        self.output.flush()
    f = getattr(self, cmd[0])
    self.log("params: " + ` cmd[1:] `)
    start = time.time()
    now = start
    try:
        self.__check_children()
        res = apply(f, cmd[1:])
    except Exception, e:
        # Failures are converted into an 'Error: ...' string result.
        res = 'Error: %s' % e
        traceback.print_exc(traceback, sys.stderr)
        self.log("Exception " + res)
def _pickled_getitem_(self, key): """ Get object (unpickle if needed) from dbase >>> obj = db['A/B/C'] """ ## try: value = self.cache[key] except KeyError: value = self.dict[key] ## blob ? from ostap.core.core import Ostap if isinstance(value, Ostap.BLOB): ## unpack it! z = Ostap.blob_to_bytes(value) u = zlib.decompress(z) ## unpickle it! f = BytesIO(u) value = Unpickler(f).load() del z, u, f if self.writeback: self.cache[key] = value return value