def main(path=None):
    """Scan a FileStorage for object references that cannot be resolved.

    If *path* is None it is read from the command line (``-v`` increments
    a local verbosity counter).  For every object in the storage index,
    verify that every object it references (a) is present in the index,
    (b) loads cleanly, and (c) was not created by a transaction that was
    later undone.  Each problem is handed to ``report()``.
    """
    verbose = 0
    if path is None:
        import sys
        import getopt
        opts, args = getopt.getopt(sys.argv[1:], "v")
        for k, v in opts:
            if k == "-v":
                verbose += 1
        # exactly one positional argument: the storage path
        path, = args
    fs = FileStorage(path, read_only=1)
    # Set of oids in the index that failed to load due to POSKeyError.
    # This is what happens if undo is applied to the transaction creating
    # the object (the oid is still in the index, but its current data
    # record has a backpointer of 0, and POSKeyError is raised then
    # because of that backpointer).
    undone = {}
    # Set of oids that were present in the index but failed to load.
    # This does not include oids in undone.
    noload = {}
    for oid in fs._index.keys():
        try:
            data, serial = fs.load(oid, "")
        except (KeyboardInterrupt, SystemExit):
            raise
        except POSKeyError:
            undone[oid] = 1
        except Exception:
            # Was a bare "except:".  Exception is equivalent here because
            # KeyboardInterrupt/SystemExit are re-raised above, and it no
            # longer swallows other BaseException subclasses.
            if verbose:
                traceback.print_exc()
            noload[oid] = 1
    inactive = noload.copy()
    inactive.update(undone)
    for oid in fs._index.keys():
        if oid in inactive:
            continue
        data, serial = fs.load(oid, "")
        refs = get_refs(data)
        missing = []  # contains 3-tuples of oid, klass-metadata, reason
        for ref, klass in refs:
            if klass is None:
                klass = '<unknown>'
            if ref not in fs._index:
                missing.append((ref, klass, "missing"))
            if ref in noload:
                missing.append((ref, klass, "failed to load"))
            if ref in undone:
                missing.append((ref, klass, "object creation was undone"))
        if missing:
            report(oid, data, serial, missing)
def main(path=None):
    """Scan a FileStorage for object references that cannot be resolved.

    If *path* is None it is read from the command line; each ``-v`` flag
    increments the module-level ``VERBOSE`` counter.  Problems found are
    handed to ``report()``.
    """
    # BUG FIX: "VERBOSE += 1" below made VERBOSE function-local for the
    # whole body, so both the increment and the later "if VERBOSE:" raised
    # UnboundLocalError.  The counter is the module-level flag, so it must
    # be declared global.
    global VERBOSE
    if path is None:
        import sys
        import getopt
        opts, args = getopt.getopt(sys.argv[1:], "v")
        for k, v in opts:
            if k == "-v":
                VERBOSE += 1
        # exactly one positional argument: the storage path
        path, = args
    fs = FileStorage(path, read_only=1)
    # Set of oids in the index that failed to load due to POSKeyError.
    # This is what happens if undo is applied to the transaction creating
    # the object (the oid is still in the index, but its current data
    # record has a backpointer of 0, and POSKeyError is raised then
    # because of that backpointer).
    undone = {}
    # Set of oids that were present in the index but failed to load.
    # This does not include oids in undone.
    noload = {}
    for oid in fs._index.keys():
        try:
            data, serial = fs.load(oid, "")
        except (KeyboardInterrupt, SystemExit):
            raise
        except POSKeyError:
            undone[oid] = 1
        except:
            # deliberate diagnostic catch-all; KI/SE re-raised above
            if VERBOSE:
                traceback.print_exc()
            noload[oid] = 1
    inactive = noload.copy()
    inactive.update(undone)
    for oid in fs._index.keys():
        if oid in inactive:
            continue
        data, serial = fs.load(oid, "")
        refs = get_refs(data)
        missing = []  # contains 3-tuples of oid, klass-metadata, reason
        for ref, klass in refs:
            if klass is None:
                klass = '<unknown>'
            if ref not in fs._index:
                missing.append((ref, klass, "missing"))
            if ref in noload:
                missing.append((ref, klass, "failed to load"))
            if ref in undone:
                missing.append((ref, klass, "object creation was undone"))
        if missing:
            report(oid, data, serial, missing)
def main(path, search_oids):
    """Check the FileStorage at *path* for unresolvable references.

    Every reference to an oid in *search_oids* is reported immediately;
    references that are missing from the index, fail to load, or whose
    creating transaction was undone are collected and reported per object.
    """
    fs = FileStorage(path, read_only=1)
    # Oids whose creating transaction was undone: the oid is still in the
    # index, but its current data record carries a backpointer of 0, so
    # loading it raises POSKeyError.
    undone = {}
    # Oids present in the index that failed to load for any other reason
    # (disjoint from undone).
    noload = {}
    for oid in fs._index.keys():
        try:
            data, serial = fs.load(oid, "")
        except (KeyboardInterrupt, SystemExit):
            raise
        except POSKeyError:
            undone[oid] = 1
        except:
            # deliberate diagnostic catch-all; KI/SE re-raised above
            if VERBOSE:
                traceback.print_exc()
            noload[oid] = 1
    inactive = noload.copy()
    inactive.update(undone)
    for oid in fs._index.keys():
        if oid in inactive:
            continue
        data, serial = fs.load(oid, "")
        missing = []  # 3-tuples of (oid, klass-metadata, reason)
        for entry in get_refs(data):
            try:
                ref, klass = entry
            except (ValueError, TypeError):
                # reference record could not be unpacked into (oid, klass)
                ref, klass = entry, '<unknown>'
            if ref in search_oids:
                report(oid, data, serial, [(ref, klass, "searching for")])
            if ref not in fs._index:
                missing.append((ref, klass, "missing"))
            if ref in noload:
                missing.append((ref, klass, "failed to load"))
            if ref in undone:
                missing.append((ref, klass, "object creation was undone"))
        if missing:
            report(oid, data, serial, missing)
def test_pack_with_1_day(self):
    """Packing with --days=1 must keep a state newer than one day."""
    from ZODB.DB import DB
    from ZODB.FileStorage import FileStorage
    import time
    import transaction
    from relstorage.zodbpack import main

    # Build a storage containing two revisions of the root object.
    fs = FileStorage(self.db_fn, create=True)
    database = DB(fs)
    connection = database.open()
    connection.root()['x'] = 1
    transaction.commit()
    root_oid = b'\0' * 8
    old_state, old_serial = fs.load(root_oid, '')
    time.sleep(0.1)
    connection.root()['x'] = 2
    transaction.commit()
    connection.close()
    # Sanity check: the first revision is loadable before packing.
    self.assertEqual(old_state, fs.loadSerial(root_oid, old_serial))
    database.close()
    fs = None

    main(['', '--days=1', self.cfg_fn])

    # packing should not have removed the old state.
    fs = FileStorage(self.db_fn)
    self.assertEqual(old_state, fs.loadSerial(root_oid, old_serial))
    fs.close()
def test_pack_defaults(self):
    """Packing with default options must remove the superseded state."""
    from ZODB.DB import DB
    from ZODB.FileStorage import FileStorage
    from ZODB.POSException import POSKeyError
    import time
    import transaction
    from relstorage.zodbpack import main

    # Build a storage containing two revisions of the root object.
    fs = FileStorage(self.db_fn, create=True)
    database = DB(fs)
    connection = database.open()
    connection.root()['x'] = 1
    transaction.commit()
    root_oid = b'\0' * 8
    old_state, old_serial = fs.load(root_oid, '')
    time.sleep(0.1)
    connection.root()['x'] = 2
    transaction.commit()
    connection.close()
    # Sanity check: the first revision is loadable before packing.
    self.assertEqual(old_state, fs.loadSerial(root_oid, old_serial))
    database.close()
    fs = None

    main(['', self.cfg_fn])

    # packing should have removed the old state.
    fs = FileStorage(self.db_fn)
    self.assertRaises(POSKeyError, fs.loadSerial, root_oid, old_serial)
    fs.close()
def test_pack_with_1_day(self):
    """Packing with --days=1 must keep a state newer than one day.

    Uses the ``b()`` bytes-compat helper for oid/version literals.
    """
    from ZODB.DB import DB
    from ZODB.FileStorage import FileStorage
    # Removed unused "from ZODB.POSException import POSKeyError": this
    # test asserts the old state survives, it never expects that error.
    import time
    import transaction
    from relstorage.zodbpack import main
    storage = FileStorage(self.db_fn, create=True)
    db = DB(storage)
    conn = db.open()
    conn.root()['x'] = 1
    transaction.commit()
    oid = b('\0' * 8)
    state, serial = storage.load(oid, b(''))
    time.sleep(0.1)
    conn.root()['x'] = 2
    transaction.commit()
    conn.close()
    # Sanity check: the first revision is loadable before packing.
    self.assertEqual(state, storage.loadSerial(oid, serial))
    db.close()
    storage = None
    main(['', '--days=1', self.cfg_fn])
    # packing should not have removed the old state.
    storage = FileStorage(self.db_fn)
    self.assertEqual(state, storage.loadSerial(oid, serial))
    storage.close()
def generate_durus_object_records(): sio = cStringIO.StringIO() zodb_storage = ZODBFileStorage(zodb_file_name) n = 0 for oid in zodb_storage._index.keys(): n += 1 if n % 10000 == 0: sys.stdout.write('.') sys.stdout.flush() p, serial = zodb_storage.load(oid, '') refs = referencesf(p) # unwrap extra tuple from class meta data sio.seek(0) sio.write(p) sio.truncate() sio.seek(0) def get_class(module_class): module, klass = module_class if module not in sys.modules: __import__(module) return getattr(sys.modules[module], klass) class PersistentRef: def __init__(self, v): oid, module_class = v self.oid_klass = (oid, get_class(module_class)) unpickler = cPickle.Unpickler(sio) unpickler.persistent_load = lambda v: PersistentRef(v) class_meta = unpickler.load() class_meta, extra = class_meta assert extra is None object_state = unpickler.load() if type(object_state) == dict and '_container' in object_state: assert 'data' not in object_state object_state['data'] = object_state['_container'] del object_state['_container'] sio.seek(0) sio.truncate() cPickle.dump(get_class(class_meta), sio, 2) pickler = cPickle.Pickler(sio, 2) def persistent_id(v): if isinstance(v, PersistentRef): return v.oid_klass return None pickler.persistent_id = persistent_id pickler.dump(object_state) record = pack_record(oid, sio.getvalue(), ''.join(refs)) yield record print print n, 'objects written'
def main(path): fs = FileStorage(path, read_only=1) if PACK: fs.pack() db = ZODB.DB(fs) rt = db.open().root() paths = find_paths(rt, 3) def total_size(oid): cache = {} cache_size = 1000 def _total_size(oid, seen): v = cache.get(oid) if v is not None: return v data, serialno = fs.load(oid, '') size = len(data) for suboid in referencesf(data): if seen.has_key(suboid): continue seen[suboid] = 1 size += _total_size(suboid, seen) cache[oid] = size if len(cache) == cache_size: cache.popitem() return size return _total_size(oid, {}) keys = fs._index.keys() keys.sort() keys.reverse() if not VERBOSE: # If not running verbosely, don't print an entry for an object # unless it has an entry in paths. keys = filter(paths.has_key, keys) fmt = "%8s %5d %8d %s %s.%s" for oid in keys: data, serialno = fs.load(oid, '') mod, klass = get_pickle_metadata(data) refs = referencesf(data) path = paths.get(oid, '-') print fmt % (U64(oid), len(data), total_size(oid), path, mod, klass)
def main(path): fs = FileStorage(path, read_only=1) if PACK: fs.pack() db = ZODB.DB(fs) rt = db.open().root() paths = find_paths(rt, 3) def total_size(oid): cache = {} cache_size = 1000 def _total_size(oid, seen): v = cache.get(oid) if v is not None: return v data, serialno = fs.load(oid, "") size = len(data) for suboid in referencesf(data): if seen.has_key(suboid): continue seen[suboid] = 1 size += _total_size(suboid, seen) cache[oid] = size if len(cache) == cache_size: cache.popitem() return size return _total_size(oid, {}) keys = fs._index.keys() keys.sort() keys.reverse() if not VERBOSE: # If not running verbosely, don't print an entry for an object # unless it has an entry in paths. keys = filter(paths.has_key, keys) fmt = "%8s %5d %8d %s %s.%s" for oid in keys: data, serialno = fs.load(oid, "") mod, klass = get_pickle_metadata(data) refs = referencesf(data) path = paths.get(oid, "-") print fmt % (U64(oid), len(data), total_size(oid), path, mod, klass)
def run(path, v=0):
    """Print per-class byte and object totals for the FileStorage at
    *path*, largest first.  With v true, also print one line per object.
    """
    fs = FileStorage(path, read_only=1)
    # break into the file implementation
    if hasattr(fs._index, 'iterkeys'):
        oids = six.iterkeys(fs._index)  # renamed: "iter" shadowed a builtin
    else:
        oids = fs._index.keys()
    totals = {}  # "module.class" -> (byte total, object count)
    for oid in oids:
        data, serialno = fs.load(oid, '')
        mod, klass = get_pickle_metadata(data)
        key = "%s.%s" % (mod, klass)
        nbytes, count = totals.get(key, (0, 0))  # renamed: "bytes" shadowed a builtin
        totals[key] = (nbytes + len(data), count + 1)
        if v:
            print("%8s %5d %s" % (U64(oid), len(data), key))
    # BUG FIX: "L = totals.items(); L.sort(lambda a, b: cmp(a[1], b[1]))"
    # fails on Python 3 (this function already targets it — print() and
    # six): dict views have no .sort(), cmp() is gone, and sort() takes
    # no positional comparator.  A key-based descending sort is the
    # equivalent ordering on both Python 2 and 3.
    ranked = sorted(totals.items(), key=lambda item: item[1], reverse=True)
    print("Totals per object class:")
    for key, (nbytes, count) in ranked:
        print("%8d %8d %s" % (count, nbytes, key))
def run(path, v=0): fs = FileStorage(path, read_only=1) # break into the file implementation if hasattr(fs._index, 'iterkeys'): iter = fs._index.iterkeys() else: iter = fs._index.keys() totals = {} for oid in iter: data, serialno = fs.load(oid, '') mod, klass = get_pickle_metadata(data) key = "%s.%s" % (mod, klass) bytes, count = totals.get(key, (0, 0)) bytes += len(data) count += 1 totals[key] = bytes, count if v: print "%8s %5d %s" % (U64(oid), len(data), key) L = totals.items() L.sort(lambda a, b: cmp(a[1], b[1])) L.reverse() print "Totals per object class:" for key, (bytes, count) in L: print "%8d %8d %s" % (count, bytes, key)