# Test copy() u2a = u2.copy() assert u2a == u2 class MyUserDict(UserDict): def display(self): print self m2 = MyUserDict(u2) m2a = m2.copy() assert m2a == m2 # Test keys, items, values assert u2.keys() == d2.keys() assert u2.items() == d2.items() assert u2.values() == d2.values() # Test has_key for i in u2.keys(): assert u2.has_key(i) == 1 assert u1.has_key(i) == d1.has_key(i) assert u0.has_key(i) == d0.has_key(i) # Test update t = UserDict() t.update(u2) assert t == u2
def items(self):
    """Return this mapping's (key, value) pairs.

    The backing data is filled in lazily, so force population
    before delegating to the plain UserDict implementation.
    """
    self.__populate()
    populated_items = UserDict.items(self)
    return populated_items
# Build (or reload) the per-dump hash tables and the global frequency
# table, then purge hashes that cannot be shared between dumps.
# NOTE(review): tables, freq, fi and nulhash are initialised elsewhere
# in the file — confirm before moving this section.
frqc = 0
if os.path.isfile("dumps/dumps.cache"):
    # Fast path: reuse the previously computed count and tables.
    print("Loading hashlist... (cached)")
    # NOTE(review): unpickling is only safe because the cache is written
    # by this tool itself — never point it at untrusted files.
    with open("dumps/dumps.cache", "rb") as cache_fp:
        (frqc, tables) = cPickle.load(cache_fp)
else:
    print("Loading hashes and purging sole occurences")
    for dump_path in sorted(glob.glob("dumps/*.dump")):
        # 'with' guarantees the dump file is closed even if load fails
        # (the original leaked the handle; also shadowed builtin 'file').
        with open(dump_path, "rb") as dump_fp:
            tp = cPickle.load(dump_fp)
        table = UserDict(tp)
        table.filename = os.path.basename(dump_path).replace(".dump", "")
        table.idx = fi
        fi += 1
        # Randomly tag each table with one of the two supported memory sizes.
        table.memory = (768, 1024)[random.randint(0, 1)]
        tables.append(table)
        # Accumulate global page-hash frequencies across all dumps.
        for k, v in table.items():
            freq[k] = freq.get(k, 0) + v
        print("Length of table %s: %d" % (dump_path, len(table)))
    print("Length of freqtable: %s" % len(freq))
    # Drop hashes seen only once (not shareable) and the null page;
    # count the remaining shareable occurrences into frqc.
    # freq.items() materialises a list here (Python 2), so deleting
    # from freq while iterating is safe.
    for k, v in freq.items():
        if v == 1 or k == nulhash:
            del freq[k]
            for table in tables:
                if k in table:
                    del table[k]
        else:
            frqc += v
    print("Length of freqtable after purging of non-shareable pages: %s" % len(freq))
# Test copy() u2a = u2.copy() verify(u2a == u2) class MyUserDict(UserDict): def display(self): print self m2 = MyUserDict(u2) m2a = m2.copy() verify(m2a == m2) # Test keys, items, values verify(u2.keys() == d2.keys()) verify(u2.items() == d2.items()) verify(u2.values() == d2.values()) # Test has_key for i in u2.keys(): verify(u2.has_key(i) == 1) verify(u1.has_key(i) == d1.has_key(i)) verify(u0.has_key(i) == d0.has_key(i)) # Test update t = UserDict() t.update(u2) verify(t == u2)