def _clean_all(self):
    """
    Cleans all disks and machines
    """
    machine = TestMachine()
    for pk in DataList.get_pks(machine._namespace, machine._name):
        try:
            # Delete the machine's disks first, then the machine itself
            stale_machine = TestMachine(pk)
            for stale_disk in stale_machine.disks:
                stale_disk.delete()
            stale_machine.delete()
        except (ObjectNotFoundException, ValueError):
            # Object is already gone or corrupt; just drop its primary key
            DataList.delete_pk(machine._namespace, machine._name, pk)
    disk = TestDisk()
    for pk in DataList.get_pks(disk._namespace, disk._name):
        try:
            TestDisk(pk).delete()
        except (ObjectNotFoundException, ValueError):
            DataList.delete_pk(disk._namespace, disk._name, pk)
def test_pk_stretching(self): """ Validates whether the primary key lists scale correctly. * X entries will be added (e.g. 10000) * X/2 random entries will be deleted (5000) * X/2 entries will be added again (5000) * X entries will be removed (10000) No entries should be remaining """ print '' print 'starting test' amount_of_objects = 10000 # Must be an even number! machine = TestMachine() runtimes = [] # First fill start = time.time() keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), 0, 'There should be no primary keys yet ({0})'.format( len(list(keys)))) guids = [] mstart = time.time() for i in xrange(0, amount_of_objects): guid = str(uuid.uuid4()) guids.append(guid) DataList.add_pk(machine._namespace, machine._name, guid) keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format( len(guids), len(list(keys)))) if i % 100 == 99: avgitemspersec = (i + 1) / (time.time() - start) itemspersec = 100 / (time.time() - mstart) runtimes.append(itemspersec) self._print_progress( '* adding object {0}/{1} (run: {2} ops, avg: {3} ops)'. 
format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2))) mstart = time.time() print '' # First delete amount_of_objects /= 2 shuffle(guids) # Make the test a bit more realistic guids_copy = guids[:] dstart = time.time() mstart = time.time() for i in xrange(0, amount_of_objects): guid = guids_copy[i] guids.remove(guid) DataList.delete_pk(machine._namespace, machine._name, guid) keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format( len(guids), len(list(keys)))) if i % 100 == 99: avgitemspersec = (i + 1) / (time.time() - dstart) itemspersec = 100 / (time.time() - mstart) runtimes.append(itemspersec) self._print_progress( '* delete object {0}/{1} (run: {2} ops, avg: {3} ops)'. format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2))) mstart = time.time() keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), amount_of_objects, 'There should be {0} primary keys ({1})'.format( amount_of_objects, len(list(keys)))) print '' # Second round sstart = time.time() mstart = time.time() for i in xrange(0, amount_of_objects): guid = str(uuid.uuid4()) guids.append(guid) DataList.add_pk(machine._namespace, machine._name, guid) keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format( len(guids), len(list(keys)))) if i % 100 == 99: avgitemspersec = (i + 1) / (time.time() - sstart) itemspersec = 100 / (time.time() - mstart) runtimes.append(itemspersec) self._print_progress( '* adding object {0}/{1} (run: {2} ops, avg: {3} ops)'. 
format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2))) mstart = time.time() print '' # Second delete amount_of_objects *= 2 shuffle(guids) # Make the test a bit more realistic guids_copy = guids[:] dstart = time.time() mstart = time.time() for i in xrange(0, amount_of_objects): guid = guids_copy[i] guids.remove(guid) DataList.delete_pk(machine._namespace, machine._name, guid) keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format( len(guids), len(list(keys)))) if i % 100 == 99: avgitemspersec = (i + 1) / (time.time() - dstart) itemspersec = 100 / (time.time() - mstart) runtimes.append(itemspersec) self._print_progress( '* delete object {0}/{1} (run: {2} ops, avg: {3} ops)'. format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2))) mstart = time.time() keys = DataList._get_pks(machine._namespace, machine._name) self.assertEqual( len(guids), 0, 'All guids should be removed. {0} left'.format(len(guids))) self.assertEqual( len(list(keys)), 0, 'There should be no primary keys ({0})'.format(len(list(keys)))) seconds_passed = (time.time() - start) runtimes.sort() print '\ncompleted in {0} seconds (avg: {1} ops, min: {2} ops, max: {3} ops)'.format( round(seconds_passed, 2), round((amount_of_objects * 3) / seconds_passed, 2), round(runtimes[1], 2), round(runtimes[-2], 2))
def test_pk_stretching(self):
    """
    Validates whether the primary key lists scale correctly.
    * X entries will be added (e.g. 10000)
    * X/2 random entries will be deleted (5000)
    * X/2 entries will be added again (5000)
    * X entries will be removed (10000)
    No entries should be remaining
    """
    print ''
    print 'starting test'
    amount_of_objects = 10000  # Must be an even number!
    machine = TestMachine()
    runtimes = []  # ops/sec of every 100-op window, across all four phases
    # First fill
    start = time.time()
    keys = DataList._get_pks(machine._namespace, machine._name)
    self.assertEqual(len(list(keys)), 0, 'There should be no primary keys yet ({0})'.format(len(list(keys))))
    guids = []  # local bookkeeping of the guids the pk list should contain
    mstart = time.time()  # start of the current 100-op measurement window
    for i in xrange(0, amount_of_objects):
        guid = str(uuid.uuid4())
        guids.append(guid)
        DataList.add_pk(machine._namespace, machine._name, guid)
        # The stored pk list must track the local bookkeeping after every add
        keys = DataList._get_pks(machine._namespace, machine._name)
        self.assertEqual(len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format(len(guids), len(list(keys))))
        if i % 100 == 99:
            # Report window throughput and phase average every 100 ops
            avgitemspersec = (i + 1) / (time.time() - start)
            itemspersec = 100 / (time.time() - mstart)
            runtimes.append(itemspersec)
            self._print_progress('* adding object {0}/{1} (run: {2} ops, avg: {3} ops)'.format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2)))
            mstart = time.time()
    print ''
    # First delete: remove a random half of the entries
    amount_of_objects /= 2
    shuffle(guids)  # Make the test a bit more realistic
    guids_copy = guids[:]  # iterate a stable copy while mutating `guids`
    dstart = time.time()
    mstart = time.time()
    for i in xrange(0, amount_of_objects):
        guid = guids_copy[i]
        guids.remove(guid)
        DataList.delete_pk(machine._namespace, machine._name, guid)
        keys = DataList._get_pks(machine._namespace, machine._name)
        self.assertEqual(len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format(len(guids), len(list(keys))))
        if i % 100 == 99:
            avgitemspersec = (i + 1) / (time.time() - dstart)
            itemspersec = 100 / (time.time() - mstart)
            runtimes.append(itemspersec)
            self._print_progress('* delete object {0}/{1} (run: {2} ops, avg: {3} ops)'.format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2)))
            mstart = time.time()
    # Half of the entries must remain after the first delete phase
    keys = DataList._get_pks(machine._namespace, machine._name)
    self.assertEqual(len(list(keys)), amount_of_objects, 'There should be {0} primary keys ({1})'.format(amount_of_objects, len(list(keys))))
    print ''
    # Second round: refill back to the original amount
    sstart = time.time()
    mstart = time.time()
    for i in xrange(0, amount_of_objects):
        guid = str(uuid.uuid4())
        guids.append(guid)
        DataList.add_pk(machine._namespace, machine._name, guid)
        keys = DataList._get_pks(machine._namespace, machine._name)
        self.assertEqual(len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format(len(guids), len(list(keys))))
        if i % 100 == 99:
            avgitemspersec = (i + 1) / (time.time() - sstart)
            itemspersec = 100 / (time.time() - mstart)
            runtimes.append(itemspersec)
            self._print_progress('* adding object {0}/{1} (run: {2} ops, avg: {3} ops)'.format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2)))
            mstart = time.time()
    print ''
    # Second delete: remove everything that is left
    amount_of_objects *= 2
    shuffle(guids)  # Make the test a bit more realistic
    guids_copy = guids[:]
    dstart = time.time()
    mstart = time.time()
    for i in xrange(0, amount_of_objects):
        guid = guids_copy[i]
        guids.remove(guid)
        DataList.delete_pk(machine._namespace, machine._name, guid)
        keys = DataList._get_pks(machine._namespace, machine._name)
        self.assertEqual(len(list(keys)), len(guids), 'There should be {0} primary keys instead of {1}'.format(len(guids), len(list(keys))))
        if i % 100 == 99:
            avgitemspersec = (i + 1) / (time.time() - dstart)
            itemspersec = 100 / (time.time() - mstart)
            runtimes.append(itemspersec)
            self._print_progress('* delete object {0}/{1} (run: {2} ops, avg: {3} ops)'.format(i + 1, int(amount_of_objects), round(itemspersec, 2), round(avgitemspersec, 2)))
            mstart = time.time()
    # Everything must be gone now, both locally and in the store
    keys = DataList._get_pks(machine._namespace, machine._name)
    self.assertEqual(len(guids), 0, 'All guids should be removed. {0} left'.format(len(guids)))
    self.assertEqual(len(list(keys)), 0, 'There should be no primary keys ({0})'.format(len(list(keys))))
    seconds_passed = (time.time() - start)
    runtimes.sort()
    # amount_of_objects is back at X here; 3*X operations ran in total.
    # runtimes[1] / runtimes[-2] deliberately skip the single fastest and
    # slowest window as outliers.
    print '\ncompleted in {0} seconds (avg: {1} ops, min: {2} ops, max: {3} ops)'.format(round(seconds_passed, 2), round((amount_of_objects * 3) / seconds_passed, 2), round(runtimes[1], 2), round(runtimes[-2], 2))
def delete(self, abandon=False):
    """
    Delete the given object. It also invalidates certain lists.

    :param abandon: when True, linked objects are detached (their foreign
                    key is set to None and saved) instead of the delete
                    raising a LinkedObjectException.
    """
    # Check foreign relations: refuse to delete (or abandon) while other
    # objects still point at this one
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            items = getattr(self, key)
            if info['list'] is True:
                if len(items) > 0:
                    if abandon is True:
                        # Detach every child instead of refusing the delete
                        for item in items.itersafe():
                            setattr(item, info['key'], None)
                            try:
                                item.save()
                            except ObjectNotFoundException:
                                # Child disappeared concurrently; nothing to detach
                                pass
                    else:
                        raise LinkedObjectException('There are {0} items left in self.{1}'.format(len(items), key))
            elif items is not None:
                # No list (so a 1-to-1 relation), so there should be an object, or None
                item = items  # More clear naming
                if abandon is True:
                    setattr(item, info['key'], None)
                    try:
                        item.save()
                    except ObjectNotFoundException:
                        pass
                else:
                    raise LinkedObjectException('There is still an item linked in self.{0}'.format(key))
    # First, update reverse index: remove this object's guid from the
    # reverse-index entries of every object it points to
    try:
        self._mutex_reverseindex.acquire(60)  # NOTE(review): 60 presumably a timeout in seconds - confirm
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            if original_guid is not None:
                # A relation without explicit foreign_type points to our own class
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                reverse_index = self._volatile.get(reverse_key)
                if reverse_index is not None:
                    if relation.foreign_key in reverse_index:
                        entries = reverse_index[relation.foreign_key]
                        if self.guid in entries:
                            entries.remove(self.guid)
                            reverse_index[relation.foreign_key] = entries
                            self._volatile.set(reverse_key, reverse_index)
        # Drop this object's own reverse index as well
        self._volatile.delete('ovs_reverseindex_{0}_{1}'.format(self._name, self.guid))
    finally:
        self._mutex_reverseindex.release()
    # Second, invalidate property lists that cached results for this type
    try:
        self._mutex_listcache.acquire(60)
        cache_key = '{0}_{1}'.format(DataList.cachelink, self._name)
        cache_list = Toolbox.try_get(cache_key, {})
        change = False
        for list_key in cache_list.keys():
            fields = cache_list[list_key]
            # '__all' marks lists that depend on the full object set,
            # which this delete just changed
            if '__all' in fields:
                change = True
                self._volatile.delete(list_key)
                del cache_list[list_key]
        if change is True:
            self._volatile.set(cache_key, cache_list)
            self._persistent.set(cache_key, cache_list)
    finally:
        self._mutex_listcache.release()
    # Delete the object out of the persistent store
    try:
        self._persistent.delete(self._key)
    except KeyNotFoundException:
        # Already gone from the persistent store; deletion is best-effort here
        pass
    # Delete the object and its properties out of the volatile store
    self.invalidate_dynamics()
    self._volatile.delete(self._key)
    # Finally, remove the object from its type's primary key list
    DataList.delete_pk(self._namespace, self._name, self._guid)
def delete(self, abandon=False):
    """
    Delete the given object. It also invalidates certain lists.

    :param abandon: when True, linked objects are detached (their foreign
                    key is set to None and saved) instead of the delete
                    raising a LinkedObjectException.
    """
    # Check foreign relations: refuse to delete (or abandon) while other
    # objects still point at this one
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            items = getattr(self, key)
            if info['list'] is True:
                if len(items) > 0:
                    if abandon is True:
                        # Detach every child instead of refusing the delete
                        for item in items.itersafe():
                            setattr(item, info['key'], None)
                            try:
                                item.save()
                            except ObjectNotFoundException:
                                # Child disappeared concurrently; nothing to detach
                                pass
                    else:
                        raise LinkedObjectException(
                            'There are {0} items left in self.{1}'.format(
                                len(items), key))
            elif items is not None:
                # No list (so a 1-to-1 relation), so there should be an object, or None
                item = items  # More clear naming
                if abandon is True:
                    setattr(item, info['key'], None)
                    try:
                        item.save()
                    except ObjectNotFoundException:
                        pass
                else:
                    raise LinkedObjectException(
                        'There is still an item linked in self.{0}'.format(
                            key))
    # First, update reverse index: remove this object's guid from the
    # reverse-index entries of every object it points to
    try:
        self._mutex_reverseindex.acquire(60)  # NOTE(review): 60 presumably a timeout in seconds - confirm
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            if original_guid is not None:
                # A relation without explicit foreign_type points to our own class
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(
                    classname, original_guid)
                reverse_index = self._volatile.get(reverse_key)
                if reverse_index is not None:
                    if relation.foreign_key in reverse_index:
                        entries = reverse_index[relation.foreign_key]
                        if self.guid in entries:
                            entries.remove(self.guid)
                            reverse_index[relation.foreign_key] = entries
                            self._volatile.set(reverse_key, reverse_index)
        # Drop this object's own reverse index as well
        self._volatile.delete('ovs_reverseindex_{0}_{1}'.format(
            self._name, self.guid))
    finally:
        self._mutex_reverseindex.release()
    # Second, invalidate property lists that cached results for this type
    try:
        self._mutex_listcache.acquire(60)
        cache_key = '{0}_{1}'.format(DataList.cachelink, self._name)
        cache_list = Toolbox.try_get(cache_key, {})
        change = False
        for list_key in cache_list.keys():
            fields = cache_list[list_key]
            # '__all' marks lists that depend on the full object set,
            # which this delete just changed
            if '__all' in fields:
                change = True
                self._volatile.delete(list_key)
                del cache_list[list_key]
        if change is True:
            self._volatile.set(cache_key, cache_list)
            self._persistent.set(cache_key, cache_list)
    finally:
        self._mutex_listcache.release()
    # Delete the object out of the persistent store
    try:
        self._persistent.delete(self._key)
    except KeyNotFoundException:
        # Already gone from the persistent store; deletion is best-effort here
        pass
    # Delete the object and its properties out of the volatile store
    self.invalidate_dynamics()
    self._volatile.delete(self._key)
    # Finally, remove the object from its type's primary key list
    DataList.delete_pk(self._namespace, self._name, self._guid)