def naturalKeySerializerTest(format, self):
    """Round-trip the natural-key test data through the given serializer format.

    Creates every object described in ``natural_key_test_data``, serializes
    with natural keys, deserializes, and asserts that both field values and
    per-model object counts survive the round trip.
    """
    # Create all the objects defined in the test data
    objects = []
    instance_count = {}
    for (func, pk, klass, datum) in natural_key_test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
        # Register the model class so its row count is compared after the
        # round trip. Without this the dict stays empty, the loops below
        # iterate nothing, and the final count assertions are vacuous.
        instance_count[klass] = 0

    # Get a count of the number of objects created for each class
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()

    # use_natural_keys is deprecated and to be removed in Django 1.9
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("always")
        # Serialize the test database
        serialized_data = serializers.serialize(format, objects, indent=2, use_natural_keys=True)

    for obj in serializers.deserialize(format, serialized_data):
        obj.save()

    # Assert that the deserialized data is the same as the original source
    for (func, pk, klass, datum) in natural_key_test_data:
        func[1](self, pk, klass, datum)

    # Assert that the number of objects deserialized is the same as the
    # number that was serialized.
    for klass, count in instance_count.items():
        self.assertEqual(count, klass.objects.count())
def serializerTest(format, self):
    """Round-trip the regression test data through the given serializer format.

    Creates every object described in ``test_data`` (plus the generic Tag
    objects), serializes, deserializes, and asserts that both field values
    and per-model object counts survive the round trip.
    """
    # Create all the objects defined in the test data
    objects = []
    instance_count = {}
    for (func, pk, klass, datum) in test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
        # Register the model class so its row count is compared after the
        # round trip. Without this the dict stays empty, the loops below
        # iterate nothing, and the final count assertions are vacuous.
        instance_count[klass] = 0

    # Get a count of the number of objects created for each class
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()

    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())

    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()

    # Assert that the deserialized data is the same as the original source
    for (func, pk, klass, datum) in test_data:
        func[1](self, pk, klass, datum)

    # Assert that the number of objects deserialized is the same as the
    # number that was serialized.
    for klass, count in instance_count.items():
        self.assertEqual(count, klass.objects.count())
def handle(self, **options):
    # Management-command entry point: detach every Drink and PourPicture
    # from its session, then wipe all session tables.
    # NOTE(review): this is Python 2 code (print statements and the
    # `except Exception, e` syntax); it will not run under Python 3.

    # Null out the session FK on every drink so the session rows can be
    # deleted without violating referential integrity.
    drinks = models.Drink.objects.all()
    pos = 0
    count = drinks.count()
    for d in drinks:
        pos += 1
        progbar('clear drink sessions', pos, count)
        d.session = None
        d.save()
    print ''

    # Same for pour pictures.
    pics = models.PourPicture.objects.all()
    count = pics.count()
    pos = 0
    for p in pics:
        pos += 1
        progbar('clear image sessions', pos, count)
        p.session = None
        p.save()
    print ''

    print 'deleting old sessions..',
    try:
        # Fast path: raw TRUNCATE of the session tables with FK checks off.
        # The backtick quoting suggests this targets MySQL — TODO confirm.
        with connection.constraint_checks_disabled():
            cursor = connection.cursor()
            for table in ('core_drinkingsession', 'core_kegsessionchunk', 'core_usersessionchunk', 'core_sessionchunk'):
                cursor.execute('TRUNCATE TABLE `%s`' % table)
        print 'truncate successful'
    except Exception, e:
        # Fallback for backends where TRUNCATE fails: a (slower) ORM cascade
        # delete. `e` is captured but intentionally unused.
        models.DrinkingSession.objects.all().delete()
        print 'orm delete successful'
def test_admin_password_change(self):
    """The admin password-change view works for a user with a UUID primary key."""
    superuser = UUIDUser.objects.create_superuser(username='******', email='*****@*****.**', password='******')
    self.assertTrue(self.client.login(username='******', password='******'))

    user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(superuser.pk,))
    self.assertEqual(self.client.get(user_change_url).status_code, 200)

    password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(superuser.pk,))
    self.assertEqual(self.client.get(password_change_url).status_code, 200)

    # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
    with connection.constraint_checks_disabled():
        post_response = self.client.post(password_change_url, {
            'password1': 'password1',
            'password2': 'password1',
        })
    self.assertRedirects(post_response, user_change_url)

    entry = LogEntry.objects.latest('id')
    self.assertEqual(entry.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
    self.assertEqual(entry.object_id, str(superuser.pk))
    self.assertEqual(entry.get_change_message(), 'Changed password.')

    # The LogEntry.user column isn't altered to a UUID type so it's set to
    # an integer manually in CustomUserAdmin to avoid an error. To avoid a
    # constraint error, delete the entry before constraints are checked
    # after the test.
    entry.delete()
def GetMatchHistory(**kargs):
    """Loads items into MatchHistoryQueue.

    This will poll the WebAPI and acquire a list of matches. This will never
    return a match that has already been processed into MatchDetails. This
    function is intended to be used in conjunction with specified kwargs.

    Args:
        **kargs (dict): kargs to pass into the WebAPI for filtering lookups.
            Valid kargs are:
                player_name=<name>       # Search matches with a player name, exact match only
                hero_id=<id>             # Search for matches with a specific hero being played,
                                         # hero id's are in dota/scripts/npc/npc_heroes.txt in your Dota install directory
                skill=<skill>            # 0 for any, 1 for normal, 2 for high, 3 for very high skill
                date_min=<date>          # date in UTC seconds since Jan 1, 1970 (unix time format)
                date_max=<date>          # date in UTC seconds since Jan 1, 1970 (unix time format)
                account_id=<id>          # Steam account id (this is not SteamID, its only the account number portion)
                league_id=<id>           # matches for a particular league
                start_at_match_id=<id>   # Start the search at the indicated match id, descending
                matches_requested=<n>    # Defaults is 25 matches, this can limit to less

    Returns:
        A list of MatchHistoryQueue objects to be iterated on, sorted by
        match `start_time`.

    Raises:
        SteamAPIError: if the target profile's match history is private.
    """
    create_queue = []
    account_list = []
    try:
        json_data = GetMatchHistoryJson(**kargs)
        if json_data['status'] == 15:
            # Match history denied, is set to private.
            raise SteamAPIError("This user has his DotA2 Profile set to private.")
        # FK checks are disabled: queue players may reference accounts that
        # don't exist locally yet.
        with connection.constraint_checks_disabled():
            for match in json_data['matches']:
                # Don't log matches without players.
                if not match['players']:
                    continue
                json_player_data = match['players']
                if (MatchDetails.objects.filter(pk=match['match_id']).exists()
                        or MatchHistoryQueue.objects.filter(pk=match['match_id']).exists()
                        or match['lobby_type'] == 4):
                    # Object in queue or already created. Can ignore for now.
                    continue
                match_history = MatchHistoryQueue.from_json_response(match)
                match_history.save()  # Save here so the pk is created.
                account_list.extend(
                    convertAccountNumbertoSteam64(json_player.get('account_id'))
                    for json_player in json_player_data
                )
                create_queue.append((match_history, list(json_player_data)))
            GetPlayerNames(account_list)  # Loads accounts into cache
            for create_match_history, json_player_list in create_queue:
                queue_player_set = [
                    MatchHistoryQueuePlayers.from_json_response(create_match_history, json_player)
                    for json_player in json_player_list
                ]
                create_match_history.matchhistoryqueueplayers_set.bulk_create(queue_player_set)
        return_history = MatchHistoryQueue.objects.all().order_by('-start_time')
        transaction.commit()
    except:
        # Intentionally bare: roll back on *any* exception (including
        # BaseException subclasses) and re-raise it unchanged.
        transaction.rollback()
        raise
    return return_history
def test_constraint_checks_disabled_atomic_allowed(self):
    """
    SQLite schema editor is usable within an outer transaction as long as
    foreign key constraints checks are disabled beforehand.
    """
    def fk_checks_active():
        # Read the current foreign_keys pragma straight from SQLite.
        with connection.cursor() as cursor:
            row = cursor.execute('PRAGMA foreign_keys').fetchone()
        return bool(row[0])

    with connection.constraint_checks_disabled(), transaction.atomic():
        with connection.schema_editor(atomic=True):
            # Checks stay off inside the schema editor...
            self.assertFalse(fk_checks_active())
        # ...and after it exits, while still inside the outer context.
        self.assertFalse(fk_checks_active())
    # Once the disabling context exits, checks are restored.
    self.assertTrue(fk_checks_active())
def test_check_constraints(self):
    """
    Constraint checks should raise an IntegrityError when bad data is in
    the DB.
    """
    with transaction.atomic():
        # Create an Article.
        models.Article.objects.create(
            headline="Test article",
            pub_date=datetime.datetime(2010, 9, 4),
            reporter=self.r,
        )
        # Retrieve it from the DB and point it at a nonexistent reporter.
        article = models.Article.objects.get(headline="Test article")
        article.reporter_id = 30
        with connection.constraint_checks_disabled():
            article.save()
            # The bad FK is only caught when constraints are checked explicitly.
            with self.assertRaises(IntegrityError):
                connection.check_constraints()
        # Discard the bad row so it never hits the end-of-test checks.
        transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
    """
    When constraint checks are disabled (using context manager), should be
    able to write bad data without IntegrityErrors.
    """
    with transaction.atomic():
        # Create an Article.
        models.Article.objects.create(
            headline="Test article",
            pub_date=datetime.datetime(2010, 9, 4),
            reporter=self.r,
        )
        # Retrieve it from the DB and break its reporter FK.
        article = models.Article.objects.get(headline="Test article")
        article.reporter_id = 30
        try:
            # With checks disabled, the save must not raise.
            with connection.constraint_checks_disabled():
                article.save()
        except IntegrityError:
            self.fail("IntegrityError should not have occurred.")
        # Discard the bad row so it never hits the end-of-test checks.
        transaction.set_rollback(True)
def test_forward_refs(self):
    """
    Tests that objects ids can be referenced before they are defined in the
    serialization data.
    """
    # The deserialization process needs to run in a transaction in order
    # to test forward reference handling.
    with transaction.atomic():
        deserialized = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
        with connection.constraint_checks_disabled():
            for deserialized_obj in deserialized:
                deserialized_obj.save()

    # Exactly one instance of each model should exist.
    for model_cls in (Category, Author, Article):
        self.assertEqual(model_cls.objects.all().count(), 1)
    article = Article.objects.all()[0]
    self.assertEqual(article.categories.all().count(), 1)
    self.assertEqual(article.author.name, "Agnes")
def test_admin_password_change(self):
    """The admin password-change view works for a user with a UUID primary key."""
    superuser = UUIDUser.objects.create_superuser(username="******", email="*****@*****.**", password="******")
    self.assertTrue(self.client.login(username="******", password="******"))

    user_change_url = reverse("custom_user_admin:auth_uuiduser_change", args=(superuser.pk,))
    self.assertEqual(self.client.get(user_change_url).status_code, 200)

    password_change_url = reverse("custom_user_admin:auth_user_password_change", args=(superuser.pk,))
    self.assertEqual(self.client.get(password_change_url).status_code, 200)

    # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
    with connection.constraint_checks_disabled():
        post_response = self.client.post(password_change_url, {"password1": "password1", "password2": "password1"})
    self.assertRedirects(post_response, user_change_url)

    entry = LogEntry.objects.latest("id")
    self.assertEqual(entry.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
    self.assertEqual(entry.object_id, str(superuser.pk))
    self.assertEqual(entry.change_message, "Changed password.")
def test_disable_constraint_checks_context_manager(self):
    """
    When constraint checks are disabled (using context manager), should be
    able to write bad data without IntegrityErrors.
    """
    with transaction.atomic():
        # Create an Article.
        Article.objects.create(
            headline="Test article",
            pub_date=datetime.datetime(2010, 9, 4),
            reporter=self.r,
        )
        # Retrieve it from the DB and break its reporter FK.
        article = Article.objects.get(headline="Test article")
        article.reporter_id = 30
        try:
            # With checks disabled, the save must not raise.
            with connection.constraint_checks_disabled():
                article.save()
        except IntegrityError:
            self.fail("IntegrityError should not have occurred.")
        # Discard the bad row so it never hits the end-of-test checks.
        transaction.set_rollback(True)
def test_forward_refs(self):
    """
    Tests that objects ids can be referenced before they are defined in the
    serialization data.
    """
    # The deserialization process needs to be contained within a
    # transaction in order to test forward reference handling.
    transaction.enter_transaction_management()
    deserialized = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
    with connection.constraint_checks_disabled():
        for deserialized_obj in deserialized:
            deserialized_obj.save()
    transaction.commit()
    transaction.leave_transaction_management()

    # Exactly one instance of each model should exist.
    for model_cls in (Category, Author, Article):
        self.assertEqual(model_cls.objects.all().count(), 1)
    article = Article.objects.all()[0]
    self.assertEqual(article.categories.all().count(), 1)
    self.assertEqual(article.author.name, "Agnes")
def test_admin_password_change(self):
    """The admin password-change view works for a user with a UUID primary key."""
    superuser = UUIDUser.objects.create_superuser(username='******', email='*****@*****.**', password='******')
    self.assertTrue(self.client.login(username='******', password='******'))

    user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(superuser.pk,))
    self.assertEqual(self.client.get(user_change_url).status_code, 200)

    password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(superuser.pk,))
    self.assertEqual(self.client.get(password_change_url).status_code, 200)

    # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
    with connection.constraint_checks_disabled():
        post_response = self.client.post(password_change_url, {
            'password1': 'password1',
            'password2': 'password1',
        })
    self.assertRedirects(post_response, user_change_url)

    entry = LogEntry.objects.latest('id')
    self.assertEqual(entry.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
    self.assertEqual(entry.object_id, str(superuser.pk))
    self.assertEqual(entry.get_change_message(), 'Changed password.')
def test_check_constraints(self):
    """
    Constraint checks should raise an IntegrityError when bad data is in
    the DB.
    """
    try:
        transaction.set_autocommit(False)
        # Create an Article.
        models.Article.objects.create(
            headline="Test article",
            pub_date=datetime.datetime(2010, 9, 4),
            reporter=self.r,
        )
        # Retrieve it from the DB and point it at a nonexistent reporter.
        article = models.Article.objects.get(headline="Test article")
        article.reporter_id = 30
        try:
            with connection.constraint_checks_disabled():
                article.save()
                # The bad FK is only caught when constraints are checked
                # explicitly.
                with self.assertRaises(IntegrityError):
                    connection.check_constraints()
        finally:
            # Discard the bad row regardless of the assertion outcome.
            transaction.rollback()
    finally:
        transaction.set_autocommit(True)
def natural_key_serializer_test(format, self):
    """Round-trip natural-foreign-key objects through the given format."""
    # Create all the objects defined in the test data; FK checks are
    # disabled so rows can be inserted in any order.
    with connection.constraint_checks_disabled():
        fixtures = [
            NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
            FKDataNaturalKey.objects.create(id=1101, data_id=1100),
            FKDataNaturalKey.objects.create(id=1102, data_id=None),
        ]

    # Serialize the test database using natural foreign keys.
    serialized_data = serializers.serialize(format, fixtures, indent=2, use_natural_foreign_keys=True)
    for deserialized_obj in serializers.deserialize(format, serialized_data):
        deserialized_obj.save()

    # Assert that the deserialized data is the same as the original source.
    for fixture in fixtures:
        reloaded = fixture.__class__.objects.get(id=fixture.pk)
        self.assertEqual(
            fixture.data,
            reloaded.data,
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
                fixture.pk, fixture.data, type(fixture.data), reloaded, type(reloaded.data)),
        )