    def test_hard_deleted_model_propagates_to_store_record(self):
        """
        We may have two store records: one that is hard deleted, and another whose
        foreign key points at the hard-deleted record. When we deserialize, the record
        with the foreign key should also get the hard_deleted flag set, and its data
        should not be deserialized into a model.
        """
        # user will be deleted
        user = MyUser(username='******')
        user.save(update_dirty_bit_to=False)
        # log may be synced in from other device
        log = SummaryLog(user_id=user.id)
        log.id = log.calculate_uuid()
        StoreModelFacilityFactory(
            model_name="user",
            id=user.id,
            serialized=json.dumps(user.serialize()),
            deleted=True,
            hard_deleted=True,
        )
        StoreModelFacilityFactory(
            model_name="contentsummarylog",
            id=log.id,
            serialized=json.dumps(log.serialize()),
        )
        # make sure delete propagates to store due to deleted foreign key
        self.mc.deserialize_from_store()
        # have to serialize to update deleted models
        self.mc.serialize_into_store()
        self.assertFalse(SummaryLog.objects.filter(id=log.id).exists())
        self.assertTrue(Store.objects.get(id=log.id).hard_deleted)
    def _create_two_users_to_deserialize(self):
        user = MyUser(username="******", password="******")
        user2 = MyUser(username="******", password="******")
        user.save()
        user2.save()
        self.mc.serialize_into_store()
        user.username = "******"
        user2.username = "******"
        Store.objects.filter(id=user.id).update(serialized=json.dumps(user.serialize()), dirty_bit=True)
        Store.objects.filter(id=user2.id).update(serialized=json.dumps(user2.serialize()), dirty_bit=True)
        return user, user2
    def test_store_hard_delete_propagates(self):
        user = MyUser(username='******')
        user.save(update_dirty_bit_to=False)
        log = SummaryLog(user=user)
        log.save(update_dirty_bit_to=False)
        StoreModelFacilityFactory(
            model_name="user",
            id=user.id,
            serialized=json.dumps(user.serialize()),
            hard_deleted=True,
            deleted=True,
        )
        # make sure hard_deleted propagates to related models even if they are not hard_deleted
        self.mc.deserialize_from_store()
        self.assertTrue(HardDeletedModels.objects.filter(id=log.id).exists())
    def test_regular_model_deserialization(self):
        # deserialization should be able to handle multiple records
        user = MyUser(username='******', password='******')
        user2 = MyUser(username='******', password='******')
        user.save(update_dirty_bit_to=False)
        user2.save(update_dirty_bit_to=False)
        user.username = '******'
        user2.username = '******'
        StoreModelFacilityFactory(id=user.id, serialized=json.dumps(user.serialize()), model_name="user")
        StoreModelFacilityFactory(id=user2.id, serialized=json.dumps(user2.serialize()), model_name="user")
        self.mc.deserialize_from_store()
        self.assertFalse(MyUser.objects.filter(username='******').exists())
        self.assertFalse(MyUser.objects.filter(username='******').exists())
        self.assertTrue(MyUser.objects.filter(username='******').exists())
        self.assertTrue(MyUser.objects.filter(username='******').exists())
class CertificateTestCaseMixin(object):

    def setUp(self):
        self.user = MyUser(username="******")
        self.user.actual_password = "******"
        self.user.set_password(self.user.actual_password)
        self.user.save()

        self.superuser = MyUser(username="******", is_superuser=True)
        self.superuser.actual_password = "******"
        self.superuser.set_password(self.superuser.actual_password)
        self.superuser.save()

        self.fakeuser = MyUser(username="******")
        self.fakeuser.actual_password = "******"

        self.profile = "facilitydata"

        self.root_scope_def = ScopeDefinition.objects.create(
            id="rootcert",
            profile=self.profile,
            version=1,
            primary_scope_param_key="mainpartition",
            description="Root cert for ${mainpartition}.",
            read_filter_template="",
            write_filter_template="",
            read_write_filter_template="${mainpartition}",
        )

        self.subset_scope_def = ScopeDefinition.objects.create(
            id="subcert",
            profile=self.profile,
            version=1,
            primary_scope_param_key="",
            description="Subset cert under ${mainpartition} for ${subpartition}.",
            read_filter_template="${mainpartition}:shared\n${mainpartition}:${subpartition}",
            write_filter_template="${mainpartition}:${subpartition}",
            read_write_filter_template="",
        )

        self.root_cert1_with_key = Certificate.generate_root_certificate(self.root_scope_def.id)

        self.subset_cert1_without_key = Certificate(
            parent=self.root_cert1_with_key,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=json.dumps({"mainpartition": self.root_cert1_with_key.id, "subpartition": "abracadabra"}),
            private_key=Key(),
        )
        self.root_cert1_with_key.sign_certificate(self.subset_cert1_without_key)
        self.subset_cert1_without_key.save()

        self.sub_subset_cert1_with_key = Certificate(
            parent=self.subset_cert1_without_key,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=self.subset_cert1_without_key.scope_params,
            private_key=Key(),
        )
        self.subset_cert1_without_key.sign_certificate(self.sub_subset_cert1_with_key)
        self.sub_subset_cert1_with_key.save()

        self.subset_cert1_without_key._private_key = None
        self.subset_cert1_without_key.save()

        self.root_cert2_without_key = Certificate.generate_root_certificate(self.root_scope_def.id)

        self.subset_cert2_with_key = Certificate(
            parent=self.root_cert2_without_key,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=json.dumps({"mainpartition": self.root_cert2_without_key.id, "subpartition": "abracadabra"}),
            private_key=Key(),
        )
        self.root_cert2_without_key.sign_certificate(self.subset_cert2_with_key)
        self.subset_cert2_with_key.save()

        self.root_cert2_without_key._private_key = None
        self.root_cert2_without_key.save()

        self.original_cert_count = Certificate.objects.count()

        self.sharedkey = SharedKey.get_or_create_shared_key()

        # create a root cert
        self.unsaved_root_cert = Certificate(
            scope_definition=self.root_scope_def,
            scope_version=self.root_scope_def.version,
            profile=self.profile,
            private_key=Key(),
        )
        self.unsaved_root_cert.id = self.unsaved_root_cert.calculate_uuid()
        self.unsaved_root_cert.scope_params = json.dumps(
            {self.root_scope_def.primary_scope_param_key: self.unsaved_root_cert.id}
        )
        self.unsaved_root_cert.sign_certificate(self.unsaved_root_cert)

        # create a child cert
        self.unsaved_subset_cert = Certificate(
            parent=self.unsaved_root_cert,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=json.dumps({"mainpartition": self.unsaved_root_cert.id, "subpartition": "hooplah"}),
            public_key=self.sharedkey.public_key,
        )

    def make_cert_endpoint_request(self, params={}, method="GET"):
        fn = getattr(self.client, method.lower())
        response = fn(reverse('certificates-list'), params, format='json')
        data = json.loads(response.content.decode())
        return (response, data)

    def perform_basic_authentication(self, user):
        basic_auth_header = b'Basic ' + base64.encodestring(
            ("username=%s:%s" % (user.username, user.actual_password)).encode()
        )
        self.client.credentials(HTTP_AUTHORIZATION=basic_auth_header)

    def create_syncsession(self, client_certificate=None, server_certificate=None):

        if not client_certificate:
            client_certificate = self.sub_subset_cert1_with_key

        if not server_certificate:
            server_certificate = self.root_cert1_with_key

        # fetch a nonce value to use in creating the syncsession
        response = self.client.post(reverse('nonces-list'), {}, format='json')
        nonce = json.loads(response.content.decode())["id"]

        # prepare the data to send in the syncsession creation request
        data = {
            "id": uuid.uuid4().hex,
            "server_certificate_id": server_certificate.id,
            "client_certificate_id": client_certificate.id,
            "profile": client_certificate.profile,
            "certificate_chain": json.dumps(
                CertificateSerializer(client_certificate.get_ancestors(include_self=True), many=True).data
            ),
            "connection_path": "http://127.0.0.1:8000",
            "instance": json.dumps(
                InstanceIDSerializer(InstanceIDModel.get_or_create_current_instance()[0]).data
            ),
            "nonce": nonce,
        }

        # sign the nonce/ID combo to attach to the request
        data["signature"] = client_certificate.sign("{nonce}:{id}".format(**data))

        # make the API call to create the SyncSession
        response = self.client.post(reverse('syncsessions-list'), data, format='json')
        self.assertEqual(response.status_code, 201)

        return SyncSession.objects.get(id=data["id"])

    def make_transfersession_creation_request(self, filter, push, syncsession=None, expected_status=201, expected_message=None, **kwargs):

        if not syncsession:
            syncsession = self.create_syncsession()

        data = {
            "id": uuid.uuid4().hex,
            "filter": filter,
            "push": push,
            "records_total": 0,
            "sync_session_id": syncsession.id,
        }

        # make the API call to attempt to create the TransferSession
        response = self.client.post(reverse('transfersessions-list'), data, format='json')
        self.assertEqual(response.status_code, expected_status)

        if expected_status == 201:
            # check that the transfersession was created
            transfersession = TransferSession.objects.get(id=json.loads(response.content.decode())["id"])
            self.assertTrue(transfersession.active)
        else:
            # check that the transfersession was not created
            self.assertEqual(TransferSession.objects.count(), 0)

        if expected_message:
            self.assertIn(expected_message, response.content.decode())

        return response
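

# --------------------------------------------------------------------------
# Hedged usage sketch (not part of the original suite): a minimal example of
# how a test case might compose the mixin's helpers above. The class name,
# method name, filter value, and expected outcome are assumptions for
# illustration only; the method deliberately does not start with "test_" so
# the runner will not collect it. It assumes DRF's APITestCase supplies the
# self.client used by the mixin.
# --------------------------------------------------------------------------
from rest_framework.test import APITestCase


class ExampleCertificateAPISketch(CertificateTestCaseMixin, APITestCase):

    def example_push_transfersession_flow(self):
        # Open a sync session using the default certificates built in setUp
        # (client: sub_subset_cert1_with_key, server: root_cert1_with_key).
        syncsession = self.create_syncsession()

        # Request a push transfer session. The filter below (the client
        # certificate's "<mainpartition>:<subpartition>" partition) and the
        # default expected 201 response are assumptions, not assertions about
        # the server's actual scope-validation rules.
        self.make_transfersession_creation_request(
            filter="{}:{}".format(self.root_cert1_with_key.id, "abracadabra"),
            push=True,
            syncsession=syncsession,
        )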