def setUp(self):
    """Build a certificate hierarchy and a live network connection for sync tests.

    Creates a root scope definition plus a subset scope definition, generates a
    root certificate, signs a saved subset certificate (with a private key) and
    an unsaved one (public key only), then opens a network connection against
    the live test server and ensures a shared key exists.
    """
    self.profile = "facilitydata"
    # Root scope: read/write over the whole main partition.
    self.root_scope_def = ScopeDefinition.objects.create(
        id="rootcert",
        profile=self.profile,
        version=1,
        primary_scope_param_key="mainpartition",
        description="Root cert for ${mainpartition}.",
        read_filter_template="",
        write_filter_template="",
        read_write_filter_template="${mainpartition}",
    )
    # Subset scope: read the main partition, write only its sub-partition.
    self.subset_scope_def = ScopeDefinition.objects.create(
        id="subcert",
        profile=self.profile,
        version=1,
        primary_scope_param_key="",
        description="Subset cert under ${mainpartition} for ${subpartition}.",
        read_filter_template="${mainpartition}",
        write_filter_template="${mainpartition}:${subpartition}",
        read_write_filter_template="",
    )
    self.root_cert = Certificate.generate_root_certificate(self.root_scope_def.id)
    # Saved subset cert: carries a private key and is persisted after signing.
    self.subset_cert = Certificate(
        parent=self.root_cert,
        profile=self.profile,
        scope_definition=self.subset_scope_def,
        scope_version=self.subset_scope_def.version,
        scope_params=json.dumps(
            {"mainpartition": self.root_cert.id, "subpartition": "abracadabra"}
        ),
        private_key=Key(),
    )
    self.root_cert.sign_certificate(self.subset_cert)
    self.subset_cert.save()
    # Unsaved cert: signed but never persisted (only has a public key), for
    # tests that need a certificate absent from the local DB.
    self.unsaved_cert = Certificate(
        parent=self.root_cert,
        profile=self.profile,
        scope_definition=self.subset_scope_def,
        scope_version=self.subset_scope_def.version,
        scope_params=json.dumps(
            {"mainpartition": self.root_cert.id, "subpartition": "other"}
        ),
        public_key=Key(),
    )
    self.root_cert.sign_certificate(self.unsaved_cert)
    self.controller = MorangoProfileController("facilitydata")
    self.network_connection = self.controller.create_network_connection(
        self.live_server_url
    )
    self.key = SharedKey.get_or_create_shared_key()
def setUp(self):
    """Seed the store with dirty facility records and create a profile controller."""
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
    self.range = 10
    self.mc = MorangoProfileController("facilitydata")
    for _ in range(self.range):
        # self.ident ends up holding the last generated id, which individual
        # tests use as a known record to poke at.
        self.ident = uuid.uuid4().hex
        StoreModelFacilityFactory(
            pk=self.ident,
            serialized=serialized_facility_factory(self.ident),
        )
class DateTimeTZFieldTestCase(TestCase):
    """Regression test: deserializing timezone-aware datetime fields must not
    raise AttributeError."""

    def setUp(self):
        self.controller = MorangoProfileController(PROFILE_FACILITY_DATA)
        InstanceIDModel.get_or_create_current_instance()

    def test_deserializing_field(self):
        facility = Facility.objects.create(name="hallo")
        FacilityUser.objects.create(username="******", facility=facility)
        self.controller.serialize_into_store()
        # Mark everything dirty so deserialization actually runs.
        Store.objects.update(dirty_bit=True)
        try:
            self.controller.deserialize_from_store()
        except AttributeError as e:
            # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
            # previous `self.fail(e.message)` raised a second AttributeError and
            # hid the original failure. Report the exception text instead.
            self.fail(str(e))
def validate_and_prepare_peer_sync_job(request, **kwargs):
    """Validate a peer-sync request against a remote server and return the job payload.

    Raises:
        ParseError: `baseurl` missing or unparseable.
        ResourceGoneError: the network location cannot be found.
        PermissionDenied: certificate retrieval failed and no credentials were given.
        AuthenticationFailed: certificate retrieval failed with credentials present.
    """
    # validate the baseurl
    try:
        address = request.data.get("baseurl")
        if not address:
            # normalize "missing" and "empty" into the same error path
            raise KeyError()
        baseurl = NetworkClient(address=address).base_url
    except KeyError:
        raise ParseError("Missing `baseurl` parameter")
    except URLParseError:
        raise ParseError("Invalid URL")
    except NetworkLocationNotFound:
        raise ResourceGoneError()

    job_data = validate_prepare_sync_job(request, baseurl=baseurl, **kwargs)

    facility_id = job_data.get("facility")
    username = request.data.get("username", None)
    password = request.data.get("password", None)

    # call this in case user directly syncs without migrating database
    if not ScopeDefinition.objects.filter():
        call_command("loaddata", "scopedefinitions")

    controller = MorangoProfileController(PROFILE_FACILITY_DATA)
    network_connection = controller.create_network_connection(baseurl)

    # try to get the certificate, which will save it if successful
    try:
        # make sure we get the dataset ID
        dataset_id = get_dataset_id(baseurl, identifier=facility_id, noninteractive=True)
        # username and password are not required for this to succeed unless there is no cert
        get_client_and_server_certs(username, password, dataset_id, network_connection, noninteractive=True)
    except (CommandError, HTTPError) as e:
        if not username and not password:
            raise PermissionDenied()
        else:
            raise AuthenticationFailed(e)

    return job_data
class PostDeleteSignalsTestCase(TestCase):
    """Deleting a serialized model must leave a DeletedModels tombstone, including
    for children removed by a cascading delete."""

    def setUp(self):
        InstanceIDModel.get_or_create_current_instance()
        for _ in range(10):
            FacilityModelFactory()
        self.mc = MorangoProfileController('facilitydata')
        self.mc.serialize_into_store()

    def test_deleted_flag_gets_set(self):
        target = Facility.objects.first()
        target_id = target.id
        target.delete()
        self.assertTrue(DeletedModels.objects.filter(id=target_id))

    def test_cascading_delete(self):
        parent = Facility.objects.first()
        child_id = FacilityModelFactory(parent=parent).id
        # deleting the parent cascades to the child
        parent.delete()
        self.assertTrue(DeletedModels.objects.filter(id=child_id))
def validate_and_prepare_peer_sync_job(request, **kwargs):
    """Validate a peer-sync request against a remote server and return the job payload.

    Raises:
        ParseError: `baseurl` missing or unparseable.
        ResourceGoneError: the network location cannot be found.
        NotAuthenticated: certificate retrieval failed and no credentials were given.
        AuthenticationFailed: certificate retrieval failed with credentials present.
    """
    # validate the baseurl
    try:
        address = request.data.get("baseurl")
        if not address:
            # normalize "missing" and "empty" into the same error path
            raise KeyError()
        baseurl = NetworkClient(address=address).base_url
    except KeyError:
        raise ParseError("Missing `baseurl` parameter")
    except URLParseError:
        raise ParseError("Invalid URL")
    except NetworkLocationNotFound:
        raise ResourceGoneError()

    job_data = validate_prepare_sync_job(request, baseurl=baseurl, **kwargs)

    facility_id = job_data.get("facility")
    username = request.data.get("username", None)
    password = request.data.get("password", None)

    controller = MorangoProfileController(PROFILE_FACILITY_DATA)
    network_connection = controller.create_network_connection(baseurl)

    # try to get the certificate, which will save it if successful
    try:
        # username and password are not required for this to succeed unless there is no cert
        # NOTE(review): this passes the facility id as the third argument, whereas the
        # sibling variant of this helper first resolves a dataset_id via get_dataset_id()
        # and passes that instead — confirm which identifier
        # get_client_and_server_certs expects here.
        get_client_and_server_certs(username, password, facility_id, network_connection, noninteractive=True)
    except CommandError as e:
        if not username and not password:
            raise NotAuthenticated()
        else:
            raise AuthenticationFailed(e)

    return job_data
def setUp(self):
    """Create a sync client with an active push transfer session and populate
    dummy buffer/store data keyed by that session."""
    settings.MORANGO_DESERIALIZE_AFTER_DEQUEUING = False
    self.data = {}
    DatabaseIDModel.objects.create()
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()

    # create controllers for app/store/buffer operations
    controller = MorangoProfileController("facilitydata")
    client = BaseSyncClient(None, "host")
    self.data["mc"] = controller
    self.data["sc"] = client

    session = SyncSession.objects.create(
        id=uuid.uuid4().hex,
        profile="",
        last_activity_timestamp=timezone.now(),
    )
    transfer = TransferSession.objects.create(
        id=uuid.uuid4().hex,
        sync_session=session,
        push=True,
        last_activity_timestamp=timezone.now(),
    )
    client.current_transfer_session = transfer
    self.data.update(create_buffer_and_store_dummy_data(transfer.id))
def handle_async(self, *args, **options):
    """Run a facility sync against the data portal or a peer server.

    Resolves certificates (portal: reuse/push our owned cert; P2P: negotiate
    client and server certs), then pulls and/or pushes the dataset and
    provisions a superuser/device before closing the session.
    """
    baseurl, facility_id, chunk_size, username, password, no_push, no_pull, noninteractive = (
        options["baseurl"],
        options["facility"],
        options["chunk_size"],
        options["username"],
        options["password"],
        options["no_push"],
        options["no_pull"],
        options["noninteractive"],
    )

    PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL

    # validate url that is passed in
    if not PORTAL_SYNC:
        baseurl = get_baseurl(baseurl)

    # call this in case user directly syncs without migrating database
    if not ScopeDefinition.objects.filter():
        call_command("loaddata", "scopedefinitions")

    # try to connect to server
    controller = MorangoProfileController("facilitydata")
    network_connection = controller.create_network_connection(baseurl)

    # if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
    if (InstanceIDModel.get_or_create_current_instance()[0].id ==
            network_connection.server_info["instance_id"]):
        raise CommandError(
            "Device can not sync with itself. Please recheck base URL and try again."
        )

    if PORTAL_SYNC:  # do portal sync setup
        facility = get_facility(facility_id=facility_id, noninteractive=noninteractive)

        # check for the certs we own for the specific facility
        client_cert = (facility.dataset.get_owned_certificates().filter(
            scope_definition_id=FULL_FACILITY).first())
        if not client_cert:
            raise CommandError(
                "This device does not own a certificate for Facility: {}".
                format(facility.name))

        # get primary partition
        scope_params = json.loads(client_cert.scope_params)
        dataset_id = scope_params["dataset_id"]

        # check if the server already has a cert for this facility
        server_certs = network_connection.get_remote_certificates(
            dataset_id, scope_def_id=FULL_FACILITY)

        # if necessary, push a cert up to the server
        server_cert = (
            server_certs[0]
            if server_certs else network_connection.push_signed_client_certificate_chain(
                local_parent_cert=client_cert,
                scope_definition_id=FULL_FACILITY,
                scope_params=scope_params,
            ))
    else:  # do P2P setup
        dataset_id = get_dataset_id(baseurl, identifier=facility_id, noninteractive=noninteractive)
        client_cert, server_cert, username = get_client_and_server_certs(
            username,
            password,
            dataset_id,
            network_connection,
            noninteractive=noninteractive,
        )

    self.stdout.write(
        "Syncing has been initiated (this may take a while)...")
    sync_client = network_connection.create_sync_session(
        client_cert, server_cert, chunk_size=chunk_size)

    # pull from server and push our own data to server
    if not no_pull:
        sync_client.initiate_pull(Filter(dataset_id))
    if not no_push:
        sync_client.initiate_push(Filter(dataset_id))

    create_superuser_and_provision_device(username, dataset_id, noninteractive=noninteractive)

    sync_client.close_sync_session()
    self.stdout.write("Syncing has been completed.")
def handle_async(self, *args, **options):
    """Deprecated full-facility sync: validate the base URL, negotiate certificates,
    then pull/push facility data, reporting progress across 7 steps.

    Raises CommandError on an invalid/unreachable URL or a self-sync attempt.
    """
    self.stderr.write(
        "`fullfacilitysync` command is deprecated and will be removed in 0.13.0 in favor of `sync`, which accepts the same options."
        " Use `sync` command instead."
    )

    # validate url that is passed in
    try:
        URLValidator()((options["base_url"]))
    except ValidationError:
        raise CommandError(
            "Base URL is not valid. Please retry command and enter a valid URL."
        )

    # call this in case user directly syncs without migrating database
    if not ScopeDefinition.objects.filter():
        call_command("loaddata", "scopedefinitions")

    controller = MorangoProfileController(PROFILE_FACILITY_DATA)
    with self.start_progress(total=7) as progress_update:
        try:
            network_connection = controller.create_network_connection(
                options["base_url"]
            )
        except ConnectionError:
            raise CommandError(
                "Can not connect to server with base URL: {}".format(
                    options["base_url"]
                )
            )

        # if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
        if (
            InstanceIDModel.get_or_create_current_instance()[0].id
            == network_connection.server_info["instance_id"]
        ):
            raise CommandError(
                "Device can not sync with itself. Please recheck base URL and try again."
            )
        progress_update(1)

        options["dataset_id"] = self.get_dataset_id(
            options["base_url"], options["dataset_id"]
        )
        progress_update(1)

        (
            client_cert,
            server_cert,
            options["username"],
        ) = self.get_client_and_server_certs(
            options["username"],
            options["password"],
            options["dataset_id"],
            network_connection,
        )
        progress_update(1)

        sync_client = network_connection.create_sync_session(
            client_cert, server_cert, chunk_size=options["chunk_size"]
        )
        progress_update(1)

        # pull from server and push our own data to server
        if not options["no_pull"]:
            sync_client.initiate_pull(Filter(options["dataset_id"]))
        if not options["no_push"]:
            sync_client.initiate_push(Filter(options["dataset_id"]))
        progress_update(1)

        self.create_superuser_and_provision_device(
            options["username"], options["dataset_id"]
        )
        progress_update(1)

        sync_client.close_sync_session()
        progress_update(1)
def create_dummy_store_data():
    """Build a store fixture serialized under two distinct instance IDs.

    Returns a dict with the controllers ("mc", "sc"), both instance-ID records,
    facility groups per serialization pass, users with their logs, and a user
    with an invalid partition — the inline comments track the per-instance
    serialization counter after each pass.
    """
    data = {}
    DatabaseIDModel.objects.create()
    data["group1_id"] = InstanceIDModel.get_or_create_current_instance()[
        0]  # counter is at 0

    # create controllers for app/store/buffer operations
    data["mc"] = MorangoProfileController("facilitydata")
    data["sc"] = BaseSyncClient(None, "host")
    session = SyncSession.objects.create(
        id=uuid.uuid4().hex,
        profile="facilitydata",
        last_activity_timestamp=timezone.now(),
    )
    data["sc"].current_transfer_session = TransferSession.objects.create(
        id=uuid.uuid4().hex,
        sync_session=session,
        push=True,
        last_activity_timestamp=timezone.now(),
    )
    data["mc"].serialize_into_store()  # counter is at 1

    # create group of facilities and first serialization
    data["group1_c1"] = [FacilityFactory() for _ in range(5)]
    data["mc"].serialize_into_store()  # counter is at 2

    # create group of facilities and second serialization
    data["group1_c2"] = [FacilityFactory() for _ in range(5)]

    # create users and logs associated with user
    data["user1"] = MyUser.objects.create(username="******")
    data["user1_sumlogs"] = [
        SummaryLog.objects.create(user=data["user1"]) for _ in range(5)
    ]
    data["mc"].serialize_into_store()  # counter is at 3

    # create new instance id and group of facilities
    with EnvironmentVarGuard() as env:
        env["MORANGO_SYSTEM_ID"] = "new_sys_id"
        # clear_cache forces a fresh instance-ID record under the new system id
        data["group2_id"] = InstanceIDModel.get_or_create_current_instance(
            clear_cache=True)[0]  # new counter is at 0

        data["mc"].serialize_into_store()  # new counter is at 1
        data["group2_c1"] = [FacilityFactory() for _ in range(5)]

        # create users and logs associated with user
        data["user2"] = MyUser.objects.create(username="******")
        data["user2_sumlogs"] = [
            SummaryLog.objects.create(user=data["user2"]) for _ in range(5)
        ]
        data["user2_interlogs"] = [
            InteractionLog.objects.create(user=data["user2"]) for _ in range(5)
        ]

        data["user3"] = MyUser.objects.create(username="******")
        data["user3_sumlogs"] = [
            SummaryLog.objects.create(user=data["user3"]) for _ in range(5)
        ]
        data["user3_interlogs"] = [
            InteractionLog.objects.create(user=data["user3"]) for _ in range(5)
        ]
        data["mc"].serialize_into_store()  # new counter is at 2

        data["user4"] = MyUser.objects.create(
            username="******", _morango_partition="badpartition")
        data["mc"].serialize_into_store()  # new counter is at 3

    return data
def handle_async(self, *args, **options):  # noqa C901
    """Run a facility sync in one of three modes: single-user (``--user``),
    data-portal, or peer-to-peer.

    Negotiates certificates for the chosen mode, pulls then pushes the dataset,
    provisions the device (unless ``--no-provision``), and records job state
    (COMPLETED/CANCELLED) in the async job metadata.

    Raises CommandError on a self-sync attempt, a malformed user id, a missing
    facility for single-user mode, or a missing owned certificate in portal mode.
    """
    (
        baseurl,
        facility_id,
        chunk_size,
        username,
        password,
        user_id,
        no_push,
        no_pull,
        noninteractive,
        no_provision,
    ) = (
        options["baseurl"],
        options["facility"],
        options["chunk_size"],
        options["username"],
        options["password"],
        options["user"],
        options["no_push"],
        options["no_pull"],
        options["noninteractive"],
        options["no_provision"],
    )

    PORTAL_SYNC = baseurl == DATA_PORTAL_SYNCING_BASE_URL

    # validate url that is passed in
    if not PORTAL_SYNC:
        baseurl = get_baseurl(baseurl)

    # call this in case user directly syncs without migrating database
    if not ScopeDefinition.objects.filter():
        call_command("loaddata", "scopedefinitions")

    dataset_cache.clear()
    dataset_cache.activate()

    # try to connect to server
    controller = MorangoProfileController(PROFILE_FACILITY_DATA)
    network_connection = controller.create_network_connection(baseurl)

    # if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
    if (InstanceIDModel.get_or_create_current_instance()[0].id ==
            network_connection.server_info["instance_id"]):
        raise CommandError(
            "Device can not sync with itself. Please recheck base URL and try again."
        )

    if user_id:  # it's a single-user sync
        if not facility_id:
            raise CommandError(
                "Facility ID must be specified in order to do single-user syncing"
            )
        if not re.match("[a-f0-9]{32}", user_id):
            raise CommandError(
                "User ID must be a 32-character UUID (no dashes)")

        dataset_id = get_dataset_id(baseurl, identifier=facility_id, noninteractive=True)

        client_cert, server_cert, username = get_client_and_server_certs(
            username,
            password,
            dataset_id,
            network_connection,
            user_id=user_id,
            noninteractive=noninteractive,
        )

        scopes = [
            client_cert.scope_definition_id, server_cert.scope_definition_id
        ]

        # single-user sync requires asymmetric scopes: one single-user cert,
        # one full-facility cert
        if len(set(scopes)) != 2:
            raise CommandError(
                "To do a single-user sync, one device must have a single-user certificate, and the other a full-facility certificate."
            )
    elif PORTAL_SYNC:  # do portal sync setup
        facility = get_facility(facility_id=facility_id, noninteractive=noninteractive)

        # check for the certs we own for the specific facility
        client_cert = (facility.dataset.get_owned_certificates().filter(
            scope_definition_id=ScopeDefinitions.FULL_FACILITY).first())
        if not client_cert:
            raise CommandError(
                "This device does not own a certificate for Facility: {}".
                format(facility.name))

        # get primary partition
        scope_params = json.loads(client_cert.scope_params)
        dataset_id = scope_params["dataset_id"]

        # check if the server already has a cert for this facility
        server_certs = network_connection.get_remote_certificates(
            dataset_id, scope_def_id=ScopeDefinitions.FULL_FACILITY)

        # if necessary, push a cert up to the server
        server_cert = (
            server_certs[0]
            if server_certs else network_connection.push_signed_client_certificate_chain(
                local_parent_cert=client_cert,
                scope_definition_id=ScopeDefinitions.FULL_FACILITY,
                scope_params=scope_params,
            ))
    else:  # do P2P setup
        dataset_id = get_dataset_id(baseurl, identifier=facility_id, noninteractive=noninteractive)
        client_cert, server_cert, username = get_client_and_server_certs(
            username,
            password,
            dataset_id,
            network_connection,
            noninteractive=noninteractive,
        )

    logger.info("Syncing has been initiated (this may take a while)...")
    sync_session_client = network_connection.create_sync_session(
        client_cert, server_cert, chunk_size=chunk_size)

    try:
        # pull from server
        if not no_pull:
            self._handle_pull(
                sync_session_client,
                noninteractive,
                dataset_id,
                client_cert,
                server_cert,
                user_id=user_id,
            )
        # and push our own data to server
        if not no_push:
            self._handle_push(
                sync_session_client,
                noninteractive,
                dataset_id,
                client_cert,
                server_cert,
                user_id=user_id,
            )

        if not no_provision:
            with self._lock():
                if user_id:
                    provision_single_user_device(user_id)
                else:
                    create_superuser_and_provision_device(
                        username, dataset_id, noninteractive=noninteractive)
    except UserCancelledError:
        if self.job:
            self.job.extra_metadata.update(sync_state=State.CANCELLED)
            self.job.save_meta()
        logger.info("Syncing has been cancelled.")
        return

    network_connection.close()

    if self.job:
        self.job.extra_metadata.update(sync_state=State.COMPLETED)
        self.job.save_meta()

    dataset_cache.deactivate()
    logger.info("Syncing has been completed.")
def setUp(self):
    """Record the current instance ID and create a facility-data profile controller."""
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
    self.mc = MorangoProfileController("facilitydata")
class SelfReferentialFKDeserializationTestCase(TestCase):
    """Deserialization behavior for models with a self-referential foreign key
    (Facility.parent): cascading deletes from the store, tree reconstruction,
    and error handling for missing/mismatched parent references."""

    def setUp(self):
        (self.current_id, _) = InstanceIDModel.get_or_create_current_instance()
        self.mc = MorangoProfileController("facilitydata")

    def test_self_ref_fk(self):
        # Facility has a self-referential FK named parent_id; MyUser has none.
        self.assertEqual(_self_referential_fk(Facility), "parent_id")
        self.assertEqual(_self_referential_fk(MyUser), None)

    def test_delete_model_in_store_deletes_models_in_app(self):
        root = FacilityModelFactory()
        child1 = FacilityModelFactory(parent=root)
        child2 = FacilityModelFactory(parent=root)
        self.mc.serialize_into_store()
        # simulate a node being deleted and synced
        Store.objects.filter(id=child2.id).update(deleted=True)
        Store.objects.update(dirty_bit=True)
        grandchild1 = FacilityModelFactory(parent=child2)
        grandchild2 = FacilityModelFactory(parent=child2)
        self.mc.deserialize_from_store()
        # ensure tree structure in app layer is correct
        child1 = Facility.objects.filter(id=child1.id)
        self.assertTrue(child1.exists())
        self.assertEqual(child1[0].parent_id, root.id)
        # child2's deletion cascades to its (unsynced) grandchildren
        self.assertFalse(Facility.objects.filter(id=child2.id).exists())
        self.assertFalse(Facility.objects.filter(id=grandchild1.id).exists())
        self.assertFalse(Facility.objects.filter(id=grandchild2.id).exists())

    def test_models_created_successfully(self):
        root = FacilityModelFactory()
        child1 = FacilityModelFactory(parent=root)
        child2 = FacilityModelFactory(parent=root)
        self.mc.serialize_into_store()
        # wipe the app layer, then rebuild it entirely from the store
        Facility.objects.all().delete()
        DeletedModels.objects.all().delete()
        Store.objects.update(dirty_bit=True, deleted=False)
        self.mc.deserialize_from_store()
        # ensure tree structure in app layer is correct
        self.assertTrue(Facility.objects.filter(id=root.id).exists())
        child1 = Facility.objects.filter(id=child1.id)
        self.assertTrue(child1.exists())
        self.assertEqual(child1[0].parent_id, root.id)
        child2 = Facility.objects.filter(id=child2.id)
        self.assertTrue(child2.exists())
        self.assertEqual(child2[0].parent_id, root.id)

    def test_deserialization_of_model_with_missing_parent(self):
        self._test_deserialization_of_model_with_missing_parent(correct_self_ref_fk=True)

    def test_deserialization_of_model_with_mismatched_self_ref_fk(self):
        self._test_deserialization_of_model_with_missing_parent(correct_self_ref_fk=False)

    def _test_deserialization_of_model_with_missing_parent(self, correct_self_ref_fk):
        # Rewrite a store record so it points at a nonexistent parent; the record
        # must keep its dirty bit and record a deserialization error.
        root = FacilityModelFactory()
        child1 = FacilityModelFactory(parent=root)
        self.mc.serialize_into_store()
        new_child = Store.objects.get(id=child1.id)
        data = json.loads(new_child.serialized)
        new_child.id = data["id"] = "a" * 32
        data["parent_id"] = "b" * 32
        if correct_self_ref_fk:
            # keep the store's denormalized self-ref FK in sync with the payload
            new_child._self_ref_fk = data["parent_id"]
        new_child.serialized = json.dumps(data)
        new_child.dirty_bit = True
        new_child.save()
        self.mc.deserialize_from_store()
        new_child.refresh_from_db()
        self.assertTrue(new_child.dirty_bit)
        self.assertIn("exist", new_child.deserialization_error)

    def test_deserialization_of_model_with_missing_foreignkey_referent(self):
        # Same as above but for an ordinary FK (SummaryLog.user_id).
        user = MyUser.objects.create(username="******")
        log = SummaryLog.objects.create(user=user)
        self.mc.serialize_into_store()
        new_log = Store.objects.get(id=log.id)
        data = json.loads(new_log.serialized)
        new_log.id = data["id"] = "f" * 32
        data["user_id"] = "e" * 32
        new_log.serialized = json.dumps(data)
        new_log.dirty_bit = True
        new_log.save()
        self.mc.deserialize_from_store()
        new_log.refresh_from_db()
        self.assertTrue(new_log.dirty_bit)
        self.assertIn("exist", new_log.deserialization_error)
def setUp(self):
    """Prepare a profile controller plus the names used by rename-related tests."""
    InstanceIDModel.get_or_create_current_instance()
    self.original_name = "ralphie"
    self.new_name = "rafael"
    self.range = 10
    self.mc = MorangoProfileController("facilitydata")
def setUp(self):
    """Create ten facilities and serialize them into the store."""
    InstanceIDModel.get_or_create_current_instance()
    for _ in range(10):
        FacilityModelFactory()
    self.mc = MorangoProfileController('facilitydata')
    self.mc.serialize_into_store()
def setUp(self):
    """Ensure a current instance ID exists and build the profile controller."""
    InstanceIDModel.get_or_create_current_instance()
    self.controller = MorangoProfileController("facilitydata")
class DeserializationFromStoreIntoAppTestCase(TestCase):
    """End-to-end checks for deserialize_from_store: dirty-bit gating, deleted /
    hard-deleted propagation, unknown-field tolerance, broken-FK and invalid-model
    error handling, and partition-filtered deserialization."""

    def setUp(self):
        (self.current_id, _) = InstanceIDModel.get_or_create_current_instance()
        self.range = 10
        self.mc = MorangoProfileController("facilitydata")
        for i in range(self.range):
            # self.ident keeps the last generated id as a known record
            self.ident = uuid.uuid4().hex
            StoreModelFacilityFactory(
                pk=self.ident, serialized=serialized_facility_factory(self.ident)
            )

    def test_dirty_store_records_are_deserialized(self):
        self.assertFalse(Facility.objects.all().exists())
        self.mc.deserialize_from_store()
        self.assertEqual(len(Facility.objects.all()), self.range)

    def test_clean_store_records_do_not_get_deserialized(self):
        self.assertFalse(Facility.objects.exists())
        Store.objects.update(dirty_bit=False)
        self.mc.deserialize_from_store()
        self.assertFalse(Facility.objects.exists())

    def test_deleted_models_do_not_get_deserialized(self):
        Store.objects.update_or_create(defaults={"deleted": True}, id=self.ident)
        self.mc.deserialize_from_store()
        self.assertFalse(Facility.objects.filter(id=self.ident).exists())

    def test_deleted_models_deletes_them_in_app(self):
        # put models in app layer
        self.mc.deserialize_from_store()
        # deleted flag on store should delete model in app layer
        Store.objects.update_or_create(
            defaults={"deleted": True, "dirty_bit": True}, id=self.ident
        )
        self.mc.deserialize_from_store()
        self.assertFalse(Facility.objects.filter(id=self.ident).exists())

    def test_update_app_with_newer_data_from_store(self):
        name = "test"
        fac = FacilityModelFactory(id=self.ident, name=name)
        # save without dirtying so the store version wins on deserialization
        fac.save(update_dirty_bit_to=False)
        self.assertEqual(fac.name, name)
        self.mc.deserialize_from_store()
        fac = Facility.objects.get(id=self.ident)
        self.assertNotEqual(fac.name, name)

    def test_handle_extra_field_deserialization(self):
        # modify a store record by adding extra serialized field
        store_model = Store.objects.get(id=self.ident)
        serialized = json.loads(store_model.serialized)
        serialized.update({"wacky": True})
        store_model.serialized = json.dumps(serialized)
        store_model.save()
        # deserialize records
        self.mc.deserialize_from_store()
        # by this point no errors should have occurred but we check list of fields anyways
        fac = Facility.objects.get(id=self.ident)
        self.assertNotIn("wacky", fac.__dict__)

    def test_store_dirty_bit_resets(self):
        self.assertTrue(Store.objects.filter(dirty_bit=True))
        self.mc.deserialize_from_store()
        self.assertFalse(Store.objects.filter(dirty_bit=True))

    def test_record_with_dirty_bit_off_doesnt_deserialize(self):
        st = Store.objects.first()
        st.dirty_bit = False
        st.save()
        self.mc.deserialize_from_store()
        self.assertFalse(Facility.objects.filter(id=st.id).exists())

    def test_broken_fk_leaves_store_dirty_bit(self):
        # user_id points at a user that does not exist locally
        serialized = """{"user_id": "40de9a3fded95d7198f200c78e559353", "id": "bd205b5ee5bc42da85925d24c61341a8"}"""
        st = StoreModelFacilityFactory(
            id=uuid.uuid4().hex, serialized=serialized, model_name="contentsummarylog"
        )
        self.mc.deserialize_from_store()
        st.refresh_from_db()
        self.assertTrue(st.dirty_bit)

    def test_invalid_model_leaves_store_dirty_bit(self):
        # username exceeds the field's max length, so validation fails
        user = MyUser(username="******" * 21)
        st = StoreModelFacilityFactory(
            model_name="user",
            id=uuid.uuid4().hex,
            serialized=json.dumps(user.serialize()),
        )
        self.mc.deserialize_from_store()
        st.refresh_from_db()
        self.assertTrue(st.dirty_bit)

    def test_deleted_model_propagates_to_store_record(self):
        """
        It could be the case that we have two store records, one that is deleted and the other
        that has a fk pointing to the deleted record. When we deserialize, we want to ensure
        that the record with the fk pointer also gets the deleted flag set, while also not
        deserializing the data into a model.
        """
        # user will be deleted
        user = MyUser(username="******")
        user.save(update_dirty_bit_to=False)
        # log may be synced in from other device
        log = SummaryLog(user_id=user.id)
        log.id = log.calculate_uuid()
        StoreModelFacilityFactory(
            model_name="user",
            id=user.id,
            serialized=json.dumps(user.serialize()),
            deleted=True,
        )
        StoreModelFacilityFactory(
            model_name="contentsummarylog",
            id=log.id,
            serialized=json.dumps(log.serialize()),
        )
        # make sure delete propagates to store due to deleted foreign key
        self.mc.deserialize_from_store()
        # have to serialize to update deleted models
        self.mc.serialize_into_store()
        self.assertFalse(SummaryLog.objects.filter(id=log.id).exists())
        self.assertTrue(Store.objects.get(id=log.id).deleted)

    def test_hard_deleted_model_propagates_to_store_record(self):
        """
        It could be the case that we have two store records, one that is hard deleted and the
        other that has a fk pointing to the hard deleted record. When we deserialize, we want
        to ensure that the record with the fk pointer also gets the hard deleted flag set,
        while also not deserializing the data into a model.
        """
        # user will be deleted
        user = MyUser(username="******")
        user.save(update_dirty_bit_to=False)
        # log may be synced in from other device
        log = SummaryLog(user_id=user.id)
        log.id = log.calculate_uuid()
        StoreModelFacilityFactory(
            model_name="user",
            id=user.id,
            serialized=json.dumps(user.serialize()),
            deleted=True,
            hard_deleted=True,
        )
        StoreModelFacilityFactory(
            model_name="contentsummarylog",
            id=log.id,
            serialized=json.dumps(log.serialize()),
        )
        # make sure delete propagates to store due to deleted foreign key
        self.mc.deserialize_from_store()
        # have to serialize to update deleted models
        self.mc.serialize_into_store()
        self.assertFalse(SummaryLog.objects.filter(id=log.id).exists())
        self.assertTrue(Store.objects.get(id=log.id).hard_deleted)

    def _create_two_users_to_deserialize(self):
        # Serialize two users, then rewrite their store records with new
        # usernames and a dirty bit so deserialization must update both.
        user = MyUser(username="******", password="******")
        user2 = MyUser(username="******", password="******")
        user.save()
        user2.save()
        self.mc.serialize_into_store()
        user.username = "******"
        user2.username = "******"
        Store.objects.filter(id=user.id).update(serialized=json.dumps(user.serialize()), dirty_bit=True)
        Store.objects.filter(id=user2.id).update(serialized=json.dumps(user2.serialize()), dirty_bit=True)
        return user, user2

    def test_regular_model_deserialization(self):
        # deserialization should be able to handle multiple records
        user, user2 = self._create_two_users_to_deserialize()
        self.mc.deserialize_from_store()
        self.assertFalse(MyUser.objects.filter(username="******").exists())
        self.assertFalse(MyUser.objects.filter(username="******").exists())
        self.assertTrue(MyUser.objects.filter(username="******").exists())
        self.assertTrue(MyUser.objects.filter(username="******").exists())

    def test_filtered_deserialization(self):
        # filtered deserialization only impacts specific records
        user, user2 = self._create_two_users_to_deserialize()
        self.mc.deserialize_from_store(filter=Filter(user._morango_partition))
        self.assertFalse(MyUser.objects.filter(username="******").exists())
        self.assertTrue(MyUser.objects.filter(username="******").exists())
        self.assertTrue(MyUser.objects.filter(username="******").exists())
        self.assertFalse(MyUser.objects.filter(username="******").exists())
def setUp(self):
    """Serialize one facility and remember its initial RecordMaxCounter row."""
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
    self.mc = MorangoProfileController("facilitydata")
    self.fac1 = FacilityModelFactory(name="school")
    self.mc.serialize_into_store()
    # baseline counter row, compared against after later serializations
    self.old_rmc = RecordMaxCounter.objects.first()
class RecordMaxCounterUpdatesDuringSerialization(TestCase):
    """RecordMaxCounter bookkeeping during serialize_into_store: new counter rows
    under a new instance ID, in-place increments for the current instance, and
    counters for freshly created models."""

    def setUp(self):
        (self.current_id, _) = InstanceIDModel.get_or_create_current_instance()
        self.mc = MorangoProfileController("facilitydata")
        self.fac1 = FacilityModelFactory(name="school")
        self.mc.serialize_into_store()
        # baseline counter row for comparisons in the tests below
        self.old_rmc = RecordMaxCounter.objects.first()

    def test_new_rmc_for_existing_model(self):
        with EnvironmentVarGuard() as env:
            # switch to a new system id so a fresh instance ID is created
            env['MORANGO_SYSTEM_ID'] = 'new_sys_id'
            (new_id, _) = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            Facility.objects.update(name="facility")
            self.mc.serialize_into_store()
            new_rmc = RecordMaxCounter.objects.get(
                instance_id=new_id.id, store_model_id=self.fac1.id
            )
            new_store_record = Store.objects.get(id=self.fac1.id)
            self.assertEqual(new_rmc.counter, new_store_record.last_saved_counter)
            self.assertEqual(new_rmc.instance_id, new_store_record.last_saved_instance)

    def test_update_rmc_for_existing_model(self):
        Facility.objects.update(name="facility")
        self.mc.serialize_into_store()
        # there should only be 1 RecordMaxCounter for a specific instance_id and a specific model (unique_together)
        self.assertEqual(
            RecordMaxCounter.objects.filter(
                instance_id=self.current_id.id, store_model_id=self.fac1.id
            ).count(),
            1,
        )
        new_rmc = RecordMaxCounter.objects.get(
            instance_id=self.current_id.id, store_model_id=self.fac1.id
        )
        new_store_record = Store.objects.get(id=self.fac1.id)
        # re-serializing the same instance increments the existing counter
        self.assertEqual(self.old_rmc.counter + 1, new_rmc.counter)
        self.assertEqual(new_rmc.counter, new_store_record.last_saved_counter)
        self.assertEqual(new_rmc.instance_id, new_store_record.last_saved_instance)

    def test_new_rmc_for_non_existent_model(self):
        with EnvironmentVarGuard() as env:
            env['MORANGO_SYSTEM_ID'] = 'new_sys_id'
            (new_id, _) = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            new_fac = FacilityModelFactory(name="college")
            self.mc.serialize_into_store()
            new_rmc = RecordMaxCounter.objects.get(
                instance_id=new_id.id, store_model_id=new_fac.id
            )
            new_store_record = Store.objects.get(id=new_fac.id)
            self.assertNotEqual(new_id.id, self.current_id.id)
            self.assertEqual(new_store_record.last_saved_instance, new_rmc.instance_id)
            self.assertEqual(new_store_record.last_saved_counter, new_rmc.counter)
class NetworkSyncConnectionTestCase(LiveServerTestCase):
    """Exercise NetworkSyncConnection against a live test server: sync-session
    lifecycle, certificate fetching/signing, and pushing client cert chains."""

    def setUp(self):
        self.profile = "facilitydata"
        # Root scope: read-write over the whole main partition.
        self.root_scope_def = ScopeDefinition.objects.create(
            id="rootcert",
            profile=self.profile,
            version=1,
            primary_scope_param_key="mainpartition",
            description="Root cert for ${mainpartition}.",
            read_filter_template="",
            write_filter_template="",
            read_write_filter_template="${mainpartition}",
        )
        # Subset scope: read the main partition, write only a sub-partition.
        self.subset_scope_def = ScopeDefinition.objects.create(
            id="subcert",
            profile=self.profile,
            version=1,
            primary_scope_param_key="",
            description="Subset cert under ${mainpartition} for ${subpartition}.",
            read_filter_template="${mainpartition}",
            write_filter_template="${mainpartition}:${subpartition}",
            read_write_filter_template="",
        )
        self.root_cert = Certificate.generate_root_certificate(
            self.root_scope_def.id)
        # Subset cert carries a private key, and is signed by the root.
        self.subset_cert = Certificate(
            parent=self.root_cert,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=json.dumps({
                "mainpartition": self.root_cert.id,
                "subpartition": "abracadabra"
            }),
            private_key=Key(),
        )
        self.root_cert.sign_certificate(self.subset_cert)
        self.subset_cert.save()
        # Signed but intentionally never saved, and holds only a public key.
        self.unsaved_cert = Certificate(
            parent=self.root_cert,
            profile=self.profile,
            scope_definition=self.subset_scope_def,
            scope_version=self.subset_scope_def.version,
            scope_params=json.dumps({
                "mainpartition": self.root_cert.id,
                "subpartition": "other"
            }),
            public_key=Key(),
        )
        self.root_cert.sign_certificate(self.unsaved_cert)
        self.controller = MorangoProfileController("facilitydata")
        # Connection points at the LiveServerTestCase's ephemeral server URL.
        self.network_connection = self.controller.create_network_connection(
            self.live_server_url)
        self.key = SharedKey.get_or_create_shared_key()

    @mock.patch.object(SyncSession.objects, "create", return_value=None)
    def test_creating_sync_session_successful(self, mock_object):
        self.assertEqual(SyncSession.objects.filter(active=True).count(), 0)
        self.network_connection.create_sync_session(self.subset_cert, self.root_cert)
        self.assertEqual(SyncSession.objects.filter(active=True).count(), 1)

    @mock.patch.object(NetworkSyncConnection, "_create_sync_session")
    @mock.patch.object(Certificate, "verify", return_value=False)
    def test_creating_sync_session_cert_fails_to_verify(
            self, mock_verify, mock_create):
        # Server cert fails verification, so session creation must abort.
        mock_create.return_value.json.return_value = {}
        with self.assertRaises(CertificateSignatureInvalid):
            self.network_connection.create_sync_session(
                self.subset_cert, self.root_cert)

    def test_get_remote_certs(self):
        certs = self.subset_cert.get_ancestors(include_self=True)
        remote_certs = self.network_connection.get_remote_certificates(
            self.root_cert.id)
        self.assertSetEqual(set(certs), set(remote_certs))

    @mock.patch.object(SessionWrapper, "request")
    def test_csr(self, mock_request):
        # mock a "signed" cert being returned by server
        cert_serialized = json.dumps(
            CertificateSerializer(self.subset_cert).data)
        mock_request.return_value.json.return_value = json.loads(
            cert_serialized)
        self.subset_cert.delete()
        # we only want to make sure the "signed" cert is saved
        with mock.patch.object(
            Key,
            "get_private_key_string",
            return_value=self.subset_cert.private_key.get_private_key_string(),
        ):
            self.network_connection.certificate_signing_request(
                self.root_cert, "", "")
        self.assertTrue(
            Certificate.objects.filter(
                id=json.loads(cert_serialized)["id"]).exists())

    @override_settings(ALLOW_CERTIFICATE_PUSHING=True)
    def test_push_signed_client_certificate_chain(self):
        self.network_connection.capabilities = [ALLOW_CERTIFICATE_PUSHING]
        cert = self.network_connection.push_signed_client_certificate_chain(
            self.root_cert,
            self.subset_scope_def.id,
            {
                "mainpartition": self.root_cert.id,
                "subpartition": "abracadabra"
            },
        )
        # Returned cert is the server-side copy: no private key attached.
        self.assertEqual(cert.private_key, None)
        self.assertTrue(Certificate.objects.filter(id=cert.id).exists())

    @override_settings(ALLOW_CERTIFICATE_PUSHING=True)
    def test_push_signed_client_certificate_chain_publickey_error(self):
        self.network_connection.capabilities = [ALLOW_CERTIFICATE_PUSHING]
        # Server advertises a mismatched public key -> push rejected with 400.
        with mock.patch.object(NetworkSyncConnection, "_get_public_key"):
            NetworkSyncConnection._get_public_key.return_value.json.return_value = [
                {
                    "public_key": Key().get_public_key_string()
                }
            ]
            with self.assertRaises(HTTPError) as e:
                self.network_connection.push_signed_client_certificate_chain(
                    self.root_cert,
                    self.subset_scope_def.id,
                    {
                        "mainpartition": self.root_cert.id,
                        "subpartition": "abracadabra"
                    },
                )
            self.assertEqual(e.exception.response.status_code, 400)

    @override_settings(ALLOW_CERTIFICATE_PUSHING=True)
    def test_push_signed_client_certificate_chain_bad_cert(self):
        self.network_connection.capabilities = [ALLOW_CERTIFICATE_PUSHING]
        # Scope params that don't satisfy the scope definition -> 400.
        with self.assertRaises(HTTPError) as e:
            self.network_connection.push_signed_client_certificate_chain(
                self.root_cert, self.subset_scope_def.id, {"bad": "scope_params"})
        self.assertEqual(e.exception.response.status_code, 400)

    @override_settings(ALLOW_CERTIFICATE_PUSHING=True)
    @mock.patch.object(NetworkSyncConnection, "_get_nonce")
    def test_push_signed_client_certificate_chain_nonce_error(
            self, mock_nonce):
        self.network_connection.capabilities = [ALLOW_CERTIFICATE_PUSHING]
        # A nonce the server never issued should be rejected with 403.
        mock_nonce.return_value.json.return_value = {"id": uuid.uuid4().hex}
        with self.assertRaises(HTTPError) as e:
            self.network_connection.push_signed_client_certificate_chain(
                self.root_cert,
                self.subset_scope_def.id,
                {
                    "mainpartition": self.root_cert.id,
                    "subpartition": "abracadabra"
                },
            )
        self.assertEqual(e.exception.response.status_code, 403)

    def test_push_signed_client_certificate_chain_not_allowed(self):
        # Capabilities not set, so the client-side guard should raise before
        # any request is made.
        with self.assertRaises(MorangoServerDoesNotAllowNewCertPush) as e:
            self.network_connection.push_signed_client_certificate_chain(
                self.root_cert,
                self.subset_scope_def.id,
                {
                    "mainpartition": self.root_cert.id,
                    "subpartition": "abracadabra"
                },
            )
            # NOTE(review): this assertion sits inside the assertRaises block,
            # after the call that raises, so it never executes — presumably it
            # was meant to be dropped or moved; confirm against test intent.
            self.assertEqual(e.exception.response.status_code, 403)

    def test_get_cert_chain(self):
        response = self.network_connection._get_certificate_chain(
            params={"ancestors_of": self.subset_cert.id})
        data = response.json()
        self.assertEqual(len(data), Certificate.objects.count())
        # Chain is ordered root-first, leaf-last.
        self.assertEqual(data[0]["id"], self.root_cert.id)
        self.assertEqual(data[1]["id"], self.subset_cert.id)

    @mock.patch.object(SyncSession.objects, "create")
    def test_close_sync_session(self, mock_create):
        mock_session = mock.Mock(spec=SyncSession)

        # Capture the id that would have been persisted, so the mock session
        # can stand in for the real one when closing.
        def create(**data):
            mock_session.id = data.get("id")
            return mock_session

        mock_create.side_effect = create
        self.assertEqual(SyncSession.objects.filter(active=True).count(), 0)
        client = self.network_connection.create_sync_session(
            self.subset_cert, self.root_cert)
        self.assertEqual(SyncSession.objects.filter(active=True).count(), 1)
        self.network_connection.close_sync_session(client.sync_session)
        self.assertEqual(SyncSession.objects.filter(active=True).count(), 0)
def setUp(self):
    """Ensure a current Morango instance ID exists and expose a profile
    controller for the facility-data profile to the test methods."""
    InstanceIDModel.get_or_create_current_instance()
    self.controller = MorangoProfileController(PROFILE_FACILITY_DATA)
class SerializeIntoStoreTestCase(TestCase):
    """Cover serialize_into_store: dirty-bit handling, counters, conflicts,
    filtered serialization, and (hard-)delete propagation to Store records."""

    def setUp(self):
        InstanceIDModel.get_or_create_current_instance()
        # Number of factory models created by the bulk tests below.
        self.range = 10
        self.mc = MorangoProfileController("facilitydata")
        self.original_name = "ralphie"
        self.new_name = "rafael"

    def test_all_models_get_serialized(self):
        [FacilityModelFactory() for _ in range(self.range)]
        self.mc.serialize_into_store()
        self.assertEqual(len(Store.objects.all()), self.range)

    def test_no_models_get_serialized(self):
        # set dirty bit off on new models created
        [
            FacilityModelFactory.build().save(update_dirty_bit_to=False)
            for _ in range(self.range)
        ]
        # only models with dirty bit on should be serialized
        self.mc.serialize_into_store()
        self.assertFalse(Store.objects.exists())

    def test_dirty_bit_gets_set(self):
        [FacilityModelFactory() for _ in range(self.range)]
        # dirty bit should be on
        for facility in Facility.objects.all():
            self.assertTrue(facility._morango_dirty_bit)
        self.mc.serialize_into_store()
        # dirty bit should have been toggled off
        for facility in Facility.objects.all():
            self.assertFalse(facility._morango_dirty_bit)

    def test_store_models_get_updated(self):
        FacilityModelFactory(name=self.original_name)
        self.mc.serialize_into_store()
        store_facility = Store.objects.first()
        deserialized_model = json.loads(store_facility.serialized)
        self.assertEqual(deserialized_model["name"], self.original_name)
        # Re-serializing after a change overwrites the stored payload.
        Facility.objects.update(name=self.new_name)
        self.mc.serialize_into_store()
        store_facility = Store.objects.first()
        deserialized_model = json.loads(store_facility.serialized)
        self.assertEqual(deserialized_model["name"], self.new_name)

    def test_last_saved_counter_updates(self):
        FacilityModelFactory(name=self.original_name)
        self.mc.serialize_into_store()
        old_counter = Store.objects.first().last_saved_counter
        Facility.objects.all().update(name=self.new_name)
        self.mc.serialize_into_store()
        new_counter = Store.objects.first().last_saved_counter
        # Each serialization of a changed record bumps the counter by one.
        self.assertEqual(old_counter + 1, new_counter)

    def test_last_saved_instance_updates(self):
        FacilityModelFactory(name=self.original_name)
        self.mc.serialize_into_store()
        old_instance_id = Store.objects.first().last_saved_instance
        # Serialize again under a freshly generated instance ID (new system).
        with EnvironmentVarGuard() as env:
            env['MORANGO_SYSTEM_ID'] = 'new_sys_id'
            (new_id, _) = InstanceIDModel.get_or_create_current_instance(clear_cache=True)
            Facility.objects.all().update(name=self.new_name)
            self.mc.serialize_into_store()
            new_instance_id = Store.objects.first().last_saved_instance
            self.assertNotEqual(old_instance_id, new_instance_id)
            self.assertEqual(new_instance_id, new_id.id)

    def test_extra_fields_dont_get_overwritten(self):
        # Fields present in the stored payload but absent from the local model
        # (e.g. added by a newer schema elsewhere) must survive merging.
        serialized = """{"username": "******", "height": 6.0, "weight": 100}"""
        MyUser.objects.create(username="******")
        self.mc.serialize_into_store()
        Store.objects.update(serialized=serialized)
        MyUser.objects.update(username="******")
        self.mc.serialize_into_store()
        serialized = json.loads(Store.objects.first().serialized)
        self.assertIn("height", serialized)

    def test_updates_store_deleted_flag(self):
        fac = FacilityModelFactory()
        fac_id = fac.id
        self.mc.serialize_into_store()
        self.assertFalse(Store.objects.get(pk=fac_id).deleted)
        fac.delete()
        # Deleting queues the model in DeletedModels until serialization
        # propagates the flag into the Store and clears the queue.
        self.assertTrue(DeletedModels.objects.exists())
        self.mc.serialize_into_store()
        self.assertFalse(DeletedModels.objects.exists())
        self.assertTrue(Store.objects.get(pk=fac_id).deleted)

    def test_cascading_delete_updates_store_deleted_flag(self):
        fac = FacilityModelFactory()
        child = FacilityModelFactory(parent_id=fac.id)
        child_id = child.id
        self.mc.serialize_into_store()
        self.assertFalse(Store.objects.get(pk=child_id).deleted)
        # Deleting the parent cascades; the child's Store record must also
        # be flagged deleted after serialization.
        fac.delete()
        self.mc.serialize_into_store()
        self.assertTrue(Store.objects.get(pk=child_id).deleted)

    def test_conflicting_data_appended(self):
        self.maxDiff = None
        serialized = json.dumps({"username": "******"})
        conflicting = []
        user = MyUser.objects.create(username="******")
        self.mc.serialize_into_store()
        # add serialized fields to conflicting data
        conflicting.insert(0, serialized)
        conflicting.insert(0, json.dumps(user.serialize()))
        # set store record and app record dirty bits to true to force
        # serialization merge conflict
        Store.objects.update(conflicting_serialized_data=serialized, dirty_bit=True)
        user.username = "******"
        user.save(update_dirty_bit_to=True)
        self.mc.serialize_into_store()
        # assert we have placed serialized object into store's serialized field
        st = Store.objects.get(id=user.id)
        self.assertEqual(json.loads(st.serialized), user.serialize())
        # assert store serialized field is moved to conflicting data
        conflicting_serialized_data = st.conflicting_serialized_data.split("\n")
        for x in range(len(conflicting)):
            self.assertEqual(conflicting[x], conflicting_serialized_data[x])

    def test_filtered_serialization_single_filter(self):
        fac = FacilityModelFactory()
        user = MyUser.objects.create(username="******")
        log = SummaryLog.objects.create(user=user)
        # Only records under the user's partition should be serialized.
        self.mc.serialize_into_store(filter=Filter(user._morango_partition))
        self.assertFalse(Store.objects.filter(id=fac.id).exists())
        self.assertTrue(Store.objects.filter(id=user.id).exists())
        self.assertTrue(Store.objects.filter(id=log.id).exists())

    def test_filtered_serialization_multiple_filter(self):
        fac = FacilityModelFactory()
        user = MyUser.objects.create(username="******")
        user2 = MyUser.objects.create(username="******")
        log = SummaryLog.objects.create(user=user)
        # A Filter accepts multiple newline-separated partition prefixes.
        self.mc.serialize_into_store(
            filter=Filter(user._morango_partition + "\n" + user2._morango_partition)
        )
        self.assertFalse(Store.objects.filter(id=fac.id).exists())
        self.assertTrue(Store.objects.filter(id=user2.id).exists())
        self.assertTrue(Store.objects.filter(id=user.id).exists())
        self.assertTrue(Store.objects.filter(id=log.id).exists())

    def test_self_ref_fk_class_adds_value_to_store(self):
        # Models with a self-referential FK record the parent id in the
        # Store's _self_ref_fk column.
        root = FacilityModelFactory()
        child = FacilityModelFactory(parent=root)
        self.mc.serialize_into_store()
        self.assertEqual(Store.objects.get(id=child.id)._self_ref_fk, root.id)

    def test_regular_class_leaves_value_blank_in_store(self):
        log = SummaryLog.objects.create(user=MyUser.objects.create(username="******"))
        self.mc.serialize_into_store()
        self.assertEqual(Store.objects.get(id=log.id)._self_ref_fk, "")

    def test_previously_deleted_store_flag_resets(self):
        # create and delete object
        user = MyUser.objects.create(username="******")
        user_id = user.id
        self.mc.serialize_into_store()
        MyUser.objects.all().delete()
        self.mc.serialize_into_store()
        self.assertTrue(Store.objects.get(id=user_id).deleted)
        # recreate object with same id
        user = MyUser.objects.create(username="******")
        # ensure deleted flag is updated after recreation
        self.mc.serialize_into_store()
        self.assertFalse(Store.objects.get(id=user_id).deleted)

    def test_previously_hard_deleted_store_flag_resets(self):
        # create and delete object
        user = MyUser.objects.create(username="******")
        user_id = user.id
        self.mc.serialize_into_store()
        user.delete(hard_delete=True)
        self.mc.serialize_into_store()
        self.assertTrue(Store.objects.get(id=user_id).hard_deleted)
        # recreate object with same id
        user = MyUser.objects.create(username="******")
        # ensure hard deleted flag is updated after recreation
        self.mc.serialize_into_store()
        self.assertFalse(Store.objects.get(id=user_id).hard_deleted)

    def test_hard_delete_wipes_serialized(self):
        user = MyUser.objects.create(username="******")
        log = SummaryLog.objects.create(user=user)
        self.mc.serialize_into_store()
        Store.objects.update(conflicting_serialized_data="store")
        st = Store.objects.get(id=log.id)
        self.assertNotEqual(st.serialized, "")
        self.assertNotEqual(st.conflicting_serialized_data, "")
        user.delete(hard_delete=True)  # cascade hard delete
        self.mc.serialize_into_store()
        # Hard delete blanks the payload and any conflicting data.
        st.refresh_from_db()
        self.assertEqual(st.serialized, "{}")
        self.assertEqual(st.conflicting_serialized_data, "")

    def test_in_app_hard_delete_propagates(self):
        user = MyUser.objects.create(username="******")
        log_id = uuid.uuid4().hex
        log = SummaryLog(user=user, id=log_id)
        StoreModelFacilityFactory(
            model_name="user", id=user.id, serialized=json.dumps(user.serialize())
        )
        store_log = StoreModelFacilityFactory(
            model_name="contentsummarylog",
            id=log.id,
            serialized=json.dumps(log.serialize()),
        )
        user.delete(hard_delete=True)
        # preps log to be hard_deleted
        self.mc.deserialize_from_store()
        # updates store log to be hard_deleted
        self.mc.serialize_into_store()
        store_log.refresh_from_db()
        self.assertTrue(store_log.hard_deleted)
        self.assertEqual(store_log.serialized, "{}")

    def test_store_hard_delete_propagates(self):
        user = MyUser(username="******")
        user.save(update_dirty_bit_to=False)
        log = SummaryLog(user=user)
        log.save(update_dirty_bit_to=False)
        StoreModelFacilityFactory(
            model_name="user",
            id=user.id,
            serialized=json.dumps(user.serialize()),
            hard_deleted=True,
            deleted=True,
        )
        # make sure hard_deleted propagates to related models even if they
        # are not hard_deleted
        self.mc.deserialize_from_store()
        self.assertTrue(HardDeletedModels.objects.filter(id=log.id).exists())