def test_pull_client__initialize(self, mock_parent_create):
    """A PullClient's initialize() must store the filter and open a pull transfer session."""
    pull_client = self.build_client(PullClient)
    filt = Filter("abc123")

    pull_client.initialize(filt)

    self.assertEqual(filt, pull_client.sync_filter)
    mock_parent_create.assert_called_with(filt, push=False)
def test_push_client__initialize(
    self,
    mock_settings,
    mock_serialize,
    mock_parent_create,
    mock_queue,
    mock_transfer_update,
):
    """A PushClient's initialize() serializes, opens a push session, queues data,
    reports the queued record total, and fires the queuing signal."""
    queuing_handler = mock.Mock()
    push_client = self.build_client(PushClient)
    push_client.signals.queuing.connect(queuing_handler)
    filt = Filter("abc123")
    # direct attribute assignment on a Mock is equivalent to setattr()
    mock_settings.MORANGO_SERIALIZE_BEFORE_QUEUING = True

    push_client.initialize(filt)

    transfer_session = push_client.current_transfer_session
    mock_serialize.assert_called_with(self.session.profile, filter=filt)
    mock_parent_create.assert_called_with(filt, push=True)
    mock_queue.assert_called_with(transfer_session)
    mock_transfer_update.assert_called_once_with(
        {"records_total": transfer_session.records_total},
        transfer_session,
    )
    queuing_handler.assert_any_call(transfer_session=transfer_session)
def test_filtered_deserialization(self):
    """Deserializing with a partition filter should only materialize the records
    falling under that partition, leaving other users' records untouched."""
    # filtered deserialization only impacts specific records
    # NOTE(review): the four username literals below appear redacted ("******");
    # presumably they originally named distinct users created by
    # _create_two_users_to_deserialize — as written the assertTrue/assertFalse
    # pairs on identical filters contradict each other; verify against the
    # original fixture before trusting these assertions.
    user, user2 = self._create_two_users_to_deserialize()
    # only deserialize store records under the first user's partition
    self.mc.deserialize_from_store(filter=Filter(user._morango_partition))
    self.assertFalse(MyUser.objects.filter(username="******").exists())
    self.assertTrue(MyUser.objects.filter(username="******").exists())
    self.assertTrue(MyUser.objects.filter(username="******").exists())
    self.assertFalse(MyUser.objects.filter(username="******").exists())
def test_filtered_serialization_single_filter(self):
    """Serializing with a user-partition filter stores the user and their log
    but not the facility, which lies outside the filtered partition."""
    facility = FacilityModelFactory()
    user = MyUser.objects.create(username="******")
    summary_log = SummaryLog.objects.create(user=user)

    self.mc.serialize_into_store(filter=Filter(user._morango_partition))

    self.assertFalse(Store.objects.filter(id=facility.id).exists())
    self.assertTrue(Store.objects.filter(id=user.id).exists())
    self.assertTrue(Store.objects.filter(id=summary_log.id).exists())
def test_no_fsics_get_updated(self):
    """update_fsics must not lower counters: older client FSICs (counter 1)
    should leave the server's newer counters (counter 2) unchanged."""
    client_fsic = {'a' * 32: 1, 'b' * 32: 1, 'c' * 32: 1}
    server_fsic = {'a' * 32: 2, 'b' * 32: 2, 'c' * 32: 2}
    self.assertFalse(DatabaseMaxCounter.objects.filter(counter=1).exists())
    # seed the database with the server-side (newer) counters
    for inst_id, ctr in iteritems(server_fsic):
        DatabaseMaxCounter.objects.create(
            instance_id=inst_id, counter=ctr, partition=self.filter
        )

    DatabaseMaxCounter.update_fsics(client_fsic, Filter(self.filter))

    # still no counter-1 rows: the stale client values were ignored
    self.assertFalse(DatabaseMaxCounter.objects.filter(counter=1).exists())
def test_pull_client__finalize(self, mock_dequeue, mock_close):
    """A PullClient's finalize() dequeues the transfer session, fires the
    dequeuing signal, and closes the transfer session."""
    dequeuing_handler = mock.Mock()
    pull_client = self.build_client(PullClient)
    pull_client.signals.dequeuing.connect(dequeuing_handler)
    pull_client.sync_filter = Filter("abc123")
    pull_client.current_transfer_session.server_fsic = "{}"

    pull_client.finalize()

    mock_dequeue.assert_called_with(pull_client.current_transfer_session)
    dequeuing_handler.assert_any_call(
        transfer_session=pull_client.current_transfer_session
    )
    mock_close.assert_called_once()
def test_initiate_push(self, MockPushClient):
    """
    initiate_push() should construct a PushClient, drive its full lifecycle
    (initialize → run → finalize), and adopt its signals.

    TODO: should eventually be removed as this method is deprecated
    """
    # fix: local was misleadingly named `mock_pull_client` even though it
    # mocks a PushClient — renamed for consistency with the test's intent
    mock_push_client = mock.Mock(spec=PushClient)
    MockPushClient.return_value = mock_push_client
    client = self.build_client(SyncSessionClient)
    sync_filter = Filter("abc123")

    client.initiate_push(sync_filter)

    MockPushClient.assert_called_with(
        self.conn, self.session, chunk_size=self.chunk_size
    )
    mock_push_client.initialize.assert_called_once_with(sync_filter)
    mock_push_client.run.assert_called_once()
    mock_push_client.finalize.assert_called_once()
    self.assertEqual(client.signals, mock_push_client.signals)
def test_all_partitions_have_all_instances(self):
    """When every instance covers every partition in the filter, each instance
    contributes a max counter to the result."""
    combined_filter = Filter(self.user_prefix_a + "\n" + self.user2_prefix_b)
    fmcs = DatabaseMaxCounter.calculate_filter_max_counters(combined_filter)
    self.assertEqual(fmcs[self.instance_a], 17)
    self.assertEqual(fmcs[self.instance_b], 10)
def test_single_partition_with_all_instances(self):
    """A single-partition filter yields each instance's counter for that partition."""
    single_filter = Filter(self.user_prefix_a)
    fmcs = DatabaseMaxCounter.calculate_filter_max_counters(single_filter)
    self.assertEqual(fmcs[self.instance_a], 20)
    self.assertEqual(fmcs[self.instance_b], 10)
def test_insufficient_instances_for_all_partitions(self):
    """When no single instance covers every partition in the filter, the
    calculation returns an empty (falsy) result."""
    user_with_prefix = self.prefix_b + "user_id:richard"
    combined_filter = Filter(self.prefix_a + "\n" + user_with_prefix)
    self.assertFalse(
        DatabaseMaxCounter.calculate_filter_max_counters(combined_filter)
    )
def test_instances_for_one_partition_but_not_other(self):
    """Only instance_b covers both partitions, so only it appears in the result."""
    combined_filter = Filter(self.user_prefix_a + "\n" + self.user_prefix_b)
    fmcs = DatabaseMaxCounter.calculate_filter_max_counters(combined_filter)
    self.assertEqual(fmcs[self.instance_b], 10)
def test_filter_not_in_dmc(self):
    """A filter matching no DatabaseMaxCounter partitions yields an empty dict."""
    result = DatabaseMaxCounter.calculate_filter_max_counters(Filter("ZZZ"))
    self.assertEqual(result, {})
def _queue_into_buffer(transfersession):
    """
    Takes a chunk of data from the store to be put into the buffer to be sent to another
    morango instance.

    ALGORITHM: We do Filter Specific Instance Counter arithmetic to get our newest data
    compared to the server's older data. We use raw sql queries to place data in the
    buffer and the record max counter buffer, which matches the conditions of the FSIC,
    as well as the partition for the data we are syncing.

    :param transfersession: transfer session model instance whose filter/FSICs
        determine which store records get copied into the buffer tables
    :return: None; side effect is rows inserted into Buffer and RecordMaxCounterBuffer
    """
    filter_prefixes = Filter(transfersession.filter)
    server_fsic = json.loads(transfersession.server_fsic)
    client_fsic = json.loads(transfersession.client_fsic)

    # on a push we queue what the client has beyond the server; on a pull, the reverse
    if transfersession.push:
        fsics = _fsic_queuing_calc(client_fsic, server_fsic)
    else:
        fsics = _fsic_queuing_calc(server_fsic, client_fsic)

    # if fsics are identical or receiving end has newer data, then there is nothing to queue
    if not fsics:
        return

    # create condition for all push FSICs where instance_ids are equal, but internal
    # counters are higher than FSICs counters
    # NOTE: the original re-checked `if fsics:` before joining these conditions, but
    # fsics is guaranteed non-empty after the early return above — dead guard removed.
    last_saved_by_conditions = [
        "(last_saved_instance = '{0}' AND last_saved_counter > {1})".format(
            instance, counter
        )
        for instance, counter in six.iteritems(fsics)
    ]
    last_saved_by_conditions = [
        _join_with_logical_operator(last_saved_by_conditions, "OR")
    ]

    # create condition for filtering by partitions
    partition_conditions = [
        "partition LIKE '{}%'".format(prefix) for prefix in filter_prefixes
    ]
    if filter_prefixes:
        partition_conditions = [
            _join_with_logical_operator(partition_conditions, "OR")
        ]

    # combine conditions
    fsic_and_partition_conditions = _join_with_logical_operator(
        last_saved_by_conditions + partition_conditions, "AND"
    )

    # filter by profile
    where_condition = _join_with_logical_operator(
        [
            fsic_and_partition_conditions,
            "profile = '{}'".format(transfersession.sync_session.profile),
        ],
        "AND",
    )

    # SECURITY NOTE: these statements are assembled via string formatting rather than
    # parameterized queries. The interpolated values (instance ids, counters, partition
    # prefixes, profile, session id) originate from internal models here, but a value
    # containing a quote would break or alter the SQL — prefer cursor parameters.
    # execute raw sql to take all records that match condition, to be put into buffer for transfer
    with connection.cursor() as cursor:
        queue_buffer = """INSERT INTO {outgoing_buffer}
                       (model_uuid, serialized, deleted, last_saved_instance, last_saved_counter,
                        hard_deleted, model_name, profile, partition, source_id,
                        conflicting_serialized_data, transfer_session_id, _self_ref_fk)
                       SELECT id, serialized, deleted, last_saved_instance, last_saved_counter,
                              hard_deleted, model_name, profile, partition, source_id,
                              conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk
                       FROM {store}
                       WHERE {condition}""".format(
            outgoing_buffer=Buffer._meta.db_table,
            transfer_session_id=transfersession.id,
            condition=where_condition,
            store=Store._meta.db_table,
        )
        cursor.execute(queue_buffer)

        # take all record max counters that are foreign keyed onto store models,
        # which were queued into the buffer
        queue_rmc_buffer = """INSERT INTO {outgoing_rmcb}
                           (instance_id, counter, transfer_session_id, model_uuid)
                           SELECT instance_id, counter, '{transfer_session_id}', store_model_id
                           FROM {record_max_counter} AS rmc
                           INNER JOIN {outgoing_buffer} AS buffer
                           ON rmc.store_model_id = buffer.model_uuid
                           WHERE buffer.transfer_session_id = '{transfer_session_id}'
                           """.format(
            outgoing_rmcb=RecordMaxCounterBuffer._meta.db_table,
            transfer_session_id=transfersession.id,
            record_max_counter=RecordMaxCounter._meta.db_table,
            outgoing_buffer=Buffer._meta.db_table,
        )
        cursor.execute(queue_rmc_buffer)