def _bulk_update_events(self, cursor, events, statement_base):
    """Run *statement_base* against the ids of *events* in bounded chunks.

    ``statement_base`` is an SQL template with a single ``%s`` slot that is
    filled with a comma-separated list of ``?`` placeholders. The id list is
    split because SQLite caps the number of bound variables per query
    (historically 1000), so one statement per chunk is executed instead.
    """
    ids = [row[0] for row in events]
    for chunk in ichunked(ids, constants.DATABASE_EVENT_CHUNK_SIZE):
        placeholders = ','.join('?' * len(chunk))
        cursor.execute(statement_base % placeholders, chunk)
def test_empty_sequence(self):
    """ichunked() over an empty sequence must yield no chunks at all."""
    chunk_size = 5
    test_sequence = []
    # record everything the iterator produces
    iterations = 0
    iterated_elements = []
    for sequence_subset in ichunked(test_sequence, chunk_size):
        self.assertLessEqual(len(sequence_subset), chunk_size)
        iterated_elements.extend(sequence_subset)
        iterations += 1
    # nothing in, nothing out: zero chunks and zero elements
    self.assertListEqual(iterated_elements, test_sequence)
    self.assertEqual(iterations, 0)
def _test_chunking(self, chunk_size, chunk_iterations):
    """Chunk a sequence of *chunk_iterations* full chunks plus a random
    partial chunk, and verify chunk sizes, iteration count and content."""
    # build test data: full chunks plus 0..chunk_size-1 trailing elements
    extra_size = randint(0, chunk_size - 1)
    test_sequence = list(range(chunk_size * chunk_iterations + extra_size))
    # collect results for the assertions below
    iterations = 0
    collected_elements = []
    for subset in ichunked(test_sequence, chunk_size):
        self.assertLessEqual(len(subset), chunk_size)
        collected_elements.extend(subset)
        iterations += 1
    # a non-empty partial chunk costs one extra iteration
    expected_iterations = chunk_iterations + (1 if extra_size > 0 else 0)
    self.assertListEqual(collected_elements, test_sequence)
    self.assertEqual(iterations, expected_iterations)
def send(self, events, use_logging=False):
    """Transmit *events* to the remote receiver in batches of
    ``self._batch_size`` using a freshly constructed PyLogBeat client.

    The client's context manager scopes the connection lifetime, so it is
    opened before the first batch and closed after the last one.
    """
    client = pylogbeat.PyLogBeatClient(
        use_logging=use_logging, **self._client_arguments)
    with client:
        for batch in ichunked(events, self._batch_size):
            client.send(batch)