Example #1
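A complete BatchingBoltTests case. setUp serializes a mix of ordinary Tuples and __tick Tuples as JSON framed with the '\nend\n' delimiter used by the Storm multi-lang protocol, and feeds it to the bolt through a BytesIO input stream. The tests then cover batching of non-tick Tuples, custom group keys, auto-ack on and off, auto-fail on and off, the exit_on_exception flag, heartbeat sync responses, and tick handling.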
class BatchingBoltTests(unittest.TestCase):
    def setUp(self):
        self.ticks_between_batches = 1
        self.tup_dicts = [{
            'id': 14,
            'comp': 'some_spout',
            'stream': 'default',
            'task': 'some_bolt',
            'tuple': [1, 2, 3]
        }, {
            'id': 15,
            'comp': 'some_spout',
            'stream': 'default',
            'task': 'some_bolt',
            'tuple': [4, 5, 6]
        }, {
            'id': None,
            'comp': '__system',
            'stream': '__tick',
            'task': -1,
            'tuple': [1]
        }, {
            'id': 16,
            'comp': 'some_spout',
            'stream': 'default',
            'task': 'some_bolt',
            'tuple': [7, 8, 9]
        }, {
            'id': None,
            'comp': '__system',
            'stream': '__tick',
            'task': -1,
            'tuple': [2]
        }]
        tups_json = '\nend\n'.join(
            [json.dumps(tup_dict) for tup_dict in self.tup_dicts] + [''])
        self.tups = [
            Tuple(tup_dict['id'],
                  tup_dict['comp'], tup_dict['stream'], tup_dict['task'],
                  tuple(tup_dict['tuple'])) for tup_dict in self.tup_dicts
        ]
        self.nontick_tups = [
            tup for tup in self.tups if tup.stream != '__tick'
        ]
        self.bolt = BatchingBolt(input_stream=BytesIO(
            tups_json.encode('utf-8')),
                                 output_stream=BytesIO())
        self.bolt.initialize({}, {})

    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    def test_batching(self, process_batch_mock):
        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_called_with(self.bolt, None,
                                              self.nontick_tups)

    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    def test_group_key(self, process_batch_mock):
        # Change the group key to even/odd grouping
        self.bolt.group_key = lambda t: sum(t.values) % 2

        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_has_calls([
            mock.call(self.bolt, 0,
                      [self.nontick_tups[0], self.nontick_tups[2]]),
            mock.call(self.bolt, 1, [self.nontick_tups[1]])
        ],
                                            any_order=True)

    @patch.object(BatchingBolt, 'ack', autospec=True)
    @patch.object(BatchingBolt, 'process_batch', new=lambda *args: None)
    def test_auto_ack_on(self, ack_mock):
        # Test auto-ack on (the default)
        for __ in self.tups:
            self.bolt._run()
        ack_mock.assert_has_calls(
            [mock.call(self.bolt, tup) for tup in self.tups], any_order=True)
        self.assertEqual(ack_mock.call_count, 5)

    @patch.object(BatchingBolt, 'ack', autospec=True)
    @patch.object(BatchingBolt, 'process_batch', new=lambda *args: None)
    def test_auto_ack_off(self, ack_mock):
        # Test auto-ack off
        self.bolt.auto_ack = False
        for __ in self.tups:
            self.bolt._run()
        # With auto-ack off, only the tick Tuples should have been acked.
        ack_mock.assert_has_calls([
            mock.call(self.bolt, tup)
            for tup in self.tups if self.bolt.is_tick(tup)
        ],
                                  any_order=True)
        self.assertEqual(ack_mock.call_count, 2)

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'raise_exception', new=lambda *a: None)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_on(self, fail_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail on (the default)
        with self.assertRaises(SystemExit):
            self.bolt.run()

        # All waiting Tuples should have failed at this point
        fail_mock.assert_has_calls([
            mock.call(self.bolt, self.nontick_tups[0]),
            mock.call(self.bolt, self.nontick_tups[1]),
            mock.call(self.bolt, self.nontick_tups[2])
        ],
                                   any_order=True)
        self.assertEqual(fail_mock.call_count, 3)

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'raise_exception', new=lambda *a: None)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_off(self, fail_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail off
        self.bolt.auto_fail = False
        with self.assertRaises(SystemExit):
            self.bolt.run()

        # With auto-fail off, no Tuples should have been failed at this point
        self.assertListEqual(fail_mock.call_args_list, [])

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_partial_exit_on_exception_true(self, fail_mock,
                                                      process_batch_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Change the group key to just be the sum of values, which makes 3 separate
        # batches
        self.bolt.group_key = lambda t: sum(t.values)
        self.bolt.exit_on_exception = True
        # Make sure we fail on the second batch
        work = {'status': True}  # to avoid scoping problems

        def work_once(*args):
            if work['status']:
                work['status'] = False
            else:
                raise Exception('borkt')

        process_batch_mock.side_effect = work_once
        # Run the batches
        with self.assertRaises(SystemExit):
            self.bolt.run()
        self.assertEqual(process_batch_mock.call_count, 2)
        # Only some Tuples should have failed at this point. The key is that
        # all un-acked Tuples should be failed, even for batches we haven't
        # started processing yet.
        self.assertEqual(fail_mock.call_count, 2)

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_partial_exit_on_exception_false(self, fail_mock,
                                                       process_batch_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Change the group key to just be the sum of values, which makes 3 separate
        # batches
        self.bolt.group_key = lambda t: sum(t.values)
        self.bolt.exit_on_exception = False
        # Make sure we fail on the second batch
        work = {'status': True, 'raised': False}  # to avoid scoping problems

        def work_once(*args):
            if work['status']:
                work['status'] = False
            else:
                raise Exception('borkt')

        process_batch_mock.side_effect = work_once
        # Run the batches
        with self.assertRaises(SystemExit) as raises_fixture:
            self.bolt.run()
        assert raises_fixture.exception.code == 2
        self.assertEqual(process_batch_mock.call_count, 2)
        # Only Tuples in the current batch should have failed at this point.
        self.assertEqual(fail_mock.call_count, 1)

    @patch.object(BatchingBolt, 'read_tuple', autospec=True)
    @patch.object(BatchingBolt, 'send_message', autospec=True)
    def test_heartbeat_response(self, send_message_mock, read_tuple_mock):
        # Make sure we send sync for heartbeats
        read_tuple_mock.return_value = Tuple(id='foo',
                                             task=-1,
                                             stream='__heartbeat',
                                             values=(),
                                             component='__system')
        self.bolt._run()
        send_message_mock.assert_called_with(self.bolt, {'command': 'sync'})

    @patch.object(BatchingBolt, 'read_tuple', autospec=True)
    @patch.object(BatchingBolt, 'process_tick', autospec=True)
    def test_process_tick(self, process_tick_mock, read_tuple_mock):
        # Make sure process_tick gets called for tick Tuples
        read_tuple_mock.return_value = Tuple(id=None,
                                             task=-1,
                                             component='__system',
                                             stream='__tick',
                                             values=(50, ))
        self.bolt._run()
        process_tick_mock.assert_called_with(self.bolt,
                                             read_tuple_mock.return_value)
Example #2
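A variant of the same test case that patches sys.exit and checks its call count instead of asserting SystemExit, passes Tuple values through as plain lists rather than converting them to tuples, and collapses the partial auto-fail checks into a single test_auto_fail_partial.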
class BatchingBoltTests(unittest.TestCase):

    def setUp(self):
        self.ticks_between_batches = 1
        self.tup_dicts = [{'id': 14,
                           'comp': 'some_spout',
                           'stream': 'default',
                           'task': 'some_bolt',
                           'tuple': [1, 2, 3]},
                          {'id': 15,
                           'comp': 'some_spout',
                           'stream': 'default',
                           'task': 'some_bolt',
                           'tuple': [4, 5, 6]},
                          {'id': None,
                           'comp': '__system',
                           'stream': '__tick',
                           'task': -1,
                           'tuple': [1]},
                          {'id': 16,
                           'comp': 'some_spout',
                           'stream': 'default',
                           'task': 'some_bolt',
                           'tuple': [7, 8, 9]},
                          {'id': None,
                           'comp': '__system',
                           'stream': '__tick',
                           'task': -1,
                           'tuple': [2]}]
        tups_json = '\nend\n'.join([json.dumps(tup_dict) for tup_dict in
                                    self.tup_dicts] + [''])
        self.tups = [Tuple(tup_dict['id'], tup_dict['comp'], tup_dict['stream'],
                           tup_dict['task'], tup_dict['tuple']) for tup_dict in
                     self.tup_dicts]
        self.nontick_tups = [tup for tup in self.tups if tup.stream != '__tick']
        self.bolt = BatchingBolt(input_stream=BytesIO(tups_json.encode('utf-8')),
                                 output_stream=BytesIO())
        self.bolt.initialize({}, {})

    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    def test_batching(self, process_batch_mock):
        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_called_with(self.bolt, None,
                                              self.nontick_tups)

    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    def test_group_key(self, process_batch_mock):
        # Change the group key to even/odd grouping
        self.bolt.group_key = lambda t: sum(t.values) % 2

        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_has_calls([mock.call(self.bolt, 0,
                                                       [self.nontick_tups[0],
                                                        self.nontick_tups[2]]),
                                             mock.call(self.bolt, 1,
                                                       [self.nontick_tups[1]])],
                                            any_order=True)

    @patch.object(BatchingBolt, 'ack', autospec=True)
    @patch.object(BatchingBolt, 'process_batch', new=lambda *args: None)
    def test_auto_ack_on(self, ack_mock):
        # Test auto-ack on (the default)
        for __ in self.tups:
            self.bolt._run()
        ack_mock.assert_has_calls([mock.call(self.bolt, tup)
                                   for tup in self.tups],
                                  any_order=True)
        self.assertEqual(ack_mock.call_count, 5)

    @patch.object(BatchingBolt, 'ack', autospec=True)
    @patch.object(BatchingBolt, 'process_batch', new=lambda *args: None)
    def test_auto_ack_off(self, ack_mock):
        # Test auto-ack off
        self.bolt.auto_ack = False
        for __ in self.tups:
            self.bolt._run()
        # With auto-ack off, only the tick Tuples should have been acked.
        ack_mock.assert_has_calls([mock.call(self.bolt, tup)
                                   for tup in self.tups
                                   if self.bolt.is_tick(tup)],
                                  any_order=True)
        self.assertEqual(ack_mock.call_count, 2)

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'raise_exception', new=lambda *a: None)
    @patch('sys.exit', autospec=True)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_on(self, fail_mock, exit_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail on (the default)
        self.bolt.run()

        # All waiting Tuples should have failed at this point
        fail_mock.assert_has_calls([mock.call(self.bolt, self.nontick_tups[0]),
                                    mock.call(self.bolt, self.nontick_tups[1]),
                                    mock.call(self.bolt, self.nontick_tups[2])],
                                   any_order=True)
        self.assertEqual(fail_mock.call_count, 3)
        self.assertEqual(exit_mock.call_count, 1)

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, 'raise_exception', new=lambda *a: None)
    @patch('sys.exit', autospec=True)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_off(self, fail_mock, exit_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail off
        self.bolt.auto_fail = False
        self.bolt.run()

        # With auto-fail off, no Tuples should have been failed at this point
        self.assertEqual(exit_mock.call_count, 1)
        self.assertListEqual(fail_mock.call_args_list, [])

    @patch.object(BatchingBolt, 'read_handshake', new=lambda x: ({}, {}))
    @patch('sys.exit', autospec=True)
    @patch.object(BatchingBolt, 'process_batch', autospec=True)
    @patch.object(BatchingBolt, 'fail', autospec=True)
    def test_auto_fail_partial(self, fail_mock, process_batch_mock, exit_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Change the group key to just be the sum of values, which makes 3 separate
        # batches
        self.bolt.group_key = lambda t: sum(t.values)
        # Make sure we fail on the second batch
        work = {'status': True} # to avoid scoping problems
        def work_once(*args):
            if work['status']:
                work['status'] = False
            else:
                raise Exception('borkt')
        process_batch_mock.side_effect = work_once
        # Run the batches
        self.bolt.run()
        # Only some Tuples should have failed at this point. The key is that
        # all un-acked Tuples should be failed, even for batches we haven't
        # started processing yet.
        self.assertEqual(fail_mock.call_count, 2)
        self.assertEqual(exit_mock.call_count, 1)

    @patch.object(BatchingBolt, 'read_tuple', autospec=True)
    @patch.object(BatchingBolt, 'send_message', autospec=True)
    def test_heartbeat_response(self, send_message_mock, read_tuple_mock):
        # Make sure we send sync for heartbeats
        read_tuple_mock.return_value = Tuple(id='foo', task=-1,
                                             stream='__heartbeat', values=[],
                                             component='__system')
        self.bolt._run()
        send_message_mock.assert_called_with(self.bolt, {'command': 'sync'})

    @patch.object(BatchingBolt, 'read_tuple', autospec=True)
    @patch.object(BatchingBolt, 'process_tick', autospec=True)
    def test_process_tick(self, process_tick_mock, read_tuple_mock):
        # Make sure process_tick gets called for tick Tuples
        read_tuple_mock.return_value = Tuple(id=None, task=-1,
                                             component='__system',
                                             stream='__tick', values=[50])
        self.bolt._run()
        process_tick_mock.assert_called_with(self.bolt,
                                             read_tuple_mock.return_value)
Example #3
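The same test case under another formatting style. Like Example #2 it has a single test_auto_fail_partial, but like Example #1 it asserts SystemExit directly instead of patching sys.exit.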
class BatchingBoltTests(unittest.TestCase):
    def setUp(self):
        self.ticks_between_batches = 1
        self.tup_dicts = [
            {"id": 14, "comp": "some_spout", "stream": "default", "task": "some_bolt", "tuple": [1, 2, 3]},
            {"id": 15, "comp": "some_spout", "stream": "default", "task": "some_bolt", "tuple": [4, 5, 6]},
            {"id": None, "comp": "__system", "stream": "__tick", "task": -1, "tuple": [1]},
            {"id": 16, "comp": "some_spout", "stream": "default", "task": "some_bolt", "tuple": [7, 8, 9]},
            {"id": None, "comp": "__system", "stream": "__tick", "task": -1, "tuple": [2]},
        ]
        tups_json = "\nend\n".join([json.dumps(tup_dict) for tup_dict in self.tup_dicts] + [""])
        self.tups = [
            Tuple(tup_dict["id"], tup_dict["comp"], tup_dict["stream"], tup_dict["task"], tuple(tup_dict["tuple"]))
            for tup_dict in self.tup_dicts
        ]
        self.nontick_tups = [tup for tup in self.tups if tup.stream != "__tick"]
        self.bolt = BatchingBolt(input_stream=BytesIO(tups_json.encode("utf-8")), output_stream=BytesIO())
        self.bolt.initialize({}, {})

    @patch.object(BatchingBolt, "process_batch", autospec=True)
    def test_batching(self, process_batch_mock):
        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_called_with(self.bolt, None, self.nontick_tups)

    @patch.object(BatchingBolt, "process_batch", autospec=True)
    def test_group_key(self, process_batch_mock):
        # Change the group key to even/odd grouping
        self.bolt.group_key = lambda t: sum(t.values) % 2

        # Add a bunch of Tuples
        for __ in self.tups:
            self.bolt._run()

        process_batch_mock.assert_has_calls(
            [
                mock.call(self.bolt, 0, [self.nontick_tups[0], self.nontick_tups[2]]),
                mock.call(self.bolt, 1, [self.nontick_tups[1]]),
            ],
            any_order=True,
        )

    @patch.object(BatchingBolt, "ack", autospec=True)
    @patch.object(BatchingBolt, "process_batch", new=lambda *args: None)
    def test_auto_ack_on(self, ack_mock):
        # Test auto-ack on (the default)
        for __ in self.tups:
            self.bolt._run()
        ack_mock.assert_has_calls([mock.call(self.bolt, tup) for tup in self.tups], any_order=True)
        self.assertEqual(ack_mock.call_count, 5)

    @patch.object(BatchingBolt, "ack", autospec=True)
    @patch.object(BatchingBolt, "process_batch", new=lambda *args: None)
    def test_auto_ack_off(self, ack_mock):
        # Test auto-ack off
        self.bolt.auto_ack = False
        for __ in self.tups:
            self.bolt._run()
        # With auto-ack off, only the tick Tuples should have been acked.
        ack_mock.assert_has_calls(
            [mock.call(self.bolt, tup) for tup in self.tups if self.bolt.is_tick(tup)], any_order=True
        )
        self.assertEqual(ack_mock.call_count, 2)

    @patch.object(BatchingBolt, "read_handshake", new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, "raise_exception", new=lambda *a: None)
    @patch.object(BatchingBolt, "fail", autospec=True)
    def test_auto_fail_on(self, fail_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail on (the default)
        with self.assertRaises(SystemExit):
            self.bolt.run()

        # All waiting Tuples should have failed at this point
        fail_mock.assert_has_calls(
            [
                mock.call(self.bolt, self.nontick_tups[0]),
                mock.call(self.bolt, self.nontick_tups[1]),
                mock.call(self.bolt, self.nontick_tups[2]),
            ],
            any_order=True,
        )
        self.assertEqual(fail_mock.call_count, 3)

    @patch.object(BatchingBolt, "read_handshake", new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, "raise_exception", new=lambda *a: None)
    @patch.object(BatchingBolt, "fail", autospec=True)
    def test_auto_fail_off(self, fail_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Test auto-fail off
        self.bolt.auto_fail = False
        with self.assertRaises(SystemExit):
            self.bolt.run()

        # With auto-fail off, no Tuples should have been failed at this point
        self.assertListEqual(fail_mock.call_args_list, [])

    @patch.object(BatchingBolt, "read_handshake", new=lambda x: ({}, {}))
    @patch.object(BatchingBolt, "process_batch", autospec=True)
    @patch.object(BatchingBolt, "fail", autospec=True)
    def test_auto_fail_partial(self, fail_mock, process_batch_mock):
        # Need to re-register signal handler with mocked version, because
        # mock gets created after handler was originally registered.
        self.setUp()
        # Change the group key to just be the sum of values, which makes 3 separate
        # batches
        self.bolt.group_key = lambda t: sum(t.values)
        # Make sure we fail on the second batch
        work = {"status": True}  # to avoid scoping problems

        def work_once(*args):
            if work["status"]:
                work["status"] = False
            else:
                raise Exception("borkt")

        process_batch_mock.side_effect = work_once
        # Run the batches
        with self.assertRaises(SystemExit):
            self.bolt.run()
        # Only some Tuples should have failed at this point. The key is that
        # all un-acked Tuples should be failed, even for batches we haven't
        # started processing yet.
        self.assertEqual(fail_mock.call_count, 2)

    @patch.object(BatchingBolt, "read_tuple", autospec=True)
    @patch.object(BatchingBolt, "send_message", autospec=True)
    def test_heartbeat_response(self, send_message_mock, read_tuple_mock):
        # Make sure we send sync for heartbeats
        read_tuple_mock.return_value = Tuple(id="foo", task=-1, stream="__heartbeat", values=(), component="__system")
        self.bolt._run()
        send_message_mock.assert_called_with(self.bolt, {"command": "sync"})

    @patch.object(BatchingBolt, "read_tuple", autospec=True)
    @patch.object(BatchingBolt, "process_tick", autospec=True)
    def test_process_tick(self, process_tick_mock, read_tuple_mock):
        # Make sure process_tick gets called for tick Tuples
        read_tuple_mock.return_value = Tuple(id=None, task=-1, component="__system", stream="__tick", values=(50,))
        self.bolt._run()
        process_tick_mock.assert_called_with(self.bolt, read_tuple_mock.return_value)
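
All three examples exercise the same BatchingBolt contract: group_key(tup) chooses a batch key for each incoming Tuple, ticks_between_batches controls how many __tick Tuples must arrive before a flush, process_batch(key, tups) receives every non-tick Tuple accumulated for that key, and auto_ack/auto_fail govern acknowledgement when a batch succeeds or raises. A minimal sketch of a concrete subclass, assuming a pystorm-style BatchingBolt whose import path the examples do not show:

from pystorm import BatchingBolt  # assumed import path; the examples omit their imports


class SumBatchingBolt(BatchingBolt):
    # Flush a batch after every tick Tuple, matching the behaviour the tests rely on
    ticks_between_batches = 1

    def initialize(self, storm_conf, context):
        # Called once with the topology config and context, as in the tests above
        self.totals = {}

    def group_key(self, tup):
        # Batch Tuples by the parity of their value sum, mirroring test_group_key
        return sum(tup.values) % 2

    def process_batch(self, key, tups):
        # Receives all non-tick Tuples gathered for `key` since the last flush;
        # with auto_ack on (the default) each Tuple is acked after this returns,
        # and with auto_fail on, pending Tuples are failed if it raises.
        self.totals[key] = self.totals.get(key, 0) + sum(sum(t.values) for t in tups)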