def _process_signals(self, workflow, instance):
    """Process signals for a given workflow instance.

    Args:
        workflow: The workflow whose signals should be processed.
        instance: The instance whose signals should be processed.
    Returns:
        True if the worker should execute jobs in this instance.
        Otherwise False.
    """
    signaller = Signaller(self._client, workflow, instance)
    archiver = Archiver(self._client, workflow, instance)
    if signaller.is_action_set(Signal.EXIT):
        return False
    if (signaller.is_action_set(Signal.ARCHIVE) and
            self._is_done(workflow, instance)):
        # TODO(pawel): enable this for all workflows after we gain
        # confidence that the master has enough memory to delay workflow
        # archiving.
        if workflow == 'indexing':
            ARCHIVE_DELAY_SEC = 7 * 24 * 60 * 60  # 7 days
        else:
            ARCHIVE_DELAY_SEC = 12 * 60 * 60  # 12 hours
        expiration_timestamp = int(time.time()) + ARCHIVE_DELAY_SEC
        if signaller.set_attribute_if_missing(Signal.ARCHIVE,
                                              Signal.TIMESTAMP_ATTR,
                                              expiration_timestamp):
            self._send_instance_end_email(workflow, instance)
        else:
            expiration_timestamp = signaller.get_attribute(
                Signal.ARCHIVE, Signal.TIMESTAMP_ATTR)
            archiver.archive_if_expired(expiration_timestamp)
        return False
    if signaller.is_action_set(Signal.ABORT):
        if archiver.archive_if_aborted():
            self._send_instance_end_email(workflow, instance)
        return False
    if signaller.is_action_set(Signal.DRAIN):
        return False
    return True
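

# A minimal sketch of the two-phase ARCHIVE handling in _process_signals
# above, using a hypothetical in-memory store instead of the real Signaller
# API: the first worker to notice the finished instance records an
# expiration timestamp (and would send the end-of-instance email), while
# later workers read that timestamp back and archive once it has passed.
# _FakeSignalStore and _archive_delay_sketch are illustrative names only.
class _FakeSignalStore(object):
    """Hypothetical stand-in for Signaller attribute storage."""

    def __init__(self):
        self._attributes = {}

    def set_attribute_if_missing(self, signal, attr, value):
        # Only the first writer succeeds, mirroring the idempotent set in
        # _process_signals.
        if (signal, attr) in self._attributes:
            return False
        self._attributes[(signal, attr)] = value
        return True

    def get_attribute(self, signal, attr):
        return self._attributes.get((signal, attr))


def _archive_delay_sketch(store, now, delay_sec=12 * 60 * 60):
    """Return the expiration timestamp governing delayed archiving."""
    expiration_timestamp = int(now) + delay_sec
    if store.set_attribute_if_missing('ARCHIVE', 'timestamp',
                                      expiration_timestamp):
        # First worker: the delay is recorded; the end email would be sent
        # at this point.
        return expiration_timestamp
    # Later workers honor the timestamp recorded by the first worker, so
    # every worker converges on the same archive time.
    return store.get_attribute('ARCHIVE', 'timestamp')
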
class ArchiverTestCase(unittest.TestCase):
    def setUp(self):
        self._client = mock.Mock()
        self._archiver = Archiver(self._client, 'some_workflow', '123')
        self._job_token = Token(
            version=12345,
            name='/workflow/some_workflow/123/waiting/some_job')
        self._abort_token = Token(
            version=123456,
            name='/workflow/some_workflow/123/__SIGNAL__/ABORT')

    def _prepare_get_instance_tokens(self, response_tokens):
        query_response = QueryResponse([response_tokens])
        self._client.query.return_value = query_response

    def _verify_get_instance_tokens(self):
        query = Query(namePrefix='/workflow/some_workflow/123/')
        query_request = QueryRequest(queries=[query])
        self._client.query.assert_called_once_with(query_request)

    def _verify_archive_tokens(self, request_tokens):
        archive_request = ArchiveRequest(tokens=request_tokens)
        self._client.archive.assert_called_once_with(archive_request)

    def test_archive_if_expired_non_existent(self):
        self._prepare_get_instance_tokens([])
        self.assertFalse(self._archiver.archive_if_expired(10))
        self._verify_get_instance_tokens()

    def test_archive_not_expired(self):
        self._prepare_get_instance_tokens([])
        self.assertFalse(
            self._archiver.archive_if_expired(time.time() + 1000))
        self.assertEqual(0, self._client.query.call_count)

    def test_archive_expired(self):
        self._prepare_get_instance_tokens([self._job_token])
        self.assertTrue(self._archiver.archive_if_expired(10))
        self._verify_get_instance_tokens()
        self._verify_archive_tokens([self._job_token])

    def test_archive_if_aborted_not_aborted(self):
        self._prepare_get_instance_tokens([self._job_token])
        self.assertFalse(self._archiver.archive_if_aborted())
        self._verify_get_instance_tokens()

    def test_archive_if_aborted_owned(self):
        self._job_token.owner = 'some_owner'
        self._job_token.expirationTime = time.time() + 1000
        self._prepare_get_instance_tokens([self._job_token,
                                           self._abort_token])
        self.assertFalse(self._archiver.archive_if_aborted())
        self._verify_get_instance_tokens()

    def test_archive_if_aborted(self):
        self._prepare_get_instance_tokens([self._job_token,
                                           self._abort_token])
        self.assertTrue(self._archiver.archive_if_aborted())
        self._verify_get_instance_tokens()
        self._verify_archive_tokens([self._job_token, self._abort_token])
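

# A self-contained sketch of the staging/verification pattern the test case
# above relies on: the client is a mock.Mock(), query()'s return value is
# staged before the call under test, and the exact request objects passed to
# the client are asserted afterwards. The request/response strings here are
# placeholders, not real pinball Thrift types, and this helper is purely
# illustrative.
def _mock_client_pattern_sketch():
    client = mock.Mock()
    client.query.return_value = 'staged query response'

    # The code under test would issue these calls.
    response = client.query('query request')
    client.archive('archive request')

    # Verification mirrors _verify_get_instance_tokens and
    # _verify_archive_tokens: assert on the exact request objects sent.
    assert response == 'staged query response'
    client.query.assert_called_once_with('query request')
    client.archive.assert_called_once_with('archive request')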