Beispiel #1
0
def main():
    """Parse command-line options and start the Pinball master server.

    Reads the config file given by ``-c/--config_file`` (required) and
    serves on ``-p/--port`` if given, otherwise on the master port from
    the parsed configuration.
    """
    parser = argparse.ArgumentParser(
        description='Start Pinball master server.')
    parser.add_argument(
        '-c',
        '--config_file',
        dest='config_file',
        required=True,
        help='full path to the pinball setting configure file')
    parser.add_argument(
        '-p',
        '--port',
        dest='port',
        type=int,
        # Default to None rather than PinballConfig.MASTER_PORT: the config
        # file is parsed only after argparse runs and may override
        # MASTER_PORT.  Capturing MASTER_PORT as the argparse default froze
        # the pre-parse value, making the fallback branch below dead and
        # silently ignoring a port configured in the config file.
        default=None,
        help='port to run on')
    options = parser.parse_args(sys.argv[1:])

    PinballConfig.parse(options.config_file)
    # Explicit --port wins; otherwise use the (possibly config-overridden)
    # master port.  'is not None' also honors an explicit port of 0.
    master_port = (options.port if options.port is not None
                   else PinballConfig.MASTER_PORT)
    factory = Factory(master_port=master_port)

    # The reason why these imports are not at the top level is that some of the
    # imported code (db models initializing table names) depends on parameters
    # passed on the command line (master name).  Those imports need to be delayed
    # until after command line parameter parsing.
    from pinball.persistence.store import DbStore
    factory.create_master(DbStore())
    factory.run_master_server()
Beispiel #2
0
 def setUp(self):
     """Wire a worker to a freshly created in-memory master."""
     self._emailer = mock.Mock()
     self._factory = Factory()
     self._store = EphemeralStore()
     self._factory.create_master(self._store)
     worker_client = self._factory.get_client()
     self._worker = Worker(worker_client, self._store, self._emailer)
     self._client = self._factory.get_client()
Beispiel #3
0
 def setUp(self):
     """Create a scheduler hooked up to an ephemeral master."""
     self._factory = Factory()
     token_store = EphemeralStore()
     self._factory.create_master(token_store)
     mailer = Emailer('some_host', '8080')
     scheduler_client = self._factory.get_client()
     self._scheduler = Scheduler(scheduler_client, token_store, mailer)
     self._client = self._factory.get_client()
     self._post_schedule_token()
Beispiel #4
0
 def setUp(self):
     """Create a scheduler hooked up to an ephemeral master."""
     self._factory = Factory()
     token_store = EphemeralStore()
     self._factory.create_master(token_store)
     mailer = Emailer('some_host', '8080')
     scheduler_client = self._factory.get_client()
     self._scheduler = Scheduler(scheduler_client, token_store, mailer)
     self._client = self._factory.get_client()
     self._post_schedule_token()
Beispiel #5
0
 def setUp(self):
     """Wire a worker to a freshly created in-memory master."""
     self._emailer = mock.Mock()
     self._factory = Factory()
     self._store = EphemeralStore()
     self._factory.create_master(self._store)
     worker_client = self._factory.get_client()
     self._worker = Worker(worker_client, self._store, self._emailer)
     self._client = self._factory.get_client()
Beispiel #6
0
def main():
    """Parse command-line options and start the Pinball master server.

    Reads the config file given by ``-c/--config_file`` (required) and
    serves on ``-p/--port`` if given, otherwise on the master port from
    the parsed configuration.
    """
    parser = argparse.ArgumentParser(
        description='Start Pinball master server.')
    parser.add_argument(
        '-c',
        '--config_file',
        dest='config_file',
        required=True,
        help='full path to the pinball setting configure file')
    parser.add_argument(
        '-p',
        '--port',
        dest='port',
        type=int,
        # Default to None rather than PinballConfig.MASTER_PORT: the config
        # file is parsed only after argparse runs and may override
        # MASTER_PORT.  Capturing MASTER_PORT as the argparse default froze
        # the pre-parse value, making the fallback branch below dead and
        # silently ignoring a port configured in the config file.
        default=None,
        help='port to run on')
    options = parser.parse_args(sys.argv[1:])

    PinballConfig.parse(options.config_file)
    # Explicit --port wins; otherwise use the (possibly config-overridden)
    # master port.  'is not None' also honors an explicit port of 0.
    master_port = (options.port if options.port is not None
                   else PinballConfig.MASTER_PORT)
    factory = Factory(master_port=master_port)

    # The reason why these imports are not at the top level is that some of the
    # imported code (db models initializing table names) depends on parameters
    # passed on the command line (master name).  Those imports need to be delayed
    # until after command line parameter parsing.
    from pinball.persistence.store import DbStore
    factory.create_master(DbStore())
    factory.run_master_server()
Beispiel #7
0
class SignallerTestCase(unittest.TestCase):
    """Tests for Signaller: setting, reading, and removing signal actions
    at global, workflow, and workflow-instance scope."""

    def setUp(self):
        # Fresh in-memory master for each test.
        self._factory = Factory()
        self._factory.create_master(EphemeralStore())

    def _post_signal_tokens(self):
        """Add some signal tokens to the master.

        Posts EXIT at global scope, DRAIN at workflow and at instance
        scope, and ABORT at instance scope.
        """
        request = ModifyRequest(updates=[])

        signal = Signal(action=Signal.EXIT)
        name = Name(signal='exit')
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        signal = Signal(action=Signal.DRAIN)
        name.signal = 'drain'
        name.workflow = 'some_workflow'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        name.instance = '123'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        signal = Signal(action=Signal.ABORT)
        name.signal = 'abort'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        client = self._factory.get_client()
        client.modify(request)

    def test_is_action_set(self):
        """Actions are visible only at (or below) the scope they target."""
        client = self._factory.get_client()
        signaller = Signaller(client)
        self.assertFalse(signaller.is_action_set(Signal.EXIT))
        self.assertFalse(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        self._post_signal_tokens()

        signaller = Signaller(client)
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertFalse(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        signaller = Signaller(client, workflow='some_workflow')
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertTrue(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        signaller = Signaller(client, workflow='some_workflow', instance='123')
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertTrue(signaller.is_action_set(Signal.DRAIN))
        self.assertTrue(signaller.is_action_set(Signal.ABORT))

    def test_set_action(self):
        """A set action is visible to a new reader at the same scope."""
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        reading_signaller = Signaller(client)
        # New generation.
        self.assertFalse(reading_signaller.is_action_set(Signal.EXIT))
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION',
                        0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertFalse(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

        writing_signaller = Signaller(client, workflow='some_workflow')
        writing_signaller.set_action(Signal.DRAIN)
        reading_signaller = Signaller(client, workflow='some_workflow')
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION',
                        0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

        writing_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        writing_signaller.set_action(Signal.ABORT)
        reading_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION',
                        0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertTrue(reading_signaller.is_action_set(Signal.ABORT))

    def test_remove_action(self):
        """A removed action is gone for both the writer and new readers.

        Renamed from 'test_remove_sction' (typo); still matches unittest's
        'test*' discovery pattern.
        """
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        writing_signaller.remove_action(Signal.EXIT)
        self.assertFalse(writing_signaller.is_action_set(Signal.EXIT))
        reading_signaller = Signaller(client)
        self.assertFalse(reading_signaller.is_action_set(Signal.EXIT))

        writing_signaller = Signaller(client, workflow='some_workflow')
        writing_signaller.set_action(Signal.DRAIN)
        reading_signaller = Signaller(client, workflow='some_workflow')
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        writing_signaller.remove_action(Signal.DRAIN)
        self.assertFalse(writing_signaller.is_action_set(Signal.DRAIN))
        # Read back at the same (workflow) scope the action was written to.
        # Previously this reader was created without a workflow, so the
        # assertion passed trivially whether or not the removal worked.
        reading_signaller = Signaller(client, workflow='some_workflow')
        self.assertFalse(reading_signaller.is_action_set(Signal.DRAIN))

        writing_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        writing_signaller.set_action(Signal.ABORT)
        reading_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        self.assertTrue(reading_signaller.is_action_set(Signal.ABORT))
        writing_signaller.remove_action(Signal.ABORT)
        self.assertFalse(writing_signaller.is_action_set(Signal.ABORT))
        # Same fix as above: verify at the instance scope that was written.
        reading_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

    def test_get_attribute(self):
        """Setting an action records the current generation attribute."""
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        self.assertEqual(
            PinballConfig.GENERATION,
            writing_signaller.get_attribute(Signal.EXIT,
                                            Signal.GENERATION_ATTR))
        reading_signaller = Signaller(client)
        self.assertEqual(
            PinballConfig.GENERATION,
            reading_signaller.get_attribute(Signal.EXIT,
                                            Signal.GENERATION_ATTR))

    def test_set_attribute_if_missing(self):
        """An attribute is set only once and only on an existing action."""
        client = self._factory.get_client()

        writing_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')

        # No ARCHIVE action yet, so the attribute cannot be attached.
        self.assertFalse(
            writing_signaller.set_attribute_if_missing(Signal.ARCHIVE,
                                                       Signal.TIMESTAMP_ATTR,
                                                       12345))

        writing_signaller.set_action(Signal.ARCHIVE)
        self.assertTrue(
            writing_signaller.set_attribute_if_missing(Signal.ARCHIVE,
                                                       Signal.TIMESTAMP_ATTR,
                                                       12345))
        self.assertEqual(
            12345,
            writing_signaller.get_attribute(Signal.ARCHIVE,
                                            Signal.TIMESTAMP_ATTR))

        # A second set must not overwrite the existing value.
        self.assertFalse(
            writing_signaller.set_attribute_if_missing(Signal.ARCHIVE,
                                                       Signal.TIMESTAMP_ATTR,
                                                       123456))

        reading_signaller = Signaller(client,
                                      workflow='some_workflow',
                                      instance='123')
        self.assertEqual(
            12345,
            reading_signaller.get_attribute(Signal.ARCHIVE,
                                            Signal.TIMESTAMP_ATTR))
Beispiel #8
0
class WorkerTestCase(unittest.TestCase):
    """Integration tests for Worker against an in-memory (ephemeral) master.

    The fixture builds a two-job workflow ('parent_job' -> 'child_job' in
    workflow 'some_workflow', instance '12345') and drives the worker
    through token ownership, waiting/runnable transitions, job execution
    (with a mocked JobExecutor), failure emails, and archiving.
    """

    def setUp(self):
        # Fresh master + store per test; the worker and the test share the
        # same store but use separately obtained clients.
        self._factory = Factory()
        self._store = EphemeralStore()
        self._factory.create_master(self._store)
        self._emailer = mock.Mock()
        self._worker = Worker(self._factory.get_client(), self._store,
                              self._emailer)
        self._client = self._factory.get_client()

    def _get_parent_job_token(self):
        """Build a waiting-state job token for the upstream 'parent_job'."""
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='parent_job')
        job = ShellJob(name=name.job,
                       inputs=[Name.WORKFLOW_START_INPUT],
                       outputs=['child_job'],
                       command='echo parent',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _get_child_job_token(self):
        """Build a waiting-state job token for the downstream 'child_job'."""
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job_state=Name.WAITING_STATE,
                    job='child_job')
        job = ShellJob(name=name.job,
                       inputs=['parent_job'],
                       outputs=[],
                       command='echo child',
                       emails=['*****@*****.**'])
        return Token(name=name.get_job_token_name(), data=pickle.dumps(job))

    def _post_job_tokens(self):
        """Add waiting job tokens to the master."""
        request = ModifyRequest(updates=[])
        request.updates.append(self._get_parent_job_token())
        request.updates.append(self._get_child_job_token())
        self._client.modify(request)

    def _post_workflow_start_event_token(self):
        """Post the start event that makes 'parent_job' triggerable."""
        name = Name(workflow='some_workflow',
                    instance='12345',
                    job='parent_job',
                    input_name=Name.WORKFLOW_START_INPUT,
                    event='workflow_start_event')
        event = Event(creator='SimpleWorkflowTest')
        token = Token(name=name.get_event_token_name(),
                      data=pickle.dumps(event))
        request = ModifyRequest(updates=[token])
        self._client.modify(request)

    def _verify_token_names(self, names):
        """Assert the master holds exactly the tokens named in `names`.

        NOTE(review): the `names` parameter is rebound to its sorted copy
        below; harmless here, but easy to misread.
        """
        request = GroupRequest(namePrefix='/workflow/')
        response = self._client.group(request)
        names = sorted(names)
        counts = sorted(response.counts.keys())
        self.assertEqual(names, counts)

    def _verify_archived_token_names(self, names):
        """Assert exactly `names` are stored but no longer active
        (i.e. archived) in the store."""
        active_tokens = self._store.read_active_tokens()
        all_tokens = self._store.read_tokens()
        archived_token_names = []
        for token in all_tokens:
            if not token in active_tokens:
                archived_token_names.append(token.name)
        names = sorted(names)
        archived_token_names = sorted(archived_token_names)
        self.assertEqual(names, archived_token_names)

    def _get_token(self, name):
        """Fetch the single token with the given name prefix via the client."""
        query = Query(namePrefix=name)
        request = QueryRequest(queries=[query])
        response = self._client.query(request)
        self.assertEqual(1, len(response.tokens))
        self.assertEqual(1, len(response.tokens[0]))
        return response.tokens[0][0]

    def _get_stored_token(self, name):
        """Fetch the single token with the given name prefix from the store
        (includes archived tokens, unlike _get_token)."""
        tokens = self._store.read_tokens(name_prefix=name)
        self.assertEqual(1, len(tokens))
        return tokens[0]

    def _verify_parent_job_waiting(self):
        """Assert both jobs are waiting and the start event is still posted."""
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job='parent_job',
                 input_name=Name.WORKFLOW_START_INPUT,
                 event='workflow_start_event').get_event_token_name()]
        self._verify_token_names(token_names)

    def _verify_parent_job_runnable(self):
        """Assert the parent is runnable, the child still waiting, and the
        consumed start event is gone."""
        token_names = [Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.RUNNABLE_STATE,
                            job='parent_job').get_job_token_name(),
                       Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='child_job').get_job_token_name()]
        self._verify_token_names(token_names)

    def test_get_triggering_events(self):
        """One event is picked from each input's event list."""
        self.assertEqual([], Worker._get_triggering_events([]))

        self.assertEqual(['a'], Worker._get_triggering_events([['a']]))

        # The choice within a list is unspecified, so accept either.
        events = Worker._get_triggering_events([['a', 'b']])
        self.assertTrue(events == ['a'] or events == ['b'])

        events = Worker._get_triggering_events([['a', 'b'], ['1', '2']])
        self.assertTrue(events == ['a', '1'] or events == ['a', '2'] or
                        events == ['b', '1'] or events == ['b', '2'])

    def test_move_job_token_to_runnable(self):
        """Moving a job to runnable consumes its triggering event token."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        job_name = Name(workflow='some_workflow',
                        instance='12345',
                        job_state=Name.WAITING_STATE,
                        job='parent_job')
        job_token = self._get_token(job_name.get_job_token_name())
        event_name = Name(workflow='some_workflow',
                          instance='12345',
                          job='parent_job',
                          input_name=Name.WORKFLOW_START_INPUT,
                          event='workflow_start_event')
        event_token = self._get_token(event_name.get_event_token_name())
        self._worker._move_job_token_to_runnable(job_token, [event_token])
        # Event token should have been removed and the parent job should be
        # runnable.
        self._verify_parent_job_runnable()

    def test_make_job_runnable(self):
        """Only a job with all triggering events present becomes runnable."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        parent_job_name = Name(workflow='some_workflow',
                               instance='12345',
                               job_state=Name.WAITING_STATE,
                               job='parent_job').get_job_token_name()
        child_job_name = Name(workflow='some_workflow',
                              instance='12345',
                              job_state=Name.WAITING_STATE,
                              job='child_job').get_job_token_name()

        parent_job_token = self._get_token(parent_job_name)
        child_job_token = self._get_token(child_job_name)

        self._worker._make_job_runnable(child_job_token)
        # Child job is missing triggering tokens so it cannot be made runnable.
        self._verify_parent_job_waiting()

        self._worker._make_job_runnable(parent_job_token)
        # Parent job has all triggering tokens so it can be made runnable.
        self._verify_parent_job_runnable()

    def test_make_runnable(self):
        """_make_runnable only affects the named workflow instance."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        self._worker._make_runnable('some_other_workflow', '12345')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', 'some_other_instance')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', '12345')
        self._verify_parent_job_runnable()

    def test_own_runnable_job_token(self):
        """The worker claims a job token only once it is runnable."""
        self._post_job_tokens()

        self._worker._own_runnable_job_token()
        # Event token is not present so nothing should have changed.
        token_names = [Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='parent_job').get_job_token_name(),
                       Name(workflow='some_workflow',
                            instance='12345',
                            job_state=Name.WAITING_STATE,
                            job='child_job').get_job_token_name()]
        self._verify_token_names(token_names)
        self.assertIsNone(self._worker._owned_job_token)

        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        # Worker should now own a runnable job token.
        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        self.assertEqual(parent_token, self._worker._owned_job_token)

    def _add_history_to_owned_token(self):
        """Append a finished execution record to the owned job token's data."""
        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

    def test_get_output_event_tokens(self):
        """A finished parent job produces one event token per output,
        addressed to the child's matching input."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())

    def test_move_job_token_to_waiting(self):
        """Moving a finished job back to waiting preserves its history."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)
        self._worker._owned_job_token.data = pickle.dumps(job)

        self._worker._move_job_token_to_waiting(job, True)

        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual(execution_record.start_time,
                         job.history[0].start_time)

    def test_keep_job_token_in_runnable(self):
        """Keeping a job runnable persists its updated history in place."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')

        self._worker._keep_job_token_in_runnable(job)

        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual('some_historic_record', job.history[0])

    @staticmethod
    def _from_job(workflow, instance, job_name, job, data_builder, emailer):
        """Stand-in for JobExecutor.from_job: return a mock executor whose
        job carries one successful execution record.  The unused parameters
        mirror the real from_job signature so it can serve as a
        side_effect."""
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        executed_job = copy.copy(job)
        executed_job.history.append(execution_record)
        job_executor = mock.Mock()
        job_executor.job = executed_job
        job_executor.prepare.return_value = True
        job_executor.execute.return_value = True
        return job_executor

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_execute_job(self, job_executor_mock):
        """Executing the owned job releases it and records the execution."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        # NOTE(review): the two lines below set a return_value that the
        # side_effect assignment immediately overrides — mock gives
        # side_effect precedence — so they appear to be dead setup.
        job_executor = mock.Mock()
        job_executor_mock.from_job.return_value = job_executor

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._execute_job()

        self.assertIsNone(self._worker._owned_job_token)
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

    def test_send_instance_end_email(self):
        """The end-of-instance email goes to the schedule's recipients with
        the instance and job data from the data builder."""
        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        instance_data = mock.Mock()
        data_builder.get_instance.return_value = instance_data

        job_data = mock.Mock()
        data_builder.get_jobs.return_value = [job_data]

        self._worker._send_instance_end_email('some_workflow', '12345')

        self._emailer.send_instance_end_message.assert_called_once_with(
            ['*****@*****.**'], instance_data, [job_data])

    def test_send_job_failure_emails(self):
        """Failure emails combine the job's own recipients with the
        schedule's recipients."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')
        executor = mock.Mock()
        self._worker._executor = executor
        executor.job = job

        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        execution_data = mock.Mock()
        data_builder.get_execution.return_value = execution_data

        self._worker._send_job_failure_emails(True)

        self._emailer.send_job_execution_end_message.assert_any_call(
            ['*****@*****.**',
             '*****@*****.**'], execution_data)

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_run(self, job_executor_mock):
        """A full run executes both jobs and, a (simulated) day later, the
        archiver moves the instance's tokens out of the active set."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._test_only_end_if_no_runnable = True
        self._worker.run()
        with mock.patch('pinball.workflow.archiver.time') as time_patch:
            # add one day
            time_patch.time.return_value = time.time() + 24 * 60 * 60
            self._worker.run()

        parent_job_token_name = Name(workflow='some_workflow',
                                     instance='12345',
                                     job_state=Name.WAITING_STATE,
                                     job='parent_job').get_job_token_name()
        child_job_token_name = Name(workflow='some_workflow',
                                    instance='12345',
                                    job_state=Name.WAITING_STATE,
                                    job='child_job').get_job_token_name()
        signal_string = Signal.action_to_string(Signal.ARCHIVE)
        signal_token_name = Name(workflow='some_workflow',
                                 instance='12345',
                                 signal=signal_string).get_signal_token_name()

        token_names = [parent_job_token_name,
                       child_job_token_name,
                       signal_token_name]
        self._verify_archived_token_names(token_names)

        self.assertEqual(2, job_executor_mock.from_job.call_count)

        parent_token = self._get_stored_token(parent_job_token_name)
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        child_token = self._get_stored_token(child_job_token_name)
        job = pickle.loads(child_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        signal_token = self._get_stored_token(signal_token_name)
        signal = pickle.loads(signal_token.data)
        self.assertEqual(Signal.ARCHIVE, signal.action)
Beispiel #9
0
class SignallerTestCase(unittest.TestCase):
    """Tests for Signaller: setting, reading, and removing signal tokens.

    The tests post signal tokens at three scopes (global, workflow, workflow
    instance) and verify that signals set at an outer scope are visible to
    signallers created for nested scopes.
    """
    def setUp(self):
        self._factory = Factory()
        self._factory.create_master(EphemeralStore())

    def _post_signal_tokens(self):
        """Add some signal tokens to the master."""
        request = ModifyRequest(updates=[])

        # Top-level EXIT signal.
        signal = Signal(action=Signal.EXIT)
        name = Name(signal='exit')
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        # Workflow-level DRAIN signal (the Name object is mutated in place).
        signal = Signal(action=Signal.DRAIN)
        name.signal = 'drain'
        name.workflow = 'some_workflow'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        # Instance-level DRAIN signal.
        name.instance = '123'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        # Instance-level ABORT signal.
        signal = Signal(action=Signal.ABORT)
        name.signal = 'abort'
        signal_token = Token(name=name.get_signal_token_name())
        signal_token.data = pickle.dumps(signal)
        request.updates.append(signal_token)

        client = self._factory.get_client()
        client.modify(request)

    def test_is_action_set(self):
        """Signals posted at outer scopes are visible to nested signallers."""
        client = self._factory.get_client()
        signaller = Signaller(client)
        self.assertFalse(signaller.is_action_set(Signal.EXIT))
        self.assertFalse(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        self._post_signal_tokens()

        signaller = Signaller(client)
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertFalse(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        signaller = Signaller(client, workflow='some_workflow')
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertTrue(signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(signaller.is_action_set(Signal.ABORT))

        signaller = Signaller(client, workflow='some_workflow', instance='123')
        self.assertTrue(signaller.is_action_set(Signal.EXIT))
        self.assertTrue(signaller.is_action_set(Signal.DRAIN))
        self.assertTrue(signaller.is_action_set(Signal.ABORT))

    def test_set_action(self):
        """Actions set through one signaller are visible to fresh readers.

        The EXIT signal carries a generation attribute: readers only honor it
        for matching (old) generations, hence the PinballConfig.GENERATION
        patching below.
        """
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        reading_signaller = Signaller(client)
        # New generation.
        self.assertFalse(reading_signaller.is_action_set(Signal.EXIT))
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION', 0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertFalse(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

        writing_signaller = Signaller(client, workflow='some_workflow')
        writing_signaller.set_action(Signal.DRAIN)
        reading_signaller = Signaller(client, workflow='some_workflow')
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION', 0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

        writing_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')
        writing_signaller.set_action(Signal.ABORT)
        reading_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')
        # Old generation.
        with mock.patch('pinball.workflow.signaller.PinballConfig.GENERATION', 0):
            self.assertTrue(reading_signaller.is_action_set(Signal.EXIT))
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        self.assertTrue(reading_signaller.is_action_set(Signal.ABORT))

    def test_remove_action(self):
        """Removed actions stop being reported by both writers and readers.

        Note: renamed from test_remove_sction to fix the typo in the name.
        """
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        writing_signaller.remove_action(Signal.EXIT)
        self.assertFalse(writing_signaller.is_action_set(Signal.EXIT))
        reading_signaller = Signaller(client)
        self.assertFalse(reading_signaller.is_action_set(Signal.EXIT))

        writing_signaller = Signaller(client, workflow='some_workflow')
        writing_signaller.set_action(Signal.DRAIN)
        reading_signaller = Signaller(client, workflow='some_workflow')
        self.assertTrue(reading_signaller.is_action_set(Signal.DRAIN))
        writing_signaller.remove_action(Signal.DRAIN)
        self.assertFalse(writing_signaller.is_action_set(Signal.DRAIN))
        reading_signaller = Signaller(client)
        self.assertFalse(reading_signaller.is_action_set(Signal.DRAIN))

        writing_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')
        writing_signaller.set_action(Signal.ABORT)
        reading_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')
        self.assertTrue(reading_signaller.is_action_set(Signal.ABORT))
        writing_signaller.remove_action(Signal.ABORT)
        self.assertFalse(writing_signaller.is_action_set(Signal.ABORT))
        reading_signaller = Signaller(client)
        self.assertFalse(reading_signaller.is_action_set(Signal.ABORT))

    def test_get_attribute(self):
        """Setting EXIT stamps the current generation as a signal attribute."""
        client = self._factory.get_client()

        writing_signaller = Signaller(client)
        writing_signaller.set_action(Signal.EXIT)
        self.assertEqual(PinballConfig.GENERATION,
                         writing_signaller.get_attribute(
                             Signal.EXIT,
                             Signal.GENERATION_ATTR))
        reading_signaller = Signaller(client)
        self.assertEqual(PinballConfig.GENERATION,
                         reading_signaller.get_attribute(
                             Signal.EXIT,
                             Signal.GENERATION_ATTR))

    def test_set_attribute_if_missing(self):
        """An attribute is only stored once; later writes are rejected."""
        client = self._factory.get_client()

        writing_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')

        # Setting an attribute on a signal that does not exist yet fails.
        self.assertFalse(writing_signaller.set_attribute_if_missing(
                         Signal.ARCHIVE, Signal.TIMESTAMP_ATTR, 12345))

        writing_signaller.set_action(Signal.ARCHIVE)
        self.assertTrue(writing_signaller.set_attribute_if_missing(
                        Signal.ARCHIVE, Signal.TIMESTAMP_ATTR, 12345))
        self.assertEqual(12345,
                         writing_signaller.get_attribute(
                             Signal.ARCHIVE,
                             Signal.TIMESTAMP_ATTR))

        # The attribute is already present, so this second write is a no-op.
        self.assertFalse(writing_signaller.set_attribute_if_missing(
                         Signal.ARCHIVE, Signal.TIMESTAMP_ATTR, 123456))

        reading_signaller = Signaller(client, workflow='some_workflow',
                                      instance='123')
        self.assertEqual(12345,
                         reading_signaller.get_attribute(
                             Signal.ARCHIVE,
                             Signal.TIMESTAMP_ATTR))
Beispiel #10
0
class SchedulerTestCase(unittest.TestCase):
    """Tests for Scheduler token ownership and the run-or-reschedule logic.

    This variant exercises the list-based ownership API
    (_own_schedule_token_list / _owned_schedule_token_list).  The master is
    backed by an in-memory EphemeralStore and seeded with a single, already
    overdue schedule token for 'workflow_0'.
    """
    def setUp(self):
        self._factory = Factory()
        store = EphemeralStore()
        self._factory.create_master(store)
        emailer = Emailer('some_host', '8080')
        self._scheduler = Scheduler(self._factory.get_client(), store, emailer)
        self._client = self._factory.get_client()
        self._post_schedule_token()

    @staticmethod
    def _get_schedule_token():
        """Build a schedule token for workflow_0 that is already overdue."""
        name = Name(workflow='workflow_0')
        now = int(time.time())
        # An expiration time in the past makes the token claimable right away.
        token = Token(name=name.get_workflow_schedule_token_name(),
                      owner='some_owner',
                      expirationTime=now - 10)
        schedule = WorkflowSchedule(next_run_time=now - 10,
                                    recurrence_seconds=10,
                                    workflow='workflow_0')
        token.data = pickle.dumps(schedule)
        return token

    def _post_schedule_token(self):
        """Add schedule token to the master."""
        request = ModifyRequest()
        request.updates = [SchedulerTestCase._get_schedule_token()]
        self._client.modify(request)

    def test_own_schedule_token(self):
        """Claiming schedule tokens populates the owned token list."""
        self._scheduler._own_schedule_token_list()
        self.assertIsNotNone(self._scheduler._owned_schedule_token_list)

    def test_advance_schedule(self):
        """Advancing pushes expiration and next run time into the future."""
        self._scheduler._own_schedule_token_list()
        self._scheduler._owned_schedule_token = \
            self._scheduler._owned_schedule_token_list[0]
        token = self._scheduler._owned_schedule_token

        owned_schedule = pickle.loads(token.data)
        self._scheduler._advance_schedule(owned_schedule)
        now = int(time.time())
        self.assertGreater(token.expirationTime, now - 10)
        schedule = pickle.loads(token.data)
        # The token expires exactly when the next run becomes due.
        self.assertEqual(token.expirationTime, schedule.next_run_time)

    def test_run_or_reschedule_incorrect_expiration_time(self):
        """A next run time beyond the token expiration trips an assertion."""
        self._scheduler._own_schedule_token_list()
        self._scheduler._owned_schedule_token = \
            self._scheduler._owned_schedule_token_list[0]
        token = self._scheduler._owned_schedule_token

        schedule = pickle.loads(token.data)
        schedule.next_run_time = int(time.time() + 1000)
        token.data = pickle.dumps(schedule)
        self.assertRaises(AssertionError, self._scheduler._run_or_reschedule)

    def _run_or_reschedule(self,
                           overrun_policy,
                           is_running=True,
                           is_failed=True,
                           is_abort_called=False):
        """Run the scheduler once under the given overrun policy.

        Installs a MockWorkflowSchedule that reports the given running/failed
        workflow state, invokes _run_or_reschedule, and verifies that the
        owned token was pushed into the future and whether abort was invoked.
        """
        self._scheduler._own_schedule_token_list()
        self._scheduler._owned_schedule_token = \
            self._scheduler._owned_schedule_token_list[0]
        token = self._scheduler._owned_schedule_token

        schedule = MockWorkflowSchedule(is_running, is_failed)
        schedule.overrun_policy = overrun_policy
        token.data = pickle.dumps(schedule)
        token.expirationTime = schedule.next_run_time
        old_expiration_time = token.expirationTime

        self._scheduler._run_or_reschedule()

        token = self._scheduler._owned_schedule_token
        new_expiration_time = token.expirationTime
        self.assertGreater(new_expiration_time, old_expiration_time)
        schedule = pickle.loads(token.data)
        self.assertEqual(is_abort_called, schedule.abort_called)

    def test_run_START_NEW(self):
        """START_NEW issues a run request even while an instance is running."""
        self._run_or_reschedule(OverrunPolicy.START_NEW)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_SKIP(self):
        """SKIP issues no run request while an instance is still running."""
        self._run_or_reschedule(OverrunPolicy.SKIP)
        self.assertIsNone(self._scheduler._request)

    def test_run_ABORT_RUNNING(self):
        """ABORT_RUNNING aborts the running instance and requests a new run."""
        self._run_or_reschedule(OverrunPolicy.ABORT_RUNNING,
                                is_abort_called=True)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_DELAY(self):
        """DELAY postpones the run while an instance is still running."""
        self._run_or_reschedule(OverrunPolicy.DELAY)
        token = self._scheduler._owned_schedule_token
        schedule = pickle.loads(token.data)
        self.assertLess(schedule.next_run_time, token.expirationTime)
        self.assertIsNone(self._scheduler._request)

    def test_run_DELAY(self):
        """DELAY issues a run request when nothing is currently running."""
        self._run_or_reschedule(OverrunPolicy.DELAY, is_running=False)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_DELAY_UNTIL_SUCCESS(self):
        """DELAY_UNTIL_SUCCESS postpones when the previous run failed."""
        self._run_or_reschedule(OverrunPolicy.DELAY_UNTIL_SUCCESS,
                                is_running=False,
                                is_failed=True)
        token = self._scheduler._owned_schedule_token
        schedule = pickle.loads(token.data)
        self.assertLess(schedule.next_run_time, token.expirationTime)
        self.assertIsNone(self._scheduler._request)

    def test_run_DELAY_UNTIL_SUCCESS(self):
        """DELAY_UNTIL_SUCCESS runs when idle and the previous run succeeded."""
        self._run_or_reschedule(OverrunPolicy.DELAY_UNTIL_SUCCESS,
                                is_running=False,
                                is_failed=False)
        self.assertIsNotNone(self._scheduler._request)
Beispiel #11
0
def main():
    """Command-line client for a running Pinball master.

    Parses connection options plus a command name and its arguments,
    executes the selected command against the master, and prints the
    command's output to stdout.
    """
    parser = argparse.ArgumentParser(
        description='Interact with Pinball master server.')
    parser.add_argument('-p',
                        '--port',
                        dest='port',
                        type=int,
                        default=PinballConfig.MASTER_PORT,
                        help='port of the pinball master server')
    parser.add_argument('-s',
                        '--host',
                        dest='host',
                        default='localhost',
                        help='hostname of the pinball master server')
    parser.add_argument('-f',
                        '--force',
                        dest='force',
                        action='store_true',
                        default=False,
                        help='do not ask for confirmation')
    parser.add_argument('-r',
                        '--recursive',
                        dest='recursive',
                        action='store_true',
                        default=False,
                        help='perform the operation recursively')
    parser.add_argument('-n',
                        '--name',
                        dest='name',
                        help='token name')
    parser.add_argument('-v',
                        '--version',
                        dest='version',
                        type=int,
                        help='token version')
    parser.add_argument('-o',
                        '--owner',
                        dest='owner',
                        help='token owner; must be provided if '
                             'expiration_time is set')
    parser.add_argument('-t',
                        '--expiration_time',
                        dest='expiration_time',
                        type=int,
                        help='ownership expiration time in seconds since '
                             'epoch; must be provided if owner is set')
    parser.add_argument('-d',
                        '--data',
                        dest='data',
                        help='token data')
    parser.add_argument('-i',
                        '--priority',
                        dest='priority',
                        type=float,
                        default=0,
                        help='token priority')
    parser.add_argument('command',
                        choices=_COMMANDS.keys(),
                        help='command name')
    parser.add_argument('command_args',
                        nargs='*')
    options = parser.parse_args(sys.argv[1:])

    # Look up the command class registered under the given name and let it
    # pull whatever options it needs before connecting to the master.
    command = _COMMANDS[options.command]()
    command.prepare(options)
    factory = Factory(master_hostname=options.host, master_port=options.port)
    client = factory.get_client()
    # Python 2 print statement: emit the command's result to stdout.
    print command.execute(client, None)
Beispiel #12
0
def main():
    """Start the Pinball component selected by --mode.

    Runs one of: the master server, the scheduler, the worker pool, or the
    Django-based UI, as configured by the configuration file given with -c.
    Blocks until the component exits or a KeyboardInterrupt is received.
    """
    _register_signal_listener()

    parser = argparse.ArgumentParser(
        description='Start Pinball master and workers.')
    parser.add_argument(
        '-c',
        '--config_file',
        dest='config_file',
        required=True,
        help='full path to the pinball setting configure file')
    parser.add_argument(
        '-m',
        '--mode',
        dest='mode',
        choices=['master', 'scheduler', 'workers', 'ui'],
        default='master',
        help='execution mode')

    options = parser.parse_args(sys.argv[1:])
    PinballConfig.parse(options.config_file)

    if hasattr(PinballConfig, 'MASTER_NAME') and PinballConfig.MASTER_NAME:
        master_name(PinballConfig.MASTER_NAME)
    _pinball_imports()

    # Prefer the configured UI host for the emailer; fall back to the local
    # hostname when no UI host is configured.
    if PinballConfig.UI_HOST:
        emailer = Emailer(PinballConfig.UI_HOST, PinballConfig.UI_PORT)
    else:
        emailer = Emailer(socket.gethostname(), PinballConfig.UI_PORT)

    if options.mode == 'ui':
        hostport = '%s:%d' % (socket.gethostname(), PinballConfig.UI_PORT)
        cache_thread.start_cache_thread(DbStore())
        if not PinballConfig.UI_HOST:
            hostport = 'localhost:%d' % PinballConfig.UI_PORT

        # Disable reloader to prevent auto refresh on file changes.  The
        # problem with auto-refresh is that it starts multiple processes.  Some
        # of those processes will become orphans if we kill the UI in a wrong
        # way.
        management.call_command('runserver', hostport, interactive=False,
                                use_reloader=False)
        return

    factory = Factory(master_hostname=PinballConfig.MASTER_HOST,
                      master_port=PinballConfig.MASTER_PORT)
    threads = []
    if options.mode == 'master':
        factory.create_master(DbStore())
    elif options.mode == 'scheduler':
        threads.append(_create_scheduler(factory, emailer))
    else:
        assert options.mode == 'workers'
        # Reuse the emailer built above; the original code redundantly
        # constructed an identical Emailer in this branch.
        threads = _create_workers(PinballConfig.WORKERS, factory, emailer)

    try:
        if options.mode == 'master':
            factory.run_master_server()
        else:
            _wait_for_threads(threads)
    except KeyboardInterrupt:
        LOG.info('Exiting')
        sys.exit()
Beispiel #13
0
class InspectorTestCase(unittest.TestCase):
    """Exercise Inspector read queries against an in-memory master."""

    def setUp(self):
        factory = Factory()
        factory.create_master(EphemeralStore())
        self._factory = factory
        self._inspector = Inspector(factory.get_client())

    def _post_job_tokens(self):
        """Add some job tokens to the master."""
        name = Name(workflow='some_workflow', instance='12345')
        request = ModifyRequest(updates=[])
        # Even job ids become waiting jobs, odd ones runnable.
        for job_id in (0, 1):
            name.job_state = (Name.WAITING_STATE if job_id % 2 == 0
                              else Name.RUNNABLE_STATE)
            name.job = 'some_job_%d' % job_id
            request.updates.append(Token(name=name.get_job_token_name()))
        self._factory.get_client().modify(request)

    def _post_event_tokens(self):
        """Add some event tokens to the master."""
        name = Name(workflow='some_workflow', instance='12345')
        request = ModifyRequest(updates=[])
        # One event token per (job, input, event) combination.
        for job_id in (0, 1):
            name.job = 'some_job_%d' % job_id
            for input_id in (0, 1):
                name.input = 'some_input_%d' % input_id
                for event_id in (0, 1):
                    name.event = 'some_event_%d' % event_id
                    request.updates.append(
                        Token(name=name.get_event_token_name()))
        self._factory.get_client().modify(request)

    def test_inspect_empty_tree(self):
        """Every query comes back empty when no tokens have been posted."""
        inspector = self._inspector
        self.assertEqual([], inspector.get_workflow_names())
        self.assertEqual(
            [], inspector.get_workflow_instances('some_workflow'))
        self.assertEqual(
            [], inspector.get_waiting_job_names('some_workflow', '12345'))
        self.assertEqual(
            [], inspector.get_runnable_job_names('some_workflow', '12345'))
        self.assertEqual([], inspector.get_event_names(
            'some_workflow', '12345', 'some_job_0', 'some_input_0'))

    def test_inspect_non_empty_tree(self):
        """Posted job and event tokens are reported by the inspector."""
        self._post_job_tokens()
        self._post_event_tokens()
        inspector = self._inspector
        self.assertEqual(['some_workflow'], inspector.get_workflow_names())
        self.assertEqual(
            ['12345'], inspector.get_workflow_instances('some_workflow'))
        self.assertEqual(
            ['some_job_0'],
            inspector.get_waiting_job_names('some_workflow', '12345'))
        self.assertEqual(
            ['some_job_1'],
            inspector.get_runnable_job_names('some_workflow', '12345'))
        event_names = sorted(inspector.get_event_names(
            'some_workflow', '12345', 'some_job_0', 'some_input_0'))
        self.assertEqual(['some_event_0', 'some_event_1'], event_names)
Beispiel #14
0
 def setUp(self):
     """Stand up an in-memory master and attach an inspector client to it."""
     factory = Factory()
     factory.create_master(EphemeralStore())
     self._factory = factory
     self._inspector = Inspector(factory.get_client())
Beispiel #15
0
 def setUp(self):
     """Create a master server backed by an ephemeral in-memory store."""
     factory = Factory()
     factory.create_master(EphemeralStore())
     self._factory = factory
Beispiel #16
0
class SchedulerTestCase(unittest.TestCase):
    """Tests for Scheduler token ownership and the run-or-reschedule logic.

    This variant exercises the single-token ownership API
    (_own_schedule_token / _owned_schedule_token).  The master is backed by
    an in-memory EphemeralStore and seeded with one overdue schedule token.
    """
    def setUp(self):
        self._factory = Factory()
        store = EphemeralStore()
        self._factory.create_master(store)
        emailer = Emailer('some_host', '8080')
        self._scheduler = Scheduler(self._factory.get_client(), store, emailer)
        self._client = self._factory.get_client()
        self._post_schedule_token()

    @staticmethod
    def _get_schedule_token():
        """Build a schedule token for workflow_0 that is already overdue."""
        name = Name(workflow='workflow_0')
        now = int(time.time())
        # An expiration time in the past makes the token claimable right away.
        token = Token(name=name.get_workflow_schedule_token_name(),
                      owner='some_owner',
                      expirationTime=now - 10)
        schedule = WorkflowSchedule(next_run_time=now - 10,
                                    recurrence_seconds=10,
                                    workflow='workflow_0')
        token.data = pickle.dumps(schedule)
        return token

    def _post_schedule_token(self):
        """Add schedule token to the master."""
        request = ModifyRequest()
        request.updates = [SchedulerTestCase._get_schedule_token()]
        self._client.modify(request)

    def test_own_schedule_token(self):
        """Claiming the schedule token sets the owned-token attribute."""
        self._scheduler._own_schedule_token()
        self.assertIsNotNone(self._scheduler._owned_schedule_token)

    def test_advance_schedule(self):
        """Advancing pushes expiration and next run time into the future."""
        self._scheduler._own_schedule_token()
        token = self._scheduler._owned_schedule_token
        owned_schedule = pickle.loads(token.data)
        self._scheduler._advance_schedule(owned_schedule)
        now = int(time.time())
        self.assertGreater(token.expirationTime, now - 10)
        schedule = pickle.loads(token.data)
        # The token expires exactly when the next run becomes due.
        self.assertEqual(token.expirationTime, schedule.next_run_time)

    def test_run_or_reschedule_incorrect_expiration_time(self):
        """A next run time beyond the token expiration trips an assertion."""
        self._scheduler._own_schedule_token()
        token = self._scheduler._owned_schedule_token
        schedule = pickle.loads(token.data)
        schedule.next_run_time = int(time.time() + 1000)
        token.data = pickle.dumps(schedule)
        self.assertRaises(AssertionError, self._scheduler._run_or_reschedule)

    def _run_or_reschedule(self, overrun_policy, is_running=True,
                           is_failed=True, is_abort_called=False):
        """Run the scheduler once under the given overrun policy.

        Installs a MockWorkflowSchedule that reports the given running/failed
        workflow state, invokes _run_or_reschedule, and verifies that the
        owned token was pushed into the future and whether abort was invoked.
        """
        self._scheduler._own_schedule_token()
        token = self._scheduler._owned_schedule_token
        schedule = MockWorkflowSchedule(is_running, is_failed)
        schedule.overrun_policy = overrun_policy
        token.data = pickle.dumps(schedule)
        token.expirationTime = schedule.next_run_time
        old_expiration_time = token.expirationTime

        self._scheduler._run_or_reschedule()

        token = self._scheduler._owned_schedule_token
        new_expiration_time = token.expirationTime
        self.assertGreater(new_expiration_time, old_expiration_time)
        schedule = pickle.loads(token.data)
        self.assertEqual(is_abort_called, schedule.abort_called)

    def test_run_START_NEW(self):
        """START_NEW issues a run request even while an instance is running."""
        self._run_or_reschedule(OverrunPolicy.START_NEW)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_SKIP(self):
        """SKIP issues no run request while an instance is still running."""
        self._run_or_reschedule(OverrunPolicy.SKIP)
        self.assertIsNone(self._scheduler._request)

    def test_run_ABORT_RUNNING(self):
        """ABORT_RUNNING aborts the running instance and requests a new run."""
        self._run_or_reschedule(OverrunPolicy.ABORT_RUNNING,
                                is_abort_called=True)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_DELAY(self):
        """DELAY postpones the run while an instance is still running."""
        self._run_or_reschedule(OverrunPolicy.DELAY)
        token = self._scheduler._owned_schedule_token
        schedule = pickle.loads(token.data)
        self.assertLess(schedule.next_run_time, token.expirationTime)
        self.assertIsNone(self._scheduler._request)

    def test_run_DELAY(self):
        """DELAY issues a run request when nothing is currently running."""
        self._run_or_reschedule(OverrunPolicy.DELAY, is_running=False)
        self.assertIsNotNone(self._scheduler._request)

    def test_reschedule_DELAY_UNTIL_SUCCESS(self):
        """DELAY_UNTIL_SUCCESS postpones when the previous run failed."""
        self._run_or_reschedule(OverrunPolicy.DELAY_UNTIL_SUCCESS,
                                is_running=False, is_failed=True)
        token = self._scheduler._owned_schedule_token
        schedule = pickle.loads(token.data)
        self.assertLess(schedule.next_run_time, token.expirationTime)
        self.assertIsNone(self._scheduler._request)

    def test_run_DELAY_UNTIL_SUCCESS(self):
        """DELAY_UNTIL_SUCCESS runs when idle and the previous run succeeded."""
        self._run_or_reschedule(OverrunPolicy.DELAY_UNTIL_SUCCESS,
                                is_running=False, is_failed=False)
        self.assertIsNotNone(self._scheduler._request)
Beispiel #17
0
def main():
    """Command-line client for a running Pinball master.

    Parses connection options plus a command name and its arguments,
    executes the selected command against the master, and prints the
    command's output to stdout.
    """
    parser = argparse.ArgumentParser(
        description='Interact with Pinball master server.')
    parser.add_argument('-p',
                        '--port',
                        dest='port',
                        type=int,
                        default=PinballConfig.MASTER_PORT,
                        help='port of the pinball master server')
    parser.add_argument('-s',
                        '--host',
                        dest='host',
                        default='localhost',
                        help='hostname of the pinball master server')
    parser.add_argument('-f',
                        '--force',
                        dest='force',
                        action='store_true',
                        default=False,
                        help='do not ask for confirmation')
    parser.add_argument('-r',
                        '--recursive',
                        dest='recursive',
                        action='store_true',
                        default=False,
                        help='perform the operation recursively')
    parser.add_argument('-n', '--name', dest='name', help='token name')
    parser.add_argument('-v',
                        '--version',
                        dest='version',
                        type=int,
                        help='token version')
    parser.add_argument('-o',
                        '--owner',
                        dest='owner',
                        help='token owner; must be provided if '
                        'expiration_time is set')
    parser.add_argument('-t',
                        '--expiration_time',
                        dest='expiration_time',
                        type=int,
                        help='ownership expiration time in seconds since '
                        'epoch; must be provided if owner is set')
    parser.add_argument('-d', '--data', dest='data', help='token data')
    parser.add_argument('-i',
                        '--priority',
                        dest='priority',
                        type=float,
                        default=0,
                        help='token priority')
    parser.add_argument('command',
                        choices=_COMMANDS.keys(),
                        help='command name')
    parser.add_argument('command_args', nargs='*')
    options = parser.parse_args(sys.argv[1:])

    # Look up the command class registered under the given name and let it
    # pull whatever options it needs before connecting to the master.
    command = _COMMANDS[options.command]()
    command.prepare(options)
    factory = Factory(master_hostname=options.host, master_port=options.port)
    client = factory.get_client()
    # Python 2 print statement: emit the command's result to stdout.
    print command.execute(client, None)
Beispiel #18
0
 def setUp(self):
     """Spin up a fresh master over an in-memory EphemeralStore per test."""
     factory = Factory()
     factory.create_master(EphemeralStore())
     self._factory = factory
Beispiel #19
0
class WorkerTestCase(unittest.TestCase):
    def setUp(self):
        """Stand up an in-memory master and a worker connected to it."""
        self._factory = Factory()
        self._store = EphemeralStore()
        self._factory.create_master(self._store)
        self._emailer = mock.Mock()
        worker_client = self._factory.get_client()
        self._worker = Worker(worker_client, self._store, self._emailer)
        # The test keeps its own, separate client to inspect master state.
        self._client = self._factory.get_client()

    def _get_parent_job_token(self):
        """Build a waiting-state job token for the parent shell job."""
        token_name = Name(workflow='some_workflow',
                          instance='12345',
                          job_state=Name.WAITING_STATE,
                          job='parent_job')
        shell_job = ShellJob(name=token_name.job,
                             inputs=[Name.WORKFLOW_START_INPUT],
                             outputs=['child_job'],
                             command='echo parent',
                             emails=['*****@*****.**'])
        return Token(name=token_name.get_job_token_name(),
                     data=pickle.dumps(shell_job))

    def _get_child_job_token(self):
        """Build a waiting-state job token for the child shell job."""
        token_name = Name(workflow='some_workflow',
                          instance='12345',
                          job_state=Name.WAITING_STATE,
                          job='child_job')
        shell_job = ShellJob(name=token_name.job,
                             inputs=['parent_job'],
                             outputs=[],
                             command='echo child',
                             emails=['*****@*****.**'])
        return Token(name=token_name.get_job_token_name(),
                     data=pickle.dumps(shell_job))

    def _post_job_tokens(self):
        """Add waiting job tokens to the master."""
        tokens = [self._get_parent_job_token(), self._get_child_job_token()]
        self._client.modify(ModifyRequest(updates=tokens))

    def _post_workflow_start_event_token(self):
        """Post the event token that triggers the parent job's start input."""
        event_name = Name(workflow='some_workflow',
                          instance='12345',
                          job='parent_job',
                          input_name=Name.WORKFLOW_START_INPUT,
                          event='workflow_start_event')
        start_event = Event(creator='SimpleWorkflowTest')
        start_token = Token(name=event_name.get_event_token_name(),
                            data=pickle.dumps(start_event))
        self._client.modify(ModifyRequest(updates=[start_token]))

    def _verify_token_names(self, names):
        """Assert the master holds exactly the given workflow token names."""
        response = self._client.group(GroupRequest(namePrefix='/workflow/'))
        self.assertEqual(sorted(names), sorted(response.counts.keys()))

    def _verify_archived_token_names(self, names):
        active_tokens = self._store.read_active_tokens()
        all_tokens = self._store.read_tokens()
        archived_token_names = []
        for token in all_tokens:
            if not token in active_tokens:
                archived_token_names.append(token.name)
        names = sorted(names)
        archived_token_names = sorted(archived_token_names)
        self.assertEqual(names, archived_token_names)

    def _get_token(self, name):
        """Fetch the single token from the master whose name has this prefix."""
        request = QueryRequest(queries=[Query(namePrefix=name)])
        response = self._client.query(request)
        # Exactly one query with exactly one matching token is expected.
        self.assertEqual(1, len(response.tokens))
        self.assertEqual(1, len(response.tokens[0]))
        return response.tokens[0][0]

    def _get_stored_token(self, name):
        tokens = self._store.read_tokens(name_prefix=name)
        self.assertEqual(1, len(tokens))
        return tokens[0]

    def _verify_parent_job_waiting(self):
        """Check that both jobs still wait and the start event is present."""
        waiting_parent = Name(workflow='some_workflow',
                              instance='12345',
                              job_state=Name.WAITING_STATE,
                              job='parent_job').get_job_token_name()
        waiting_child = Name(workflow='some_workflow',
                             instance='12345',
                             job_state=Name.WAITING_STATE,
                             job='child_job').get_job_token_name()
        start_event = Name(workflow='some_workflow',
                           instance='12345',
                           job='parent_job',
                           input_name=Name.WORKFLOW_START_INPUT,
                           event='workflow_start_event').get_event_token_name()
        self._verify_token_names([waiting_parent, waiting_child, start_event])

    def _verify_parent_job_runnable(self):
        """Check that the parent job is runnable and the child still waits."""
        runnable_parent = Name(workflow='some_workflow',
                               instance='12345',
                               job_state=Name.RUNNABLE_STATE,
                               job='parent_job').get_job_token_name()
        waiting_child = Name(workflow='some_workflow',
                             instance='12345',
                             job_state=Name.WAITING_STATE,
                             job='child_job').get_job_token_name()
        self._verify_token_names([runnable_parent, waiting_child])

    def test_get_triggering_events(self):
        """Triggering events pick exactly one event from each input's set."""
        self.assertEqual([], Worker._get_triggering_events([]))
        self.assertEqual(['a'], Worker._get_triggering_events([['a']]))

        # With multiple candidates per input, any single pick is valid.
        single = Worker._get_triggering_events([['a', 'b']])
        self.assertIn(single, (['a'], ['b']))

        pair = Worker._get_triggering_events([['a', 'b'], ['1', '2']])
        self.assertIn(pair,
                      (['a', '1'], ['a', '2'], ['b', '1'], ['b', '2']))

    def test_move_job_token_to_runnable(self):
        """A waiting job moved with its triggering event becomes runnable."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        job_name = Name(workflow='some_workflow',
                        instance='12345',
                        job_state=Name.WAITING_STATE,
                        job='parent_job')
        job_token = self._get_token(job_name.get_job_token_name())
        event_name = Name(workflow='some_workflow',
                          instance='12345',
                          job='parent_job',
                          input_name=Name.WORKFLOW_START_INPUT,
                          event='workflow_start_event')
        event_token = self._get_token(event_name.get_event_token_name())
        self._worker._move_job_token_to_runnable(job_token, [event_token])
        # Event token should have been removed and the parent job should be
        # runnable.
        self._verify_parent_job_runnable()

    def test_make_job_runnable(self):
        """Only a job whose triggering events are all present becomes runnable."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        parent_job_name = Name(workflow='some_workflow',
                               instance='12345',
                               job_state=Name.WAITING_STATE,
                               job='parent_job').get_job_token_name()
        child_job_name = Name(workflow='some_workflow',
                              instance='12345',
                              job_state=Name.WAITING_STATE,
                              job='child_job').get_job_token_name()

        parent_job_token = self._get_token(parent_job_name)
        child_job_token = self._get_token(child_job_name)

        self._worker._make_job_runnable(child_job_token)
        # Child job is missing triggering tokens so it cannot be made runnable.
        self._verify_parent_job_waiting()

        self._worker._make_job_runnable(parent_job_token)
        # Parent job has all triggering tokens so it can be made runnable.
        self._verify_parent_job_runnable()

    def test_make_runnable(self):
        """_make_runnable affects only the matching workflow instance."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        self._worker._make_runnable('some_other_workflow', '12345')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        self._worker._make_runnable('some_workflow', 'some_other_instance')
        # Workflow instance does not exist so nothing should have changed.
        self._verify_parent_job_waiting()

        # The real instance exists, so its parent job becomes runnable.
        self._worker._make_runnable('some_workflow', '12345')
        self._verify_parent_job_runnable()

    def test_own_runnable_job_token(self):
        """The worker claims a job only once its triggering event exists."""
        self._post_job_tokens()

        self._worker._own_runnable_job_token()
        # Event token is not present so nothing should have changed.
        token_names = [
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name(),
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='child_job').get_job_token_name()
        ]
        self._verify_token_names(token_names)
        self.assertIsNone(self._worker._owned_job_token)

        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        # Worker should now own a runnable job token.
        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        self.assertEqual(parent_token, self._worker._owned_job_token)

    def _add_history_to_owned_token(self):
        """Append one successful execution record to the owned job's history."""
        owned_token = self._worker._owned_job_token
        job = pickle.loads(owned_token.data)
        record = ExecutionRecord(start_time=123456,
                                 end_time=1234567,
                                 exit_code=0)
        job.history.append(record)
        owned_token.data = pickle.dumps(job)

    def test_get_output_event_tokens(self):
        """A finished job yields one event token per downstream input."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        # Give the in-memory job a successful execution record; the owned
        # token itself is deliberately left unmodified.
        job = pickle.loads(self._worker._owned_job_token.data)
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        # The single output event must target the child job's input fed by
        # the parent job.
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())

    def test_move_job_token_to_waiting(self):
        """A finished job moves back to waiting and keeps its history."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        # Reuse the shared helper instead of duplicating the unpickle /
        # append / repickle dance inline.
        self._add_history_to_owned_token()
        job = pickle.loads(self._worker._owned_job_token.data)

        self._worker._move_job_token_to_waiting(job, True)

        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        # 123456 is the start_time written by _add_history_to_owned_token.
        self.assertEqual(123456, job.history[0].start_time)

    def test_keep_job_token_in_runnable(self):
        """Keeping a job runnable preserves its updated history."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')

        self._worker._keep_job_token_in_runnable(job)

        # The job must still be runnable and carry the appended record.
        self._verify_parent_job_runnable()
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.RUNNABLE_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        self.assertEqual('some_historic_record', job.history[0])

    @staticmethod
    def _from_job(workflow, instance, job_name, job, data_builder, emailer):
        """Fake JobExecutor factory that reports one successful execution."""
        record = ExecutionRecord(start_time=123456,
                                 end_time=1234567,
                                 exit_code=0)
        finished_job = copy.copy(job)
        finished_job.history.append(record)
        executor = mock.Mock()
        executor.job = finished_job
        executor.prepare.return_value = True
        executor.execute.return_value = True
        return executor

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_execute_job(self, job_executor_mock):
        """Executing the owned job records its run and releases ownership."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        # side_effect fully determines from_job's return value, so no
        # separate return_value stub is needed.
        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._execute_job()

        self.assertIsNone(self._worker._owned_job_token)
        parent_token = self._get_token(
            Name(workflow='some_workflow',
                 instance='12345',
                 job_state=Name.WAITING_STATE,
                 job='parent_job').get_job_token_name())
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

    def test_send_instance_end_email(self):
        """The instance-end email goes to the schedule's recipients."""
        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        # Stub out everything the worker reads from the data builder.
        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        instance_data = mock.Mock()
        job_data = mock.Mock()
        data_builder.get_schedule.return_value = schedule_data
        data_builder.get_instance.return_value = instance_data
        data_builder.get_jobs.return_value = [job_data]

        self._worker._send_instance_end_email('some_workflow', '12345')

        self._emailer.send_instance_end_message.assert_called_once_with(
            ['*****@*****.**'], instance_data, [job_data])

    def test_send_job_failure_emails(self):
        """Failure emails reach the union of job and schedule recipients."""
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()

        job = pickle.loads(self._worker._owned_job_token.data)
        job.history.append('some_historic_record')
        executor = mock.Mock()
        self._worker._executor = executor
        executor.job = job

        data_builder = mock.Mock()
        self._worker._data_builder = data_builder

        schedule_data = mock.Mock()
        schedule_data.emails = ['*****@*****.**']
        data_builder.get_schedule.return_value = schedule_data

        execution_data = mock.Mock()
        data_builder.get_execution.return_value = execution_data

        self._worker._send_job_failure_emails(True)

        # The recipient list combines the job's own emails with the
        # schedule-level emails.
        self._emailer.send_job_execution_end_message.assert_any_call(
            ['*****@*****.**', '*****@*****.**'],
            execution_data)

    @mock.patch('pinball.workflow.worker.JobExecutor')
    def test_run(self, job_executor_mock):
        """End-to-end run: both jobs execute and the instance gets archived.

        The first run executes the workflow; the second run, with time
        patched to one day later, lets the archiver collect the finished
        instance.
        """
        self._post_job_tokens()
        self._post_workflow_start_event_token()

        job_executor_mock.from_job.side_effect = WorkerTestCase._from_job

        self._worker._test_only_end_if_no_runnable = True
        self._worker.run()
        with mock.patch('pinball.workflow.archiver.time') as time_patch:
            # add one day
            time_patch.time.return_value = time.time() + 24 * 60 * 60
            self._worker.run()

        parent_job_token_name = Name(workflow='some_workflow',
                                     instance='12345',
                                     job_state=Name.WAITING_STATE,
                                     job='parent_job').get_job_token_name()
        child_job_token_name = Name(workflow='some_workflow',
                                    instance='12345',
                                    job_state=Name.WAITING_STATE,
                                    job='child_job').get_job_token_name()
        signal_string = Signal.action_to_string(Signal.ARCHIVE)
        signal_token_name = Name(workflow='some_workflow',
                                 instance='12345',
                                 signal=signal_string).get_signal_token_name()

        # All three tokens must have been archived (stored but inactive).
        token_names = [
            parent_job_token_name, child_job_token_name, signal_token_name
        ]
        self._verify_archived_token_names(token_names)

        # One execution per job.
        self.assertEqual(2, job_executor_mock.from_job.call_count)

        parent_token = self._get_stored_token(parent_job_token_name)
        job = pickle.loads(parent_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        child_token = self._get_stored_token(child_job_token_name)
        job = pickle.loads(child_token.data)
        self.assertEqual(1, len(job.history))
        execution_record = job.history[0]
        self.assertEqual(0, execution_record.exit_code)
        self.assertEqual(1234567, execution_record.end_time)

        signal_token = self._get_stored_token(signal_token_name)
        signal = pickle.loads(signal_token.data)
        self.assertEqual(Signal.ARCHIVE, signal.action)