Exemplo n.º 1
0
    def test_abort_running(self, data_builder_mock, signaller_mock):
        """Aborting a schedule signals ABORT only for the RUNNING instance."""
        client = mock.Mock()
        store = mock.Mock()

        builder = mock.Mock()
        data_builder_mock.return_value = builder

        # One finished (FAILURE) and one in-flight (RUNNING) instance of
        # the same workflow.
        builder.get_instances.return_value = [
            WorkflowInstanceData('some_workflow', '123',
                                 status=Status.FAILURE),
            WorkflowInstanceData('some_workflow', '12345',
                                 status=Status.RUNNING),
        ]

        signaller = mock.Mock()
        signaller_mock.return_value = signaller

        schedule = WorkflowSchedule(workflow='some_workflow')
        schedule.abort_running(client, store)

        # Only the running instance ('12345') should have been signalled.
        signaller_mock.assert_called_once_with(client,
                                               workflow='some_workflow',
                                               instance='12345')
        signaller.set_action.assert_called_once_with(Signal.ABORT)
Exemplo n.º 2
0
    def test_abort_running(self, data_builder_mock, signaller_mock):
        """Only the RUNNING instance receives an ABORT signal."""
        client, store = mock.Mock(), mock.Mock()

        data_builder = mock.Mock()
        data_builder_mock.return_value = data_builder
        # The completed (FAILURE) instance must be ignored by abort_running.
        data_builder.get_instances.return_value = [
            WorkflowInstanceData('some_workflow', '123',
                                 status=Status.FAILURE),
            WorkflowInstanceData('some_workflow', '12345',
                                 status=Status.RUNNING),
        ]

        signaller = mock.Mock()
        signaller_mock.return_value = signaller

        WorkflowSchedule(workflow='some_workflow').abort_running(client,
                                                                 store)

        signaller_mock.assert_called_once_with(client,
                                               workflow='some_workflow',
                                               instance='12345')
        signaller.set_action.assert_called_once_with(Signal.ABORT)
Exemplo n.º 3
0
    def test_is_failed(self, data_builder_mock):
        """is_failed is False without workflow data, True on FAILURE."""
        store = mock.Mock()
        builder = mock.Mock()
        data_builder_mock.return_value = builder

        schedule = WorkflowSchedule(workflow='some_workflow')

        # No data recorded for the workflow -> not failed.
        builder.get_workflow.return_value = None
        self.assertFalse(schedule.is_failed(store))

        # The stored workflow data reports FAILURE -> failed.
        builder.get_workflow.return_value = WorkflowData(
            'some_workflow', status=Status.FAILURE)
        self.assertTrue(schedule.is_failed(store))
Exemplo n.º 4
0
    def test_is_failed(self, data_builder_mock):
        """A schedule is failed only when stored data says FAILURE."""
        store = mock.Mock()
        data_builder = mock.Mock()
        data_builder_mock.return_value = data_builder
        schedule = WorkflowSchedule(workflow='some_workflow')

        # Missing workflow data means the workflow cannot be failed yet.
        data_builder.get_workflow.return_value = None
        self.assertFalse(schedule.is_failed(store))

        # A FAILURE status in the stored data flips the answer.
        failed_data = WorkflowData('some_workflow', status=Status.FAILURE)
        data_builder.get_workflow.return_value = failed_data
        self.assertTrue(schedule.is_failed(store))
Exemplo n.º 5
0
 def test_corresponds_to(self):
     """Schedules correspond when only next_run_time differs."""
     base = WorkflowSchedule(next_run_time=20,
                             recurrence_seconds=10,
                             workflow='some_workflow')
     # Same workflow and recurrence, shifted next_run_time -> corresponds.
     shifted = WorkflowSchedule(next_run_time=10,
                                recurrence_seconds=10,
                                workflow='some_workflow')
     self.assertTrue(base.corresponds_to(shifted))
     # Different workflow name -> does not correspond.
     other = WorkflowSchedule(next_run_time=20,
                              recurrence_seconds=10,
                              workflow='some_other_workflow')
     self.assertFalse(base.corresponds_to(other))
Exemplo n.º 6
0
 def setUp(self):
     """Build a four-job workflow with deps job1 <- job2 and job3 <- job4."""
     # 1370354400 = 2013-06-04 14:00 UTC
     # (NOTE: the original comment said 2012; the epoch value is in 2013.)
     self.next_run_time = 1370354400
     # 1370289240 = 2013-06-03 19:54 UTC
     self.previous_run_time = 1370289240
     self.workflow_schedule = WorkflowSchedule(
         next_run_time=self.next_run_time,
         recurrence_seconds=24 * 60 * 60,  # daily recurrence
         workflow='some_workflow',
         emails=['some_email'])
     self.workflow = WorkflowDef('some_workflow', self.workflow_schedule,
                                 'some_emails')
     # Four jobs, each with its own command template; job2 depends on
     # job1 and job4 depends on job3.
     self.job1 = JobDef('some_job_1',
                        CommandJobTemplate('some_template1',
                                           'some_command1'),
                        self.workflow)
     self.workflow.add_job(self.job1)
     self.job2 = JobDef('some_job_2',
                        CommandJobTemplate('some_template2',
                                           'some_command2'),
                        self.workflow)
     self.workflow.add_job(self.job2)
     self.job2.add_dep(self.job1)
     self.job3 = JobDef('some_job_3',
                        CommandJobTemplate('some_template3',
                                           'some_command3'),
                        self.workflow)
     self.workflow.add_job(self.job3)
     self.job4 = JobDef('some_job_4',
                        CommandJobTemplate('some_template4',
                                           'some_command4'),
                        self.workflow)
     self.workflow.add_job(self.job4)
     self.job4.add_dep(self.job3)
Exemplo n.º 7
0
 def _get_schedule_token():
     """Build an already-expired schedule token for workflow_0."""
     now = int(time.time())
     # next_run_time and expirationTime are both 10s in the past so the
     # token is immediately due.
     schedule = WorkflowSchedule(next_run_time=now - 10,
                                 recurrence_seconds=10,
                                 workflow='workflow_0')
     token_name = Name(
         workflow='workflow_0').get_workflow_schedule_token_name()
     token = Token(name=token_name,
                   owner='some_owner',
                   expirationTime=now - 10)
     token.data = pickle.dumps(schedule)
     return token
Exemplo n.º 8
0
    def test_run(self, load_path_mock):
        """run() yields one token update when the parser emits one token."""
        config_parser = mock.Mock()

        def fake_parser_factory(params):
            # The parser must be created with only the caller key set.
            self.assertEqual([PARSER_CALLER_KEY], params.keys())
            return config_parser

        load_path_mock.return_value = fake_parser_factory

        job_name = Name(workflow='some_workflow',
                        instance='123',
                        job_state=Name.WAITING_STATE,
                        job='some_job')
        config_parser.get_workflow_tokens.return_value = [
            Token(name=job_name.get_job_token_name()),
        ]

        schedule = WorkflowSchedule(workflow='some_workflow')
        request = schedule.run(Emailer('some_host', '8080'),
                               EphemeralStore())

        self.assertEqual(1, len(request.updates))
Exemplo n.º 9
0
    def test_run(self, load_path_mock):
        """run() loads the default parser and emits one token update."""
        config_parser = mock.Mock()
        load_path_mock.return_value = config_parser

        token_name = Name(workflow='some_workflow',
                          instance='123',
                          job_state=Name.WAITING_STATE,
                          job='some_job').get_job_token_name()
        config_parser.get_workflow_tokens.return_value = [
            Token(name=token_name),
        ]

        schedule = WorkflowSchedule(workflow='some_workflow')
        emailer = Emailer('some_host', '8080')
        store = EphemeralStore()
        request = schedule.run(emailer, store)

        # The default PyWorkflowParser must have been loaded exactly once.
        self.assertEqual(load_path_mock.call_args_list, [
            mock.call('pinball_ext.workflow.parser.PyWorkflowParser', {},
                      'schedule'),
        ])

        self.assertEqual(1, len(request.updates))
Exemplo n.º 10
0
    def test_run(self, load_path_mock):
        """run() works when the parser is created with empty params."""
        config_parser = mock.Mock()

        def fake_parser_factory(params):
            # This schedule configures no parser params at all.
            self.assertEqual([], params.keys())
            return config_parser

        load_path_mock.return_value = fake_parser_factory

        waiting_job = Name(workflow='some_workflow',
                           instance='123',
                           job_state=Name.WAITING_STATE,
                           job='some_job')
        config_parser.get_workflow_tokens.return_value = [
            Token(name=waiting_job.get_job_token_name()),
        ]

        schedule = WorkflowSchedule(workflow='some_workflow')
        request = schedule.run(Emailer('some_host', '8080'),
                               EphemeralStore())

        self.assertEqual(1, len(request.updates))
Exemplo n.º 11
0
 def get_schedule_token(self, workflow):
     """Materialize the stored schedule config for *workflow* as a token."""
     config = self._repository.get_schedule(workflow)
     schedule = WorkflowSchedule(
         next_run_time=schedule_to_timestamp(config.time,
                                             config.start_date),
         recurrence_seconds=recurrence_str_to_sec(config.recurrence),
         overrun_policy=OverrunPolicy.from_string(config.overrun_policy),
         workflow=config.workflow,
         emails=config.emails,
         # TODO(mao): allow users to specify this through the UI.
         max_running_instances=(
             PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES))
     schedule.advance_next_run_time()
     # The token expires at whatever next_run_time the schedule settled on.
     token_name = Name(
         workflow=config.workflow).get_workflow_schedule_token_name()
     return Token(name=token_name,
                  owner='parser',
                  expirationTime=schedule.next_run_time,
                  data=pickle.dumps(schedule))
Exemplo n.º 12
0
 def get_schedule_token(self, workflow):
     """Turn the repository's schedule config for *workflow* into a token."""
     config = self._repository.get_schedule(workflow)
     next_run = schedule_to_timestamp(config.time, config.start_date)
     policy = OverrunPolicy.from_string(config.overrun_policy)
     schedule = WorkflowSchedule(
         next_run_time=next_run,
         recurrence_seconds=recurrence_str_to_sec(config.recurrence),
         overrun_policy=policy,
         workflow=config.workflow,
         emails=config.emails,
         # TODO(mao): make this user-configurable through the UI.
         max_running_instances=(
             PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES))
     schedule.advance_next_run_time()
     # Expire the token at the (possibly advanced) next run time.
     name = Name(workflow=config.workflow)
     return Token(name=name.get_workflow_schedule_token_name(),
                  owner='parser',
                  expirationTime=schedule.next_run_time,
                  data=pickle.dumps(schedule))
Exemplo n.º 13
0
    def test_run(self, load_path_mock):
        """run() loads PyWorkflowParser once and produces one update."""
        config_parser = mock.Mock()
        load_path_mock.return_value = config_parser

        waiting_job = Name(workflow='some_workflow',
                           instance='123',
                           job_state=Name.WAITING_STATE,
                           job='some_job')
        config_parser.get_workflow_tokens.return_value = [
            Token(name=waiting_job.get_job_token_name()),
        ]

        store = EphemeralStore()
        emailer = Emailer('some_host', '8080')
        schedule = WorkflowSchedule(workflow='some_workflow')
        request = schedule.run(emailer, store)

        expected_calls = [
            mock.call('pinball_ext.workflow.parser.PyWorkflowParser',
                      {},
                      'schedule'),
        ]
        self.assertEqual(load_path_mock.call_args_list, expected_calls)

        self.assertEqual(1, len(request.updates))
Exemplo n.º 14
0
def _generate_schedule_tokens(workflows):
    """Create one schedule token for each of `workflows` synthetic workflows.

    Args:
        workflows: Number of workflows (and tokens) to generate.
    Returns:
        A list of Token objects, each carrying a pickled WorkflowSchedule.
    """
    tokens = []
    for index in range(workflows):
        workflow_name = 'workflow_%d' % index
        # Schedule each run at least a year out so nothing fires during
        # tests; stagger per-workflow by one extra day.
        next_run = time.time() + (365 + index) * 24 * 60 * 60
        # Cap the exponentially growing recurrence at one year.
        recurrence = min(365 * 24 * 60 * 60, 60 ** index)
        schedule = WorkflowSchedule(next_run,
                                    recurrence_seconds=recurrence,
                                    overrun_policy=index % 4,
                                    workflow=workflow_name)
        token_name = Name(
            workflow=workflow_name).get_workflow_schedule_token_name()
        tokens.append(Token(name=token_name,
                            version=100000000 * index,
                            owner='some_owner',
                            expirationTime=next_run,
                            data=pickle.dumps(schedule)))
    return tokens
Exemplo n.º 15
0
 def test_corresponds_to(self):
     """corresponds_to ignores next_run_time but not the workflow name."""
     schedule = WorkflowSchedule(next_run_time=20,
                                 recurrence_seconds=10,
                                 workflow='some_workflow')
     same_workflow = WorkflowSchedule(next_run_time=10,
                                      recurrence_seconds=10,
                                      workflow='some_workflow')
     other_workflow = WorkflowSchedule(next_run_time=20,
                                       recurrence_seconds=10,
                                       workflow='some_other_workflow')
     self.assertTrue(schedule.corresponds_to(same_workflow))
     self.assertFalse(schedule.corresponds_to(other_workflow))
Exemplo n.º 16
0
    def parse_schedule(self, workflow_name, workflow):
        """Parse schedule config and create workflow schedule.

        Missing overrun policy defaults to SKIP; missing max running
        instances falls back to the PinballConfig default.
        """
        recurrence_seconds = workflow.schedule.recurrence.total_seconds()

        policy = workflow.schedule.overrun_policy
        if policy is None:
            policy = OverrunPolicy.SKIP

        max_instances = workflow.schedule.max_running_instances
        if max_instances is None:
            max_instances = (
                PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES)

        # reference_timestamp is converted to an epoch second via timegm,
        # i.e. it is interpreted as UTC.
        next_run_time = int(calendar.timegm(
            workflow.schedule.reference_timestamp.timetuple()))

        return WorkflowSchedule(
            next_run_time=next_run_time,
            recurrence_seconds=recurrence_seconds,
            overrun_policy=policy,
            parser_params=self.parser_params,
            workflow=workflow_name,
            emails=workflow.notify_emails.split(','),
            max_running_instances=max_instances)