Example no. 1
0
 def _simulate(self):
     """Simulate execution of active jobs.

     Reads all tokens from the store, splitting them into satisfied
     dependency pairs (from event tokens) and unpickled job objects
     (from job tokens).  The simulation then repeatedly fires any job
     whose satisfied-dependency count reaches two, feeding the fired
     job's outputs back in as newly satisfied dependencies.

     Returns:
         List of job names in the order they would execute.
     """
     satisfied = set()
     jobs_by_name = {}
     for token in self._store.read_tokens():
         as_event = Name.from_event_token_name(token.name)
         if as_event.event:
             satisfied.add((as_event.input, as_event.job))
             continue
         as_job = Name.from_job_token_name(token.name)
         if as_job.job:
             job = pickle.loads(token.data)
             jobs_by_name[job.name] = job
     executed = []
     counts = collections.defaultdict(int)
     while satisfied:
         frontier, satisfied = satisfied, set()
         for _, job_name in frontier:
             counts[job_name] += 1
             # A job fires exactly when its dependency count hits two.
             if counts[job_name] == 2:
                 executed.append(job_name)
                 for output in jobs_by_name[job_name].outputs:
                     satisfied.add((job_name, output))
     return executed
Example no. 2
0
 def _simulate(self):
     """Simulate execution of active jobs.

     Event tokens contribute (input, job) dependency pairs; job tokens
     are unpickled into job objects keyed by name.  Jobs fire once two
     of their dependencies are satisfied, and a fired job's outputs
     become new satisfied dependencies for downstream jobs.

     Returns:
         Names of the jobs that would execute, in firing order.
     """
     all_tokens = self._store.read_tokens()
     pending = set()
     job_map = {}
     for tok in all_tokens:
         parsed_event = Name.from_event_token_name(tok.name)
         if parsed_event.event:
             pending.add((parsed_event.input, parsed_event.job))
         elif Name.from_job_token_name(tok.name).job:
             loaded = pickle.loads(tok.data)
             job_map[loaded.name] = loaded
     fired = []
     seen = collections.defaultdict(int)
     while pending:
         current, pending = pending, set()
         for _unused_input, target in current:
             seen[target] += 1
             # Two satisfied dependencies trigger the job.
             if seen[target] == 2:
                 fired.append(target)
                 for out in job_map[target].outputs:
                     pending.add((target, out))
     return fired
Example no. 3
0
 def change_instance(self, instance):
     """Move all tokens to a specific instance.

     Rewrites every pending event token name so it points at the given
     instance, and records that instance on this object.

     Args:
         instance: The target instance identifier.
     """
     self._instance = instance
     relocated = {}
     for token_name, event in self._new_events.items():
         parsed = Name.from_event_token_name(token_name)
         parsed.instance = instance
         relocated[parsed.get_event_token_name()] = event
     self._new_events = relocated
Example no. 4
0
 def test_event_token_name(self):
     """Round-trip an event token name through Name parsing."""
     token_name = ("/workflow/some_workflow/some_instance/input/some_job/"
                   "some_input/some_event")
     parsed = Name.from_event_token_name(token_name)
     self.assertEqual("some_workflow", parsed.workflow)
     self.assertEqual("some_instance", parsed.instance)
     self.assertEqual("some_job", parsed.job)
     self.assertEqual("some_input", parsed.input)
     self.assertEqual("some_event", parsed.event)
     # Re-serializing must reproduce the original name exactly.
     self.assertEqual(token_name, parsed.get_event_token_name())
Example no. 5
0
 def change_instance(self, instance):
     """Move all tokens to a specific instance.

     Args:
         instance: The instance identifier to rewrite token names with.
     """
     self._instance = instance

     def _rekey(old_name):
         # Parse, retarget, and re-serialize a single event token name.
         name = Name.from_event_token_name(old_name)
         name.instance = instance
         return name.get_event_token_name()

     self._new_events = {
         _rekey(event_name): event
         for event_name, event in self._new_events.items()
     }
Example no. 6
0
    def test_get_workflow_tokens(self, repository_mock):
        """Parser emits a job token and a start event token for the workflow."""
        repository = mock.Mock()
        repository_mock.return_value = repository
        repository.get_job_names.return_value = ['some_job']

        config = JobConfig()
        for attr, value in [
                ('workflow', 'some_workflow'),
                ('job', 'some_job'),
                ('is_condition', False),
                ('template', 'tests.pinball.parser.'
                             'repository_config_parser_test.SomeJobTemplate'),
                ('template_params', {'some_param': 'some_value'}),
                ('parents', []),
                ('emails', ['*****@*****.**', '*****@*****.**']),
                ('max_attempts', 10),
                ('retry_delay_sec', 20),
                ('priority', 100)]:
            setattr(config, attr, value)
        repository.get_job.return_value = config

        tokens = RepositoryConfigParser().get_workflow_tokens('some_workflow')

        self.assertEqual(2, len(tokens))

        # The triggering event token is whichever of the two parses as an
        # event name carrying a workflow component.
        first, second = tokens
        event_token = (first if Name.from_event_token_name(first.name).workflow
                       else second)

        event_name = Name.from_event_token_name(event_token.name)
        self.assertEqual('some_workflow', event_name.workflow)
        self.assertEqual('some_job', event_name.job)
        self.assertEqual('__WORKFLOW_START__', event_name.input)

        event = pickle.loads(event_token.data)
        self.assertEqual('repository_config_parser', event.creator)

        repository.get_job_names.assert_called_once_with('some_workflow')
        repository.get_job.assert_called_once_with('some_workflow', 'some_job')
Example no. 7
0
 def test_event_token_name(self):
     """Verify every component parsed out of an event token name."""
     expected = ('/workflow/some_workflow/some_instance/input/some_job/'
                 'some_input/some_event')
     name = Name.from_event_token_name(expected)
     # Each path segment must land in the matching Name attribute.
     for attr, value in (('workflow', 'some_workflow'),
                         ('instance', 'some_instance'),
                         ('job', 'some_job'),
                         ('input', 'some_input'),
                         ('event', 'some_event')):
         self.assertEqual(value, getattr(name, attr))
     self.assertEqual(expected, name.get_event_token_name())
    def test_get_workflow_tokens(self, repository_mock):
        """End-to-end check of the tokens built from a one-job repository."""
        repo = mock.Mock()
        repository_mock.return_value = repo
        repo.get_job_names.return_value = ['some_job']

        cfg = JobConfig()
        cfg.workflow = 'some_workflow'
        cfg.job = 'some_job'
        cfg.is_condition = False
        cfg.template = ('tests.pinball.parser.'
                        'repository_config_parser_test.SomeJobTemplate')
        cfg.template_params = {'some_param': 'some_value'}
        cfg.parents = []
        cfg.emails = ['*****@*****.**', '*****@*****.**']
        cfg.max_attempts = 10
        cfg.retry_delay_sec = 20
        cfg.priority = 100
        repo.get_job.return_value = cfg

        tokens = RepositoryConfigParser().get_workflow_tokens('some_workflow')

        self.assertEqual(2, len(tokens))

        # Pick out the triggering event token from the pair.
        event_token = (tokens[0]
                       if Name.from_event_token_name(tokens[0].name).workflow
                       else tokens[1])

        parsed = Name.from_event_token_name(event_token.name)
        self.assertEqual('some_workflow', parsed.workflow)
        self.assertEqual('some_job', parsed.job)
        self.assertEqual('__WORKFLOW_START__', parsed.input)

        self.assertEqual('repository_config_parser',
                         pickle.loads(event_token.data).creator)

        repo.get_job_names.assert_called_once_with('some_workflow')
        repo.get_job.assert_called_once_with('some_workflow', 'some_job')
Example no. 9
0
    def _filter_event_tokens(self, tokens):
        """Filter out all tokens which are not event tokens.

        Event tokens are unpickled into self._existing_events, keyed by
        token name.  The first instance value seen is also latched onto
        self._instance when none is set yet.

        Args:
            tokens: The tokens to filter.
        """
        for candidate in tokens:
            parsed = Name.from_event_token_name(candidate.name)
            if not self._instance and parsed.instance:
                self._instance = parsed.instance
            if not parsed.event:
                continue
            self._existing_events[candidate.name] = pickle.loads(candidate.data)
Example no. 10
0
    def _filter_event_tokens(self, tokens):
        """Filter out all tokens which are not event tokens.

        Side effects: populates self._existing_events with unpickled
        events and may set self._instance from the first token name that
        carries an instance component.

        Args:
            tokens: The tokens to filter.
        """
        for tok in tokens:
            parsed_name = Name.from_event_token_name(tok.name)
            if not self._instance and parsed_name.instance:
                self._instance = parsed_name.instance
            if parsed_name.event:
                self._existing_events[tok.name] = pickle.loads(tok.data)
Example no. 11
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """Return names of events under a workflow instance, job, and input.

     Args:
         workflow_name: The workflow to look under.
         instance: The workflow instance to look under.
         job: The job to look under.
         input_name: The job input to look under.

     Returns:
         List of event name components, empty if nothing was grouped.
     """
     prefix = Name()
     prefix.workflow = workflow_name
     prefix.instance = instance
     prefix.job = job
     prefix.input = input_name

     request = GroupRequest()
     request.namePrefix = prefix.get_input_prefix()
     request.groupSuffix = Name.DELIMITER
     response = self._client.group(request)

     if not response.counts:
         return []
     return [Name.from_event_token_name(token_name).event
             for token_name in response.counts]
Example no. 12
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """Return names of events under a workflow instance, job, and input.

     Builds a group request over the input's token-name prefix and maps
     each grouped token name back to its event component.

     Returns:
         A list of event names (empty when the group response is empty).
     """
     key = Name()
     key.workflow = workflow_name
     key.instance = instance
     key.job = job
     key.input = input_name

     group_request = GroupRequest()
     group_request.namePrefix = key.get_input_prefix()
     group_request.groupSuffix = Name.DELIMITER

     grouped = self._client.group(group_request)
     result = []
     if grouped.counts:
         for token_name in grouped.counts:
             result.append(Name.from_event_token_name(token_name).event)
     return result
Example no. 13
0
    def test_get_output_event_tokens(self):
        """A finished job produces one event token per child-job input.

        Posts the job and workflow-start tokens, has the worker claim the
        runnable job, appends a successful execution record, and checks
        that exactly one output event token targeting the child job's
        input is produced.
        """
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        job = pickle.loads(self._worker._owned_job_token.data)
        # Exit code 0 marks the execution as successful.
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        # The event must land under the child job's input named after the
        # parent job.
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())
Example no. 14
0
    def test_get_output_event_tokens(self):
        """A finished job produces one event token per child-job input.

        Sets up the worker with a claimed runnable job whose history
        records a successful run, then verifies the single generated
        output event token is addressed to the child job's input.
        """
        self._post_job_tokens()
        self._post_workflow_start_event_token()
        self._worker._own_runnable_job_token()
        self.assertIsNotNone(self._worker._owned_job_token)

        job = pickle.loads(self._worker._owned_job_token.data)
        # Exit code 0 marks the execution as successful.
        execution_record = ExecutionRecord(start_time=123456,
                                           end_time=1234567,
                                           exit_code=0)
        job.history.append(execution_record)

        event_tokens = self._worker._get_output_event_tokens(job)
        self.assertEqual(1, len(event_tokens))
        event_token_name = Name.from_event_token_name(event_tokens[0].name)
        # The event must land under the child job's input named after the
        # parent job.
        expected_prefix = Name(workflow='some_workflow',
                               instance='12345',
                               job='child_job',
                               input_name='parent_job').get_input_prefix()
        self.assertEqual(expected_prefix, event_token_name.get_input_prefix())
Example no. 15
0
    def run(self, emailer, store):
        """Build the token-export request for a new workflow instance.

        Args:
            emailer: Used by the instance-limit check for notifications.
            store: Token store consulted for currently running instances.

        Returns:
            A ModifyRequest carrying the new workflow tokens, or None if
            the workflow was not found or too many instances are running.
        """
        if not self._check_workflow_instances(emailer, self.workflow, store):
            # Logger.warning() replaces the deprecated warn() alias.
            LOG.warning('too many instances running for workflow %s',
                        self.workflow)
            return None

        config_parser = load_path(PinballConfig.PARSER)(self.parser_params)
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        # Job token names carry the instance; if the first token does not
        # parse as a job token, fall back to parsing it as an event token.
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
Example no. 16
0
    def run(self, emailer, store):
        """Build the token-export request for a new workflow instance.

        Args:
            emailer: Used by the instance-limit check for notifications.
            store: Token store consulted for currently running instances.

        Returns:
            A ModifyRequest carrying the new workflow tokens, or None if
            the workflow was not found or too many instances are running.
        """
        if not self._check_workflow_instances(emailer, self.workflow, store):
            # Logger.warning() replaces the deprecated warn() alias.
            LOG.warning('too many instances running for workflow %s',
                        self.workflow)
            return None

        config_parser = load_path(PinballConfig.PARSER)(self.parser_params)
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        # Job token names carry the instance; if the first token does not
        # parse as a job token, fall back to parsing it as an event token.
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
Example no. 17
0
    def _add_events_to_job(job, triggering_event_tokens):
        """Put triggering events inside the job.

        Args:
            job: The job which should be augmented with the events.
            triggering_event_tokens: List of event tokens that triggered the
                job.
        """
        assert not job.events
        for token in triggering_event_tokens:
            if not token.data:
                # Backwards-compatibility path: data-less tokens are only
                # legal for the synthetic workflow-start input.
                # TODO(pawel): remove this logic after the transition to
                # the new model has been completed.
                name = Name.from_event_token_name(token.name)
                assert name.input == Name.WORKFLOW_START_INPUT
                continue
            event = pickle.loads(token.data)
            # Keep the job data structure small: skip events that carry
            # no attributes.
            if event.attributes:
                job.events.append(event)
Example no. 18
0
    def _add_events_to_job(job, triggering_event_tokens):
        """Put triggering events inside the job.

        Args:
            job: The job which should be augmented with the events.
            triggering_event_tokens: List of event tokens that triggered the
                job.
        """
        assert not job.events
        for trigger in triggering_event_tokens:
            if trigger.data:
                payload = pickle.loads(trigger.data)
                # Attribute-less events are dropped to keep the job object
                # small.
                if payload.attributes:
                    job.events.append(payload)
            else:
                # Backwards-compatibility path for data-less tokens.
                # TODO(pawel): remove this logic after the transition to
                # the new model has been completed.
                legacy_name = Name.from_event_token_name(trigger.name)
                assert legacy_name.input == Name.WORKFLOW_START_INPUT